From 7d21887d403a47362bfed47634cb75d9fadf70e8 Mon Sep 17 00:00:00 2001
From: pineappleEA
Date: Fri, 9 Jul 2021 23:54:15 +0200
Subject: [PATCH] early-access version 1866

---
 CMakeModules/GenerateSCMRev.cmake | 56 +-
 README.md | 2 +-
 externals/Vulkan-Headers/BUILD.gn | 1 +
 externals/Vulkan-Headers/CMakeLists.txt | 1 +
 externals/Vulkan-Headers/README.md | 48 +-
 .../vk_video/vulkan_video_codec_h264std.h | 299 +
 .../vulkan_video_codec_h264std_decode.h | 87 +
 .../vulkan_video_codec_h264std_encode.h | 94 +
 .../vk_video/vulkan_video_codec_h265std.h | 341 +
 .../vulkan_video_codec_h265std_decode.h | 59 +
 .../vk_video/vulkan_video_codecs_common.h | 21 +
 .../Vulkan-Headers/include/vulkan/vk_icd.h | 66 +-
 .../Vulkan-Headers/include/vulkan/vk_layer.h | 16 +-
 .../include/vulkan/vk_platform.h | 6 +-
 .../Vulkan-Headers/include/vulkan/vulkan.h | 14 +-
 .../Vulkan-Headers/include/vulkan/vulkan.hpp | 172042 +++++++++------
 .../include/vulkan/vulkan_android.h | 2 +-
 .../include/vulkan/vulkan_beta.h | 1030 +-
 .../include/vulkan/vulkan_core.h | 2132 +-
 .../include/vulkan/vulkan_directfb.h | 54 +
 .../include/vulkan/vulkan_fuchsia.h | 76 +-
 .../include/vulkan/vulkan_ggp.h | 2 +-
 .../include/vulkan/vulkan_ios.h | 4 +-
 .../include/vulkan/vulkan_macos.h | 4 +-
 .../include/vulkan/vulkan_metal.h | 2 +-
 .../include/vulkan/vulkan_screen.h | 54 +
 .../Vulkan-Headers/include/vulkan/vulkan_vi.h | 2 +-
 .../include/vulkan/vulkan_wayland.h | 2 +-
 .../include/vulkan/vulkan_win32.h | 2 +-
 .../include/vulkan/vulkan_xcb.h | 2 +-
 .../include/vulkan/vulkan_xlib.h | 2 +-
 .../include/vulkan/vulkan_xlib_xrandr.h | 2 +-
 .../Vulkan-Headers/registry/cgenerator.py | 29 +-
 .../Vulkan-Headers/registry/conventions.py | 2 +-
 .../Vulkan-Headers/registry/generator.py | 246 +-
 externals/Vulkan-Headers/registry/genvk.py | 80 +-
 externals/Vulkan-Headers/registry/reg.py | 85 +-
 .../registry/spec_tools/util.py | 4 +-
 .../Vulkan-Headers/registry/validusage.json | 20214 +-
 externals/Vulkan-Headers/registry/vk.xml | 5860 +-
 .../Vulkan-Headers/registry/vkconventions.py | 16 +-
 externals/sirit/CMakeLists.txt | 2 +-
 .../sirit/externals/SPIRV-Headers/.gitignore | 1 +
 .../sirit/externals/SPIRV-Headers/BUILD.bazel | 166 +
 .../sirit/externals/SPIRV-Headers/BUILD.gn | 44 +
 .../externals/SPIRV-Headers/CMakeLists.txt | 89 +-
 .../SPIRV-Headers/CODE_OF_CONDUCT.md | 1 +
 .../sirit/externals/SPIRV-Headers/README.md | 97 +-
 .../sirit/externals/SPIRV-Headers/WORKSPACE | 0
 .../SPIRV-Headers/cmake/Config.cmake.in | 4 +
 .../SPIRV-Headers/example/CMakeLists.txt | 5 -
 .../SPIRV-Headers/example/example.cpp | 6 +-
 .../SPIRV-Headers/include/spirv/1.0/spirv.cs | 993 +
 .../SPIRV-Headers/include/spirv/1.1/spirv.cs | 1015 +
 .../SPIRV-Headers/include/spirv/1.2/spirv.cs | 1021 +
 .../SPIRV-Headers/include/spirv/spir-v.xml | 173 +-
 .../include/spirv/unified1/AMD_gcn_shader.h | 52 +
 .../spirv/unified1/AMD_shader_ballot.h | 53 +
 .../AMD_shader_explicit_vertex_parameter.h | 50 +
 .../unified1/AMD_shader_trinary_minmax.h | 58 +
 .../include/spirv/unified1/DebugInfo.h | 144 +
 .../unified1/NonSemanticClspvReflection.h | 73 +
 .../spirv/unified1/NonSemanticDebugPrintf.h | 50 +
 .../include/spirv/unified1/OpenCL.std.h | 188 +-
 .../spirv/unified1/OpenCLDebugInfo100.h | 157 +
 .../unified1/extinst.debuginfo.grammar.json | 572 +
 ...t.nonsemantic.clspvreflection.grammar.json | 237 +
 ...tinst.nonsemantic.debugprintf.grammar.json | 13 +
 .../extinst.opencl.debuginfo.100.grammar.json | 636 +
 .../extinst.spv-amd-gcn-shader.grammar.json | 26 +
 ...extinst.spv-amd-shader-ballot.grammar.json | 41 +
 ...der-explicit-vertex-parameter.grammar.json | 14 +
 ...spv-amd-shader-trinary-minmax.grammar.json | 95 +
 .../spirv/unified1/spirv.core.grammar.json | 5809 +-
 .../include/spirv/unified1/spirv.cs | 1707 +
 .../include/spirv/unified1/spirv.h | 1210 +-
 .../include/spirv/unified1/spirv.hpp | 1210 +-
 .../include/spirv/unified1/spirv.hpp11 | 1210 +-
 .../include/spirv/unified1/spirv.json | 614 +-
 .../include/spirv/unified1/spirv.lua | 614 +-
 .../include/spirv/unified1/spirv.py | 614 +-
 .../include/spirv/unified1/spv.d | 1709 +
 .../bin/generate_language_headers.py | 242 +
 .../buildHeaders/bin/makeExtinstHeaders.py | 28 +
 .../tools/buildHeaders/bin/makeHeaders | 4 +-
 .../tools/buildHeaders/header.cpp | 174 +-
 .../SPIRV-Headers/tools/buildHeaders/header.h | 14 +-
 .../tools/buildHeaders/jsonToSpirv.cpp | 121 +-
 .../tools/buildHeaders/jsonToSpirv.h | 62 +-
 .../SPIRV-Headers/tools/buildHeaders/main.cpp | 20 +-
 externals/sirit/include/sirit/sirit.h | 541 +-
 externals/sirit/src/CMakeLists.txt | 10 +-
 .../sirit/src/instructions/annotation.cpp | 30 +-
 .../sirit/src/instructions/arithmetic.cpp | 25 +-
 externals/sirit/src/instructions/atomic.cpp | 138 +-
 externals/sirit/src/instructions/barrier.cpp | 17 +-
 externals/sirit/src/instructions/bit.cpp | 81 +-
 externals/sirit/src/instructions/constant.cpp | 34 +-
 .../sirit/src/instructions/conversion.cpp | 10 +-
 externals/sirit/src/instructions/debug.cpp | 39 +-
 .../sirit/src/instructions/derivatives.cpp | 29 +
 .../sirit/src/instructions/extension.cpp | 17 +-
 externals/sirit/src/instructions/flow.cpp | 99 +-
 externals/sirit/src/instructions/function.cpp | 29 +-
 externals/sirit/src/instructions/group.cpp | 60 +-
 externals/sirit/src/instructions/image.cpp | 163 +-
 externals/sirit/src/instructions/logical.cpp | 100 +-
 externals/sirit/src/instructions/memory.cpp | 89 +-
 externals/sirit/src/instructions/misc.cpp | 26 +-
 externals/sirit/src/instructions/type.cpp | 113 +-
 externals/sirit/src/sirit.cpp | 181 +-
 externals/sirit/src/stream.cpp | 51 -
 externals/sirit/src/stream.h | 247 +-
 externals/sirit/tests/main.cpp | 15 +-
 src/CMakeLists.txt | 1 +
 src/common/CMakeLists.txt | 56 +-
 src/common/logging/filter.cpp | 4 +
 src/common/logging/types.h | 4 +
 src/common/settings.cpp | 4 +-
 src/common/settings.h | 11 +-
 src/common/thread_worker.h | 3 +-
 src/core/telemetry_session.cpp | 4 +-
 src/shader_recompiler/CMakeLists.txt | 268 +
 src/shader_recompiler/backend/bindings.h | 19 +
 .../backend/glasm/emit_context.cpp | 154 +
 .../backend/glasm/emit_context.h | 80 +
 .../backend/glasm/emit_glasm.cpp | 489 +
 .../backend/glasm/emit_glasm.h | 25 +
 .../backend/glasm/emit_glasm_barriers.cpp | 0
 .../glasm/emit_glasm_bitwise_conversion.cpp | 91 +
 .../backend/glasm/emit_glasm_composite.cpp | 244 +
 .../glasm/emit_glasm_context_get_set.cpp | 346 +
 .../backend/glasm/emit_glasm_control_flow.cpp | 0
 .../backend/glasm/emit_glasm_convert.cpp | 231 +
 .../glasm/emit_glasm_floating_point.cpp | 414 +
 .../backend/glasm/emit_glasm_image.cpp | 850 +
 .../backend/glasm/emit_glasm_instructions.h | 625 +
 .../backend/glasm/emit_glasm_integer.cpp | 294 +
 .../backend/glasm/emit_glasm_logical.cpp | 0
 .../backend/glasm/emit_glasm_memory.cpp | 568 +
 .../glasm/emit_glasm_not_implemented.cpp | 273 +
 .../backend/glasm/emit_glasm_select.cpp | 67 +
 .../glasm/emit_glasm_shared_memory.cpp | 58 +
 .../backend/glasm/emit_glasm_special.cpp | 0
 .../backend/glasm/emit_glasm_undefined.cpp | 0
 .../backend/glasm/emit_glasm_warp.cpp | 150 +
 .../backend/glasm/reg_alloc.cpp | 186 +
 .../backend/glasm/reg_alloc.h | 304 +
 .../backend/glsl/emit_context.cpp | 687 +
 .../backend/glsl/emit_context.h | 172 +
 .../backend/glsl/emit_glsl.cpp | 240 +
 .../backend/glsl/emit_glsl.h | 24 +
 .../backend/glsl/emit_glsl_atomic.cpp | 418 +
 .../backend/glsl/emit_glsl_barriers.cpp | 21 +
 .../glsl/emit_glsl_bitwise_conversion.cpp | 94 +
 .../backend/glsl/emit_glsl_composite.cpp | 219 +
 .../glsl/emit_glsl_context_get_set.cpp | 456 +
 .../backend/glsl/emit_glsl_control_flow.cpp | 21 +
 .../backend/glsl/emit_glsl_convert.cpp | 230 +
 .../backend/glsl/emit_glsl_floating_point.cpp | 458 +
 .../backend/glsl/emit_glsl_image.cpp | 799 +
 .../backend/glsl/emit_glsl_instructions.h | 702 +
 .../backend/glsl/emit_glsl_integer.cpp | 253 +
 .../backend/glsl/emit_glsl_logical.cpp | 28 +
 .../backend/glsl/emit_glsl_memory.cpp | 202 +
 .../glsl/emit_glsl_not_implemented.cpp | 105 +
 .../backend/glsl/emit_glsl_select.cpp | 55 +
 .../backend/glsl/emit_glsl_shared_memory.cpp | 79 +
 .../backend/glsl/emit_glsl_special.cpp | 111 +
 .../backend/glsl/emit_glsl_undefined.cpp | 32 +
 .../backend/glsl/emit_glsl_warp.cpp | 217 +
 .../backend/glsl/var_alloc.cpp | 308 +
 .../backend/glsl/var_alloc.h | 105 +
 .../backend/spirv/emit_context.cpp | 1368 +
 .../backend/spirv/emit_context.h | 307 +
 .../backend/spirv/emit_spirv.cpp | 541 +
 .../backend/spirv/emit_spirv.h | 27 +
 .../backend/spirv/emit_spirv_atomic.cpp | 448 +
 .../backend/spirv/emit_spirv_barriers.cpp | 38 +
 .../spirv/emit_spirv_bitwise_conversion.cpp | 66 +
 .../backend/spirv/emit_spirv_composite.cpp | 155 +
 .../spirv/emit_spirv_context_get_set.cpp | 505 +
 .../backend/spirv/emit_spirv_control_flow.cpp | 28 +
 .../backend/spirv/emit_spirv_convert.cpp | 269 +
 .../spirv/emit_spirv_floating_point.cpp | 396 +
 .../backend/spirv/emit_spirv_image.cpp | 457 +
 .../backend/spirv/emit_spirv_image_atomic.cpp | 183 +
 .../backend/spirv/emit_spirv_instructions.h | 579 +
 .../backend/spirv/emit_spirv_integer.cpp | 270 +
 .../backend/spirv/emit_spirv_logical.cpp | 26 +
 .../backend/spirv/emit_spirv_memory.cpp | 275 +
 .../backend/spirv/emit_spirv_select.cpp | 42 +
 .../spirv/emit_spirv_shared_memory.cpp | 174 +
 .../backend/spirv/emit_spirv_special.cpp | 150 +
 .../backend/spirv/emit_spirv_undefined.cpp | 30 +
 .../backend/spirv/emit_spirv_warp.cpp | 203 +
 src/shader_recompiler/environment.h | 53 +
 src/shader_recompiler/exception.h | 66 +
 .../frontend/ir/abstract_syntax_list.h | 58 +
 .../frontend/ir/attribute.cpp | 454 +
 src/shader_recompiler/frontend/ir/attribute.h | 250 +
 .../frontend/ir/basic_block.cpp | 149 +
 .../frontend/ir/basic_block.h | 185 +
 .../frontend/ir/breadth_first_search.h | 56 +
 .../frontend/ir/condition.cpp | 29 +
 src/shader_recompiler/frontend/ir/condition.h | 60 +
 .../frontend/ir/flow_test.cpp | 83 +
 src/shader_recompiler/frontend/ir/flow_test.h | 62 +
 .../frontend/ir/ir_emitter.cpp | 2017 +
 .../frontend/ir/ir_emitter.h | 413 +
 .../frontend/ir/microinstruction.cpp | 411 +
 src/shader_recompiler/frontend/ir/modifiers.h | 49 +
 src/shader_recompiler/frontend/ir/opcodes.cpp | 15 +
 src/shader_recompiler/frontend/ir/opcodes.h | 109 +
 src/shader_recompiler/frontend/ir/opcodes.inc | 550 +
 src/shader_recompiler/frontend/ir/patch.cpp | 28 +
 src/shader_recompiler/frontend/ir/patch.h | 149 +
 .../frontend/ir/post_order.cpp | 46 +
 .../frontend/ir/post_order.h | 14 +
 src/shader_recompiler/frontend/ir/pred.h | 44 +
 src/shader_recompiler/frontend/ir/program.cpp | 32 +
 src/shader_recompiler/frontend/ir/program.h | 35 +
 src/shader_recompiler/frontend/ir/reg.h | 332 +
 src/shader_recompiler/frontend/ir/type.cpp | 38 +
 src/shader_recompiler/frontend/ir/type.h | 61 +
 src/shader_recompiler/frontend/ir/value.cpp | 99 +
 src/shader_recompiler/frontend/ir/value.h | 398 +
 .../frontend/maxwell/control_flow.cpp | 643 +
 .../frontend/maxwell/control_flow.h | 170 +
 .../frontend/maxwell/decode.cpp | 149 +
 .../frontend/maxwell/decode.h | 14 +
 .../maxwell/indirect_branch_table_track.cpp | 108 +
 .../maxwell/indirect_branch_table_track.h | 28 +
 .../frontend/maxwell/instruction.h | 63 +
 .../frontend/maxwell/location.h | 112 +
 .../frontend/maxwell/maxwell.inc | 286 +
 .../frontend/maxwell/opcodes.cpp | 26 +
 .../frontend/maxwell/opcodes.h | 30 +
 .../maxwell/structured_control_flow.cpp | 888 +
 .../maxwell/structured_control_flow.h | 20 +
 .../impl/atomic_operations_global_memory.cpp | 214 +
 .../impl/atomic_operations_shared_memory.cpp | 110 +
 .../impl/attribute_memory_to_physical.cpp | 35 +
 .../translate/impl/barrier_operations.cpp | 96 +
 .../translate/impl/bitfield_extract.cpp | 74 +
 .../translate/impl/bitfield_insert.cpp | 62 +
 .../translate/impl/branch_indirect.cpp | 36 +
 .../maxwell/translate/impl/common_encoding.h | 57 +
 .../maxwell/translate/impl/common_funcs.cpp | 110 +
 .../maxwell/translate/impl/common_funcs.h | 24 +
 .../translate/impl/condition_code_set.cpp | 66 +
 .../maxwell/translate/impl/double_add.cpp | 55 +
 .../translate/impl/double_compare_and_set.cpp | 72 +
 .../impl/double_fused_multiply_add.cpp | 58 +
 .../maxwell/translate/impl/double_min_max.cpp | 55 +
 .../translate/impl/double_multiply.cpp | 50 +
 .../translate/impl/double_set_predicate.cpp | 54 +
 .../maxwell/translate/impl/exit_program.cpp | 43 +
 .../translate/impl/find_leading_one.cpp | 47 +
 .../translate/impl/floating_point_add.cpp | 82 +
 .../translate/impl/floating_point_compare.cpp | 55 +
 .../impl/floating_point_compare_and_set.cpp | 78 +
 ...oating_point_conversion_floating_point.cpp | 214 +
 .../floating_point_conversion_integer.cpp | 253 +
 .../floating_point_fused_multiply_add.cpp | 94 +
 .../translate/impl/floating_point_min_max.cpp | 62 +
 .../impl/floating_point_multi_function.cpp | 71 +
 .../impl/floating_point_multiply.cpp | 127 +
 .../impl/floating_point_range_reduction.cpp | 41 +
 .../impl/floating_point_set_predicate.cpp | 60 +
 .../impl/floating_point_swizzled_add.cpp | 44 +
 .../impl/half_floating_point_add.cpp | 125 +
 ...half_floating_point_fused_multiply_add.cpp | 169 +
 .../impl/half_floating_point_helper.cpp | 62 +
 .../impl/half_floating_point_helper.h | 42 +
 .../impl/half_floating_point_multiply.cpp | 143 +
 .../impl/half_floating_point_set.cpp | 117 +
 .../half_floating_point_set_predicate.cpp | 118 +
 .../frontend/maxwell/translate/impl/impl.cpp | 272 +
 .../frontend/maxwell/translate/impl/impl.h | 387 +
 .../maxwell/translate/impl/integer_add.cpp | 105 +
 .../impl/integer_add_three_input.cpp | 122 +
 .../translate/impl/integer_compare.cpp | 48 +
 .../impl/integer_compare_and_set.cpp | 123 +
 .../integer_floating_point_conversion.cpp | 180 +
 .../translate/impl/integer_funnel_shift.cpp | 82 +
 .../impl/integer_minimum_maximum.cpp | 64 +
 .../translate/impl/integer_popcount.cpp | 36 +
 .../translate/impl/integer_scaled_add.cpp | 86 +
 .../translate/impl/integer_set_predicate.cpp | 49 +
 .../translate/impl/integer_shift_left.cpp | 71 +
 .../translate/impl/integer_shift_right.cpp | 66 +
 .../impl/integer_short_multiply_add.cpp | 135 +
 .../impl/integer_to_integer_conversion.cpp | 126 +
 .../impl/internal_stage_buffer_entry_read.cpp | 53 +
 .../maxwell/translate/impl/load_constant.cpp | 62 +
 .../maxwell/translate/impl/load_constant.h | 39 +
 .../translate/impl/load_effective_address.cpp | 108 +
 .../translate/impl/load_store_attribute.cpp | 196 +
 .../impl/load_store_local_shared.cpp | 201 +
 .../translate/impl/load_store_memory.cpp | 184 +
 .../translate/impl/logic_operation.cpp | 116 +
 .../impl/logic_operation_three_input.cpp | 122 +
 .../impl/move_predicate_to_register.cpp | 66 +
 .../maxwell/translate/impl/move_register.cpp | 44 +
 .../impl/move_register_to_predicate.cpp | 71 +
 .../translate/impl/move_special_register.cpp | 181 +
 .../translate/impl/not_implemented.cpp | 283 +
 .../translate/impl/output_geometry.cpp | 45 +
 .../maxwell/translate/impl/pixel_load.cpp | 46 +
 .../impl/predicate_set_predicate.cpp | 38 +
 .../translate/impl/predicate_set_register.cpp | 53 +
 .../impl/select_source_with_predicate.cpp | 44 +
 .../impl/surface_atomic_operations.cpp | 208 +
 .../translate/impl/surface_load_store.cpp | 281 +
 .../maxwell/translate/impl/texture_fetch.cpp | 236 +
 .../translate/impl/texture_fetch_swizzled.cpp | 266 +
 .../maxwell/translate/impl/texture_gather.cpp | 208 +
 .../impl/texture_gather_swizzled.cpp | 134 +
 .../translate/impl/texture_gradient.cpp | 182 +
 .../maxwell/translate/impl/texture_load.cpp | 165 +
 .../translate/impl/texture_load_swizzled.cpp | 242 +
 .../translate/impl/texture_mipmap_level.cpp | 131 +
 .../maxwell/translate/impl/texture_query.cpp | 76 +
 .../maxwell/translate/impl/video_helper.cpp | 30 +
 .../maxwell/translate/impl/video_helper.h | 23 +
 .../translate/impl/video_minimum_maximum.cpp | 92 +
 .../translate/impl/video_multiply_add.cpp | 64 +
 .../translate/impl/video_set_predicate.cpp | 92 +
 .../frontend/maxwell/translate/impl/vote.cpp | 54 +
 .../maxwell/translate/impl/warp_shuffle.cpp | 69 +
 .../frontend/maxwell/translate/translate.cpp | 52 +
 .../frontend/maxwell/translate/translate.h | 14 +
 .../frontend/maxwell/translate_program.cpp | 216 +
 .../frontend/maxwell/translate_program.h | 23 +
 src/shader_recompiler/host_translate_info.h | 18 +
 .../ir_opt/collect_shader_info_pass.cpp | 928 +
 .../ir_opt/constant_propagation_pass.cpp | 609 +
 .../ir_opt/dead_code_elimination_pass.cpp | 28 +
 .../ir_opt/dual_vertex_pass.cpp | 36 +
 .../global_memory_to_storage_buffer_pass.cpp | 527 +
 .../ir_opt/identity_removal_pass.cpp | 38 +
 .../ir_opt/lower_fp16_to_fp32.cpp | 143 +
 .../ir_opt/lower_int64_to_int32.cpp | 217 +
 src/shader_recompiler/ir_opt/passes.h | 32 +
 .../ir_opt/ssa_rewrite_pass.cpp | 381 +
 src/shader_recompiler/ir_opt/texture_pass.cpp | 523 +
 .../ir_opt/verification_pass.cpp | 98 +
 src/shader_recompiler/object_pool.h | 104 +
 src/shader_recompiler/profile.h | 72 +
 src/shader_recompiler/program_header.h | 219 +
 src/shader_recompiler/runtime_info.h | 88 +
 src/shader_recompiler/shader_info.h | 193 +
 src/shader_recompiler/stage.h | 28 +
 src/shader_recompiler/varying_state.h | 69 +
 src/tests/common/unique_function.cpp | 2 +
 src/video_core/CMakeLists.txt | 80 +-
 src/video_core/buffer_cache/buffer_cache.h | 269 +-
 src/video_core/dirty_flags.cpp | 6 +
 src/video_core/dirty_flags.h | 2 +
 src/video_core/engines/kepler_compute.cpp | 45 +-
 src/video_core/engines/kepler_compute.h | 21 +-
 src/video_core/engines/maxwell_3d.cpp | 39 -
 src/video_core/engines/maxwell_3d.h | 46 +-
 src/video_core/engines/maxwell_dma.cpp | 3 +-
 src/video_core/memory_manager.cpp | 1 -
 src/video_core/rasterizer_interface.h | 16 +-
 .../renderer_opengl/gl_buffer_cache.cpp | 84 +-
 .../renderer_opengl/gl_buffer_cache.h | 57 +-
 .../renderer_opengl/gl_compute_pipeline.cpp | 213 +
 .../renderer_opengl/gl_compute_pipeline.h | 93 +
 src/video_core/renderer_opengl/gl_device.cpp | 147 +-
 src/video_core/renderer_opengl/gl_device.h | 84 +-
 .../renderer_opengl/gl_graphics_pipeline.cpp | 560 +
 .../renderer_opengl/gl_graphics_pipeline.h | 164 +
 .../renderer_opengl/gl_rasterizer.cpp | 446 +-
 .../renderer_opengl/gl_rasterizer.h | 44 +-
 .../renderer_opengl/gl_resource_manager.cpp | 27 -
 .../renderer_opengl/gl_resource_manager.h | 14 -
 .../renderer_opengl/gl_shader_cache.cpp | 993 +-
 .../renderer_opengl/gl_shader_cache.h | 170 +-
 .../renderer_opengl/gl_shader_context.h | 33 +
 .../renderer_opengl/gl_shader_manager.cpp | 146 -
 .../renderer_opengl/gl_shader_manager.h | 121 +-
 .../renderer_opengl/gl_shader_util.cpp | 120 +-
 .../renderer_opengl/gl_shader_util.h | 89 +-
 .../renderer_opengl/gl_state_tracker.cpp | 6 -
 .../renderer_opengl/gl_state_tracker.h | 1 -
 .../renderer_opengl/gl_texture_cache.cpp | 361 +-
 .../renderer_opengl/gl_texture_cache.h | 53 +-
 .../renderer_opengl/maxwell_to_gl.h | 108 +
 .../renderer_opengl/renderer_opengl.cpp | 62 +-
 .../renderer_opengl/renderer_opengl.h | 5 +-
 .../renderer_opengl/util_shaders.cpp | 24 +-
 src/video_core/renderer_vulkan/blit_image.cpp | 24 +-
 src/video_core/renderer_vulkan/blit_image.h | 2 +-
 .../renderer_vulkan/fixed_pipeline_state.cpp | 90 +-
 .../renderer_vulkan/fixed_pipeline_state.h | 81 +-
 .../renderer_vulkan/maxwell_to_vk.cpp | 54 +-
 .../renderer_vulkan/maxwell_to_vk.h | 7 +-
 .../renderer_vulkan/pipeline_helper.h | 154 +
 .../renderer_vulkan/renderer_vulkan.cpp | 77 +-
 .../renderer_vulkan/vk_blit_screen.cpp | 90 +-
 .../renderer_vulkan/vk_buffer_cache.cpp | 70 +-
 .../renderer_vulkan/vk_buffer_cache.h | 22 +-
 .../renderer_vulkan/vk_compute_pass.cpp | 272 +-
 .../renderer_vulkan/vk_compute_pass.h | 34 +-
 .../renderer_vulkan/vk_compute_pipeline.cpp | 286 +-
 .../renderer_vulkan/vk_compute_pipeline.h | 72 +-
 .../renderer_vulkan/vk_descriptor_pool.cpp | 172 +-
 .../renderer_vulkan/vk_descriptor_pool.h | 70 +-
 .../renderer_vulkan/vk_graphics_pipeline.cpp | 860 +-
 .../renderer_vulkan/vk_graphics_pipeline.h | 143 +-
 .../renderer_vulkan/vk_master_semaphore.h | 6 +-
 .../renderer_vulkan/vk_pipeline_cache.cpp | 910 +-
 .../renderer_vulkan/vk_pipeline_cache.h | 180 +-
 .../renderer_vulkan/vk_query_cache.cpp | 8 +-
 .../renderer_vulkan/vk_rasterizer.cpp | 475 +-
 .../renderer_vulkan/vk_rasterizer.h | 59 +-
 .../renderer_vulkan/vk_render_pass_cache.cpp | 96 +
 .../renderer_vulkan/vk_render_pass_cache.h | 55 +
 .../renderer_vulkan/vk_resource_pool.cpp | 12 +-
 .../renderer_vulkan/vk_resource_pool.h | 12 +-
 .../renderer_vulkan/vk_scheduler.cpp | 178 +-
 src/video_core/renderer_vulkan/vk_scheduler.h | 38 +-
 .../vk_staging_buffer_pool.cpp | 2 +-
 .../renderer_vulkan/vk_state_tracker.cpp | 58 +-
 .../renderer_vulkan/vk_state_tracker.h | 15 +-
 .../renderer_vulkan/vk_swapchain.cpp | 59 +-
 src/video_core/renderer_vulkan/vk_swapchain.h | 31 +-
 .../renderer_vulkan/vk_texture_cache.cpp | 243 +-
 .../renderer_vulkan/vk_texture_cache.h | 80 +-
 .../renderer_vulkan/vk_update_descriptor.cpp | 13 +-
 .../renderer_vulkan/vk_update_descriptor.h | 4 +-
 src/video_core/shader_cache.cpp | 250 +
 src/video_core/shader_cache.h | 215 +-
 src/video_core/shader_environment.cpp | 461 +
 src/video_core/shader_environment.h | 182 +
 src/video_core/shader_notify.cpp | 49 +-
 src/video_core/shader_notify.h | 28 +-
 src/video_core/texture_cache/formatter.cpp | 4 +-
 src/video_core/texture_cache/formatter.h | 3 +-
 .../texture_cache/image_view_base.cpp | 9 +
 .../texture_cache/image_view_base.h | 1 +
 src/video_core/texture_cache/texture_cache.h | 41 +-
 src/video_core/textures/texture.h | 9 +
 src/video_core/transform_feedback.cpp | 99 +
 src/video_core/transform_feedback.h | 30 +
 .../nsight_aftermath_tracker.cpp | 7 +-
 .../vulkan_common/nsight_aftermath_tracker.h | 21 +-
 .../vulkan_common/vulkan_device.cpp | 327 +-
 src/video_core/vulkan_common/vulkan_device.h | 161 +-
 .../vulkan_common/vulkan_wrapper.cpp | 5 +-
 src/video_core/vulkan_common/vulkan_wrapper.h | 44 +-
 src/yuzu/bootmanager.cpp | 13 +-
 src/yuzu/configuration/config.cpp | 11 +-
 src/yuzu/configuration/config.h | 3 +-
 src/yuzu/configuration/configure_debug.cpp | 8 +
 src/yuzu/configuration/configure_debug.ui | 26 +
 src/yuzu/configuration/configure_graphics.cpp | 75 +-
 src/yuzu/configuration/configure_graphics.h | 4 +-
 src/yuzu/configuration/configure_graphics.ui | 124 +-
 .../configure_graphics_advanced.cpp | 7 -
 .../configure_graphics_advanced.h | 1 -
 .../configure_graphics_advanced.ui | 18 +-
 src/yuzu/main.cpp | 42 +-
 src/yuzu_cmd/config.cpp | 4 +-
 src/yuzu_cmd/default_ini.h | 15 +-
 src/yuzu_cmd/yuzu.cpp | 8 +-
 469 files changed, 201995 insertions(+), 78488 deletions(-)
 create mode 100755 externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std.h
 create mode 100755 externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_decode.h
 create mode 100755 externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_encode.h
 create mode 100755 externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std.h
 create mode 100755 externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std_decode.h
 create mode 100755 externals/Vulkan-Headers/include/vk_video/vulkan_video_codecs_common.h
 create mode 100755 externals/Vulkan-Headers/include/vulkan/vulkan_directfb.h
 create mode 100755 externals/Vulkan-Headers/include/vulkan/vulkan_screen.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/BUILD.bazel
 create mode 100755 externals/sirit/externals/SPIRV-Headers/BUILD.gn
 create mode 100755 externals/sirit/externals/SPIRV-Headers/CODE_OF_CONDUCT.md
 create mode 100755 externals/sirit/externals/SPIRV-Headers/WORKSPACE
 create mode 100755 externals/sirit/externals/SPIRV-Headers/cmake/Config.cmake.in
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/1.0/spirv.cs
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/1.1/spirv.cs
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/1.2/spirv.cs
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_gcn_shader.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_ballot.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_explicit_vertex_parameter.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_trinary_minmax.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/DebugInfo.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticClspvReflection.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticDebugPrintf.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCLDebugInfo100.h
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.debuginfo.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.clspvreflection.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.debugprintf.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-gcn-shader.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-ballot.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-explicit-vertex-parameter.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-trinary-minmax.grammar.json
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.cs
 create mode 100755 externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spv.d
 create mode 100755 externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/generate_language_headers.py
 create mode 100755 externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeExtinstHeaders.py
 create mode 100755 externals/sirit/src/instructions/derivatives.cpp
 create mode 100755 src/shader_recompiler/CMakeLists.txt
 create mode 100755 src/shader_recompiler/backend/bindings.h
 create mode 100755 src/shader_recompiler/backend/glasm/emit_context.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_context.h
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm.h
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_barriers.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_bitwise_conversion.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_composite.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_context_get_set.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_control_flow.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_convert.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_floating_point.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_image.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_instructions.h
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_integer.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_logical.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_not_implemented.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_select.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_shared_memory.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_special.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_undefined.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/emit_glasm_warp.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/reg_alloc.cpp
 create mode 100755 src/shader_recompiler/backend/glasm/reg_alloc.h
 create mode 100755 src/shader_recompiler/backend/glsl/emit_context.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_context.h
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl.h
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_barriers.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_bitwise_conversion.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_composite.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_context_get_set.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_control_flow.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_convert.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_floating_point.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_integer.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_logical.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_select.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_shared_memory.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_special.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_undefined.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/var_alloc.cpp
 create mode 100755 src/shader_recompiler/backend/glsl/var_alloc.h
 create mode 100755 src/shader_recompiler/backend/spirv/emit_context.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_context.h
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv.h
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_barriers.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_bitwise_conversion.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_composite.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_context_get_set.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_control_flow.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_convert.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_floating_point.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_image.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_image_atomic.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_instructions.h
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_integer.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_logical.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_select.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_shared_memory.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_special.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_undefined.cpp
 create mode 100755 src/shader_recompiler/backend/spirv/emit_spirv_warp.cpp
 create mode 100755 src/shader_recompiler/environment.h
 create mode 100755 src/shader_recompiler/exception.h
 create mode 100755 src/shader_recompiler/frontend/ir/abstract_syntax_list.h
 create mode 100755 src/shader_recompiler/frontend/ir/attribute.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/attribute.h
 create mode 100755 src/shader_recompiler/frontend/ir/basic_block.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/basic_block.h
 create mode 100755 src/shader_recompiler/frontend/ir/breadth_first_search.h
 create mode 100755 src/shader_recompiler/frontend/ir/condition.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/condition.h
 create mode 100755 src/shader_recompiler/frontend/ir/flow_test.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/flow_test.h
 create mode 100755 src/shader_recompiler/frontend/ir/ir_emitter.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/ir_emitter.h
 create mode 100755 src/shader_recompiler/frontend/ir/microinstruction.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/modifiers.h
 create mode 100755 src/shader_recompiler/frontend/ir/opcodes.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/opcodes.h
 create mode 100755 src/shader_recompiler/frontend/ir/opcodes.inc
 create mode 100755 src/shader_recompiler/frontend/ir/patch.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/patch.h
 create mode 100755 src/shader_recompiler/frontend/ir/post_order.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/post_order.h
 create mode 100755 src/shader_recompiler/frontend/ir/pred.h
 create mode 100755 src/shader_recompiler/frontend/ir/program.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/program.h
 create mode 100755 src/shader_recompiler/frontend/ir/reg.h
 create mode 100755 src/shader_recompiler/frontend/ir/type.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/type.h
 create mode 100755 src/shader_recompiler/frontend/ir/value.cpp
 create mode 100755 src/shader_recompiler/frontend/ir/value.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/control_flow.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/control_flow.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/decode.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/decode.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/instruction.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/location.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/maxwell.inc
 create mode 100755 src/shader_recompiler/frontend/maxwell/opcodes.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/opcodes.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/structured_control_flow.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/structured_control_flow.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_global_memory.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_shared_memory.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/attribute_memory_to_physical.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/barrier_operations.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_extract.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_insert.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/branch_indirect.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/condition_code_set.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/double_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/double_compare_and_set.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/double_fused_multiply_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/double_min_max.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/double_multiply.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/double_set_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/exit_program.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/find_leading_one.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare_and_set.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_floating_point.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_integer.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_fused_multiply_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_min_max.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multi_function.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multiply.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_range_reduction.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_set_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_swizzled_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_fused_multiply_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_multiply.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/impl.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/impl.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_add_three_input.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare_and_set.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_floating_point_conversion.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_funnel_shift.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_minimum_maximum.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_popcount.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_scaled_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_set_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_left.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_right.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_short_multiply_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/integer_to_integer_conversion.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/internal_stage_buffer_entry_read.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/load_effective_address.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/load_store_attribute.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/load_store_local_shared.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/load_store_memory.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation_three_input.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/move_predicate_to_register.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/move_register.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/move_register_to_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/move_special_register.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/not_implemented.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/output_geometry.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/pixel_load.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_register.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/select_source_with_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/surface_atomic_operations.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/surface_load_store.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch_swizzled.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather_swizzled.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_gradient.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_load.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_load_swizzled.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_mipmap_level.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/texture_query.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/video_minimum_maximum.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/video_multiply_add.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/video_set_predicate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/vote.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/impl/warp_shuffle.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/translate.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate/translate.h
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate_program.cpp
 create mode 100755 src/shader_recompiler/frontend/maxwell/translate_program.h
 create mode 100755 src/shader_recompiler/host_translate_info.h
 create mode 100755 src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/dead_code_elimination_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/dual_vertex_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/identity_removal_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/lower_fp16_to_fp32.cpp
 create mode 100755 src/shader_recompiler/ir_opt/lower_int64_to_int32.cpp
 create mode 100755 src/shader_recompiler/ir_opt/passes.h
 create mode 100755 src/shader_recompiler/ir_opt/ssa_rewrite_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/texture_pass.cpp
 create mode 100755 src/shader_recompiler/ir_opt/verification_pass.cpp
 create mode 100755 src/shader_recompiler/object_pool.h
 create mode 100755 src/shader_recompiler/profile.h
 create mode 100755 src/shader_recompiler/program_header.h
 create mode 100755 src/shader_recompiler/runtime_info.h
 create mode 100755 src/shader_recompiler/shader_info.h
 create mode 100755 src/shader_recompiler/stage.h
 create mode 100755 src/shader_recompiler/varying_state.h
 create mode 100755 src/video_core/renderer_opengl/gl_compute_pipeline.cpp
 create mode 100755 src/video_core/renderer_opengl/gl_compute_pipeline.h
 create mode 100755 src/video_core/renderer_opengl/gl_graphics_pipeline.cpp
 create mode 100755 src/video_core/renderer_opengl/gl_graphics_pipeline.h
 create mode 100755 src/video_core/renderer_opengl/gl_shader_context.h
 create mode 100755 src/video_core/renderer_vulkan/pipeline_helper.h
 create mode 100755 src/video_core/renderer_vulkan/vk_render_pass_cache.cpp
 create mode 100755 src/video_core/renderer_vulkan/vk_render_pass_cache.h
 create mode 100755 src/video_core/shader_cache.cpp
 create mode 100755 src/video_core/shader_environment.cpp
 create mode 100755 src/video_core/shader_environment.h
 create mode 100755 src/video_core/transform_feedback.cpp
 create mode 100755 src/video_core/transform_feedback.h

diff --git a/CMakeModules/GenerateSCMRev.cmake b/CMakeModules/GenerateSCMRev.cmake
index 311ba1c2e..77358768e 100755
--- a/CMakeModules/GenerateSCMRev.cmake
+++ b/CMakeModules/GenerateSCMRev.cmake
@@ -51,61 +51,7 @@ endif()
 # The variable SRC_DIR must be passed into the script (since it uses the current build directory for all values of CMAKE_*_DIR)
 set(VIDEO_CORE "${SRC_DIR}/src/video_core")
 set(HASH_FILES
-    "${VIDEO_CORE}/renderer_opengl/gl_arb_decompiler.cpp"
-    "${VIDEO_CORE}/renderer_opengl/gl_arb_decompiler.h"
-    "${VIDEO_CORE}/renderer_opengl/gl_shader_cache.cpp"
-    "${VIDEO_CORE}/renderer_opengl/gl_shader_cache.h"
-    "${VIDEO_CORE}/renderer_opengl/gl_shader_decompiler.cpp"
-    "${VIDEO_CORE}/renderer_opengl/gl_shader_decompiler.h"
-    "${VIDEO_CORE}/renderer_opengl/gl_shader_disk_cache.cpp"
-    "${VIDEO_CORE}/renderer_opengl/gl_shader_disk_cache.h"
-    "${VIDEO_CORE}/shader/decode/arithmetic.cpp"
-    "${VIDEO_CORE}/shader/decode/arithmetic_half.cpp"
-    "${VIDEO_CORE}/shader/decode/arithmetic_half_immediate.cpp"
-    "${VIDEO_CORE}/shader/decode/arithmetic_immediate.cpp"
-    "${VIDEO_CORE}/shader/decode/arithmetic_integer.cpp"
-    "${VIDEO_CORE}/shader/decode/arithmetic_integer_immediate.cpp"
-    "${VIDEO_CORE}/shader/decode/bfe.cpp"
-    "${VIDEO_CORE}/shader/decode/bfi.cpp"
-    "${VIDEO_CORE}/shader/decode/conversion.cpp"
-    "${VIDEO_CORE}/shader/decode/ffma.cpp"
-    "${VIDEO_CORE}/shader/decode/float_set.cpp"
-    "${VIDEO_CORE}/shader/decode/float_set_predicate.cpp"
-    "${VIDEO_CORE}/shader/decode/half_set.cpp"
-    "${VIDEO_CORE}/shader/decode/half_set_predicate.cpp"
-    "${VIDEO_CORE}/shader/decode/hfma2.cpp"
-    "${VIDEO_CORE}/shader/decode/image.cpp"
-    "${VIDEO_CORE}/shader/decode/integer_set.cpp"
-    "${VIDEO_CORE}/shader/decode/integer_set_predicate.cpp"
-    "${VIDEO_CORE}/shader/decode/memory.cpp"
-    "${VIDEO_CORE}/shader/decode/texture.cpp"
-    "${VIDEO_CORE}/shader/decode/other.cpp"
-    "${VIDEO_CORE}/shader/decode/predicate_set_predicate.cpp"
-    "${VIDEO_CORE}/shader/decode/predicate_set_register.cpp"
-    "${VIDEO_CORE}/shader/decode/register_set_predicate.cpp"
-    "${VIDEO_CORE}/shader/decode/shift.cpp"
-    "${VIDEO_CORE}/shader/decode/video.cpp"
-    "${VIDEO_CORE}/shader/decode/warp.cpp"
-    "${VIDEO_CORE}/shader/decode/xmad.cpp"
-    "${VIDEO_CORE}/shader/ast.cpp"
-    "${VIDEO_CORE}/shader/ast.h"
-    "${VIDEO_CORE}/shader/compiler_settings.cpp"
-    "${VIDEO_CORE}/shader/compiler_settings.h"
-    "${VIDEO_CORE}/shader/control_flow.cpp"
-    "${VIDEO_CORE}/shader/control_flow.h"
-    "${VIDEO_CORE}/shader/decode.cpp"
-    "${VIDEO_CORE}/shader/expr.cpp"
-    "${VIDEO_CORE}/shader/expr.h"
-    "${VIDEO_CORE}/shader/node.h"
-    "${VIDEO_CORE}/shader/node_helper.cpp"
-    "${VIDEO_CORE}/shader/node_helper.h"
-    "${VIDEO_CORE}/shader/registry.cpp"
-    "${VIDEO_CORE}/shader/registry.h"
-    "${VIDEO_CORE}/shader/shader_ir.cpp"
-    "${VIDEO_CORE}/shader/shader_ir.h"
-    "${VIDEO_CORE}/shader/track.cpp"
-    "${VIDEO_CORE}/shader/transform_feedback.cpp"
-    "${VIDEO_CORE}/shader/transform_feedback.h"
+    # ...
 )
 set(COMBINED "")
 foreach (F IN LISTS HASH_FILES)
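The hunk above strips the legacy shader sources out of HASH_FILES; the loop that follows in GenerateSCMRev.cmake concatenates the listed files into COMBINED so that a hash of the result can serve as a version stamp for shader-related caches. The C program below is only a sketch of that concatenate-then-hash idea, not yuzu's implementation: FNV-1a stands in for whatever hash the real script computes over the combined text.

    /* Sketch: hash the concatenation of a file list, as GenerateSCMRev.cmake
     * does with HASH_FILES -> COMBINED. FNV-1a is a stand-in hash. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t fnv1a_update(uint64_t h, const unsigned char *p, size_t n) {
        for (size_t i = 0; i < n; ++i) {
            h ^= p[i];
            h *= 1099511628211ULL; /* FNV-1a 64-bit prime */
        }
        return h;
    }

    int main(int argc, char **argv) {
        uint64_t h = 14695981039346656037ULL; /* FNV-1a offset basis */
        unsigned char buf[4096];
        for (int i = 1; i < argc; ++i) { /* argv plays the role of HASH_FILES */
            FILE *f = fopen(argv[i], "rb");
            if (!f) return 1;
            size_t n;
            while ((n = fread(buf, 1, sizeof buf, f)) > 0)
                h = fnv1a_update(h, buf, n); /* order-sensitive, like COMBINED */
            fclose(f);
        }
        printf("%016llx\n", (unsigned long long)h); /* the version stamp */
        return 0;
    }

Any edit to any hashed file changes the stamp, which is exactly why the list had to shrink once the old video_core/shader sources were removed.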
diff --git a/README.md b/README.md
index d78a96f01..c63f8608d 100755
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 yuzu emulator early access
 =============
-This is the source code for early-access 1865.
+This is the source code for early-access 1866.
 
 ## Legal Notice
 
diff --git a/externals/Vulkan-Headers/BUILD.gn b/externals/Vulkan-Headers/BUILD.gn
index 67b9cf624..27a14df90 100755
--- a/externals/Vulkan-Headers/BUILD.gn
+++ b/externals/Vulkan-Headers/BUILD.gn
@@ -48,6 +48,7 @@ source_set("vulkan_headers") {
     "include/vulkan/vulkan.h",
     "include/vulkan/vulkan.hpp",
     "include/vulkan/vulkan_core.h",
+    "include/vulkan/vulkan_screen.h",
   ]
   public_configs = [ ":vulkan_headers_config" ]
 }
diff --git a/externals/Vulkan-Headers/CMakeLists.txt b/externals/Vulkan-Headers/CMakeLists.txt
index fc96c5e53..472dec6e6 100755
--- a/externals/Vulkan-Headers/CMakeLists.txt
+++ b/externals/Vulkan-Headers/CMakeLists.txt
@@ -46,6 +46,7 @@ target_include_directories(Vulkan-Registry INTERFACE "${CMAKE_CURRENT_SOURCE_DIR
 add_library(Vulkan::Registry ALIAS Vulkan-Registry)
 
 install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/vulkan" DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/vk_video" DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
 install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/registry" DESTINATION ${CMAKE_INSTALL_DATADIR}/vulkan)
 
 # uninstall target
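The CMakeLists.txt hunk above installs the new include/vk_video tree next to include/vulkan, so the provisional video headers can be consumed from an installed prefix. A minimal consumer sketch, assuming these headers are installed; note that, as shown further below, they use fixed-width types without pulling in <stdint.h> themselves, so include it (or vulkan.h) first:

    #include <stdint.h> /* the vk_video headers rely on uint8_t/uint32_t etc. */
    #include <vk_video/vulkan_video_codec_h264std.h>

    int main(void) {
        /* Touch one of the newly shipped enums to prove the header resolves. */
        StdVideoH264ProfileIdc profile = std_video_h264_profile_idc_main;
        return profile == std_video_h264_profile_idc_main ? 0 : 1;
    }

With both directories under the same include prefix, the header's own #include "vk_video/vulkan_video_codecs_common.h" resolves without extra -I flags.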
diff --git a/externals/Vulkan-Headers/README.md b/externals/Vulkan-Headers/README.md
index 4687289a3..775c02759 100755
--- a/externals/Vulkan-Headers/README.md
+++ b/externals/Vulkan-Headers/README.md
@@ -4,23 +4,45 @@ Vulkan header files and API registry
 
 ## Repository Content
 
-The contents of this repository are largely obtained from other repositories and are
-collected, coordinated, and curated here.
+The contents of this repository are largely obtained from other repositories
+and are collected, coordinated, and curated here.
 
-Do not propose pull requests to this repository which modify any files under
-include/vulkan/ or registry/. All such files are generated from the
-Vulkan-Docs repository and, in the case of include/vulkan/vulkan.hpp, the
-Vulkan-Hpp repository. Any changes must be made in those repositories.
+If proposing changes to any file originating from a different repository,
+please propose such changes in that repository, rather than Vulkan-Headers.
+Files in this repository originate from:
 
-The projects for these repositories are:
+### Specification repository (https://github.com/KhronosGroup/Vulkan-Docs)
 
-- [KhronosGroup/Vulkan-Docs](https://github.com/KhronosGroup/Vulkan-Docs)
-  - Core Vulkan headers and Registry
-- [KhronosGroup/Vulkan-Hpp](https://github.com/KhronosGroup/Vulkan-Hpp)
-  - C++ Bindings for Vulkan
+* registry/cgenerator.py
+* registry/conventions.py
+* registry/generator.py
+* registry/genvk.py
+* registry/reg.py
+* registry/spec_tools/util.py
+* registry/validusage.json
+* registry/vk.xml
+* registry/vkconventions.py
+* All files under include/vulkan/ which are *not* listed explicitly as originating from another repository.
 
-Please visit the appropriate project in the above list for obtaining additional information,
-asking questions, or opening issues.
+### This repository (https://github.com/KhronosGroup/Vulkan-Headers)
+
+* .cmake-format.py
+* BUILD.gn
+* BUILD.md
+* CMakeLists.txt
+* CODE_OF_CONDUCT.md
+* LICENSE.txt
+* README.md
+* cmake/Copyright_cmake.txt
+* cmake/cmake_uninstall.cmake.in
+* Non-API headers (report issues against @lenny-lunarg)
+  * include/vulkan/vk_icd.h
+  * include/vulkan/vk_layer.h
+  * include/vulkan/vk_sdk_platform.h
+
+### Vulkan C++ Binding Repository (https://github.com/KhronosGroup/Vulkan-Hpp)
+
+* include/vulkan/vulkan.hpp
 
 ## Version Tagging Scheme
 
diff --git a/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std.h b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std.h
new file mode 100755
index 000000000..f8c0cef49
--- /dev/null
+++ b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std.h
@@ -0,0 +1,299 @@
+/*
+** Copyright (c) 2019-2021 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+#ifndef VULKAN_VIDEO_CODEC_H264STD_H_
+#define VULKAN_VIDEO_CODEC_H264STD_H_ 1
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "vk_video/vulkan_video_codecs_common.h"
+
+// Vulkan 0.9 provisional Vulkan video H.264 encode and decode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_API_VERSION_0_9 VK_MAKE_VIDEO_STD_VERSION(0, 9, 0) // Patch version should always be set to 0
+
+// Format must be in the form XX.XX where the first two digits are the major and the second two, the minor.
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_API_VERSION_0_9
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264"
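    /* The version macros above are built with VK_MAKE_VIDEO_STD_VERSION from
     * vulkan_video_codecs_common.h (added by this patch, not shown here). A
     * standalone sketch of the packing, *assuming* it mirrors the
     * VK_MAKE_API_VERSION convention of 10 bits major, 10 bits minor and
     * 12 bits patch; the local MAKE_VIDEO_STD_VERSION name is hypothetical. */
    #include <stdint.h>
    #include <stdio.h>

    #define MAKE_VIDEO_STD_VERSION(major, minor, patch) \
        ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))

    int main(void) {
        uint32_t v = MAKE_VIDEO_STD_VERSION(0, 9, 0); /* the 0.9 provisional version */
        printf("packed: 0x%08x major=%u minor=%u patch=%u\n", v,
               v >> 22, (v >> 12) & 0x3FFu, v & 0xFFFu);
        return 0; /* prints: packed: 0x00009000 major=0 minor=9 patch=0 */
    }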
+
+// *************************************************
+// Video H.264 common definitions:
+// *************************************************
+
+typedef enum StdVideoH264ChromaFormatIdc {
+    std_video_h264_chroma_format_idc_monochrome = 0,
+    std_video_h264_chroma_format_idc_420 = 1,
+    std_video_h264_chroma_format_idc_422 = 2,
+    std_video_h264_chroma_format_idc_444 = 3,
+} StdVideoH264ChromaFormatIdc;
+
+typedef enum StdVideoH264ProfileIdc {
+    std_video_h264_profile_idc_baseline = 66, /* Only constrained baseline is supported */
+    std_video_h264_profile_idc_main = 77,
+    std_video_h264_profile_idc_high = 100,
+    std_video_h264_profile_idc_high_444_predictive = 244,
+    std_video_h264_profile_idc_invalid = 0x7FFFFFFF
+} StdVideoH264ProfileIdc;
+
+typedef enum StdVideoH264Level {
+    std_video_h264_level_1_0 = 0,
+    std_video_h264_level_1_1 = 1,
+    std_video_h264_level_1_2 = 2,
+    std_video_h264_level_1_3 = 3,
+    std_video_h264_level_2_0 = 4,
+    std_video_h264_level_2_1 = 5,
+    std_video_h264_level_2_2 = 6,
+    std_video_h264_level_3_0 = 7,
+    std_video_h264_level_3_1 = 8,
+    std_video_h264_level_3_2 = 9,
+    std_video_h264_level_4_0 = 10,
+    std_video_h264_level_4_1 = 11,
+    std_video_h264_level_4_2 = 12,
+    std_video_h264_level_5_0 = 13,
+    std_video_h264_level_5_1 = 14,
+    std_video_h264_level_5_2 = 15,
+    std_video_h264_level_6_0 = 16,
+    std_video_h264_level_6_1 = 17,
+    std_video_h264_level_6_2 = 18,
+    std_video_h264_level_invalid = 0x7FFFFFFF
+} StdVideoH264Level;
+
+typedef enum StdVideoH264PocType {
+    std_video_h264_poc_type_0 = 0,
+    std_video_h264_poc_type_1 = 1,
+    std_video_h264_poc_type_2 = 2,
+    std_video_h264_poc_type_invalid = 0x7FFFFFFF
+} StdVideoH264PocType;
+
+typedef enum StdVideoH264AspectRatioIdc {
+    std_video_h264_aspect_ratio_idc_unspecified = 0,
+    std_video_h264_aspect_ratio_idc_square = 1,
+    std_video_h264_aspect_ratio_idc_12_11 = 2,
+    std_video_h264_aspect_ratio_idc_10_11 = 3,
+    std_video_h264_aspect_ratio_idc_16_11 = 4,
+    std_video_h264_aspect_ratio_idc_40_33 = 5,
+    std_video_h264_aspect_ratio_idc_24_11 = 6,
+    std_video_h264_aspect_ratio_idc_20_11 = 7,
+    std_video_h264_aspect_ratio_idc_32_11 = 8,
+    std_video_h264_aspect_ratio_idc_80_33 = 9,
+    std_video_h264_aspect_ratio_idc_18_11 = 10,
+    std_video_h264_aspect_ratio_idc_15_11 = 11,
+    std_video_h264_aspect_ratio_idc_64_33 = 12,
+    std_video_h264_aspect_ratio_idc_160_99 = 13,
+    std_video_h264_aspect_ratio_idc_4_3 = 14,
+    std_video_h264_aspect_ratio_idc_3_2 = 15,
+    std_video_h264_aspect_ratio_idc_2_1 = 16,
+    std_video_h264_aspect_ratio_idc_extended_sar = 255,
+    std_video_h264_aspect_ratio_idc_invalid = 0x7FFFFFFF
+} StdVideoH264AspectRatioIdc;
+
+typedef enum StdVideoH264WeightedBiPredIdc {
+    std_video_h264_default_weighted_b_slices_prediction_idc = 0,
+    std_video_h264_explicit_weighted_b_slices_prediction_idc = 1,
+    std_video_h264_implicit_weighted_b_slices_prediction_idc = 2,
+    std_video_h264_invalid_weighted_b_slices_prediction_idc = 0x7FFFFFFF
+} StdVideoH264WeightedBiPredIdc;
+
+typedef enum StdVideoH264ModificationOfPicNumsIdc {
+    std_video_h264_modification_of_pic_nums_idc_short_term_subtract = 0,
+    std_video_h264_modification_of_pic_nums_idc_short_term_add = 1,
+    std_video_h264_modification_of_pic_nums_idc_long_term = 2,
+    std_video_h264_modification_of_pic_nums_idc_end = 3,
+    std_video_h264_modification_of_pic_nums_idc_invalid = 0x7FFFFFFF
+} StdVideoH264ModificationOfPicNumsIdc;
+
+typedef enum StdVideoH264MemMgmtControlOp {
+    std_video_h264_mem_mgmt_control_op_end = 0,
+    std_video_h264_mem_mgmt_control_op_unmark_short_term = 1,
+    std_video_h264_mem_mgmt_control_op_unmark_long_term = 2,
+    std_video_h264_mem_mgmt_control_op_mark_long_term = 3,
+    std_video_h264_mem_mgmt_control_op_set_max_long_term_index = 4,
+    std_video_h264_mem_mgmt_control_op_unmark_all = 5,
+    std_video_h264_mem_mgmt_control_op_mark_current_as_long_term = 6,
+    std_video_h264_mem_mgmt_control_op_invalid = 0x7FFFFFFF
+} StdVideoH264MemMgmtControlOp;
+
+typedef enum StdVideoH264CabacInitIdc {
+    std_video_h264_cabac_init_idc_0 = 0,
+    std_video_h264_cabac_init_idc_1 = 1,
+    std_video_h264_cabac_init_idc_2 = 2,
+    std_video_h264_cabac_init_idc_invalid = 0x7FFFFFFF
+} StdVideoH264CabacInitIdc;
+
+typedef enum StdVideoH264DisableDeblockingFilterIdc {
+    std_video_h264_disable_deblocking_filter_idc_disabled = 0,
+    std_video_h264_disable_deblocking_filter_idc_enabled = 1,
+    std_video_h264_disable_deblocking_filter_idc_partial = 2,
+    std_video_h264_disable_deblocking_filter_idc_invalid = 0x7FFFFFFF
+} StdVideoH264DisableDeblockingFilterIdc;
+
+typedef enum StdVideoH264PictureType {
+    std_video_h264_picture_type_i = 0,
+    std_video_h264_picture_type_p = 1,
+    std_video_h264_picture_type_b = 2,
+    std_video_h264_picture_type_invalid = 0x7FFFFFFF
+} StdVideoH264PictureType;
+
+typedef enum StdVideoH264SliceType {
+    std_video_h264_slice_type_i = 0,
+    std_video_h264_slice_type_p = 1,
+    std_video_h264_slice_type_b = 2,
+    std_video_h264_slice_type_invalid = 0x7FFFFFFF
+} StdVideoH264SliceType;
+
+typedef enum StdVideoH264NonVclNaluType {
+    std_video_h264_non_vcl_nalu_type_sps = 0,
+    std_video_h264_non_vcl_nalu_type_pps = 1,
+    std_video_h264_non_vcl_nalu_type_aud = 2,
+    std_video_h264_non_vcl_nalu_type_prefix = 3,
+    std_video_h264_non_vcl_nalu_type_end_of_sequence = 4,
+    std_video_h264_non_vcl_nalu_type_end_of_stream = 5,
+    std_video_h264_non_vcl_nalu_type_precoded = 6,
+    std_video_h264_non_vcl_nalu_type_invalid = 0x7FFFFFFF
+} StdVideoH264NonVclNaluType;
+
+typedef struct StdVideoH264SpsVuiFlags {
+    uint32_t aspect_ratio_info_present_flag:1;
+    uint32_t overscan_info_present_flag:1;
+    uint32_t overscan_appropriate_flag:1;
+    uint32_t video_signal_type_present_flag:1;
+    uint32_t video_full_range_flag:1;
+    uint32_t color_description_present_flag:1;
+    uint32_t chroma_loc_info_present_flag:1;
+    uint32_t timing_info_present_flag:1;
+    uint32_t fixed_frame_rate_flag:1;
+    uint32_t bitstream_restriction_flag:1;
+    uint32_t nal_hrd_parameters_present_flag:1;
+    uint32_t vcl_hrd_parameters_present_flag:1;
+} StdVideoH264SpsVuiFlags;
+
+typedef struct StdVideoH264HrdParameters {
+    uint8_t cpb_cnt_minus1;
+    uint8_t bit_rate_scale;
+    uint8_t cpb_size_scale;
+    uint32_t bit_rate_value_minus1[32];
+    uint32_t cpb_size_value_minus1[32];
+    uint8_t cbr_flag[32];
+    uint32_t initial_cpb_removal_delay_length_minus1;
+    uint32_t cpb_removal_delay_length_minus1;
+    uint32_t dpb_output_delay_length_minus1;
+    uint32_t time_offset_length;
+} StdVideoH264HrdParameters;
+
+typedef struct StdVideoH264SequenceParameterSetVui {
+    StdVideoH264AspectRatioIdc aspect_ratio_idc;
+    uint16_t sar_width;
+    uint16_t sar_height;
+    uint8_t video_format;
+    uint8_t color_primaries;
+    uint8_t transfer_characteristics;
+    uint8_t matrix_coefficients;
+    uint32_t num_units_in_tick;
+    uint32_t time_scale;
+    StdVideoH264HrdParameters hrd_parameters;
+    uint8_t num_reorder_frames;
+    uint8_t max_dec_frame_buffering;
+    StdVideoH264SpsVuiFlags flags;
+} StdVideoH264SequenceParameterSetVui;
+ uint32_t qpprime_y_zero_transform_bypass_flag:1; + uint32_t frame_cropping_flag:1; + uint32_t scaling_matrix_present_flag:1; + uint32_t vui_parameters_present_flag:1; +} StdVideoH264SpsFlags; + +typedef struct StdVideoH264ScalingLists +{ + // scaling_list_present_mask has one bit for each + // seq_scaling_list_present_flag[i] for SPS OR + // pic_scaling_list_present_flag[i] for PPS, + // bit 0 - 5 are for each entry of ScalingList4x4 + // bit 6 - 7 are for each entry plus 6 for ScalingList8x8 + uint8_t scaling_list_present_mask; + // use_default_scaling_matrix_mask has one bit for each + // UseDefaultScalingMatrix4x4Flag[ i ] and + // UseDefaultScalingMatrix8x8Flag[ i - 6 ] for SPS OR PPS + // bit 0 - 5 are for each entry of ScalingList4x4 + // bit 6 - 7 are for each entry plus 6 for ScalingList8x8 + uint8_t use_default_scaling_matrix_mask; + uint8_t ScalingList4x4[6][16]; + uint8_t ScalingList8x8[2][64]; +} StdVideoH264ScalingLists; + +typedef struct StdVideoH264SequenceParameterSet +{ + StdVideoH264ProfileIdc profile_idc; + StdVideoH264Level level_idc; + uint8_t seq_parameter_set_id; + StdVideoH264ChromaFormatIdc chroma_format_idc; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_frame_num_minus4; + StdVideoH264PocType pic_order_cnt_type; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + uint8_t num_ref_frames_in_pic_order_cnt_cycle; + uint8_t max_num_ref_frames; + uint32_t pic_width_in_mbs_minus1; + uint32_t pic_height_in_map_units_minus1; + uint32_t frame_crop_left_offset; + uint32_t frame_crop_right_offset; + uint32_t frame_crop_top_offset; + uint32_t frame_crop_bottom_offset; + StdVideoH264SpsFlags flags; + int32_t offset_for_ref_frame[255]; // The number of valid values is defined by num_ref_frames_in_pic_order_cnt_cycle + StdVideoH264ScalingLists* pScalingLists; // Must be a valid pointer if scaling_matrix_present_flag is set + StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui; // Must be a valid pointer if StdVideoH264SpsFlags::vui_parameters_present_flag is set +} StdVideoH264SequenceParameterSet; + +typedef struct StdVideoH264PpsFlags { + uint32_t transform_8x8_mode_flag:1; + uint32_t redundant_pic_cnt_present_flag:1; + uint32_t constrained_intra_pred_flag:1; + uint32_t deblocking_filter_control_present_flag:1; + uint32_t weighted_bipred_idc_flag:1; + uint32_t weighted_pred_flag:1; + uint32_t pic_order_present_flag:1; + uint32_t entropy_coding_mode_flag:1; + uint32_t scaling_matrix_present_flag:1; +} StdVideoH264PpsFlags; + +typedef struct StdVideoH264PictureParameterSet +{ + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + StdVideoH264WeightedBiPredIdc weighted_bipred_idc; + int8_t pic_init_qp_minus26; + int8_t pic_init_qs_minus26; + int8_t chroma_qp_index_offset; + int8_t second_chroma_qp_index_offset; + StdVideoH264PpsFlags flags; + StdVideoH264ScalingLists* pScalingLists; // Must be a valid pointer if StdVideoH264PpsFlags::scaling_matrix_present_flag is set.
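+ // Worked example (illustrative, not part of the provisional header): given the mask layout documented on StdVideoH264ScalingLists above, a PPS carrying a custom ScalingList4x4[0] and a custom ScalingList8x8[0], with defaults elsewhere, would set + // scaling_list_present_mask = (1u << 0) | (1u << 6) and leave the matching bits of use_default_scaling_matrix_mask clear.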
+} StdVideoH264PictureParameterSet; + +#ifdef __cplusplus +} +#endif + +#endif // VULKAN_VIDEO_CODEC_H264STD_H_ diff --git a/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_decode.h b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_decode.h new file mode 100755 index 000000000..2b4fc6469 --- /dev/null +++ b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_decode.h @@ -0,0 +1,87 @@ +/* +** Copyright (c) 2019-2020 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +#include "vk_video/vulkan_video_codec_h264std.h" + +// ************************************************* +// Video H.264 Decode related parameters: +// ************************************************* + +typedef struct StdVideoDecodeH264PictureInfoFlags { + uint32_t field_pic_flag:1; // Is field picture + uint32_t is_intra:1; // Is intra picture + uint32_t bottom_field_flag:1; // bottom (true) or top (false) field if field_pic_flag is set. + uint32_t is_reference:1; // This only applies to picture info, and not to the DPB lists. + uint32_t complementary_field_pair:1; // complementary field pair, complementary non-reference field pair, complementary reference field pair +} StdVideoDecodeH264PictureInfoFlags; + +typedef struct StdVideoDecodeH264PictureInfo { + uint8_t seq_parameter_set_id; // Selecting SPS from the Picture Parameters + uint8_t pic_parameter_set_id; // Selecting PPS from the Picture Parameters and the SPS + uint16_t reserved; // for structure members 32-bit packing/alignment + uint16_t frame_num; // 7.4.3 Slice header semantics + uint16_t idr_pic_id; // 7.4.3 Slice header semantics + // PicOrderCnt is based on TopFieldOrderCnt and BottomFieldOrderCnt. See 8.2.1 Decoding process for picture order count type 0 - 2 + int32_t PicOrderCnt[2]; // TopFieldOrderCnt and BottomFieldOrderCnt fields. + StdVideoDecodeH264PictureInfoFlags flags; +} StdVideoDecodeH264PictureInfo; + +typedef struct StdVideoDecodeH264ReferenceInfoFlags { + uint32_t top_field_flag:1; // Reference is used for top field reference. + uint32_t bottom_field_flag:1; // Reference is used for bottom field reference. + uint32_t is_long_term:1; // this is a long term reference + uint32_t is_non_existing:1; // Must be handled in accordance with 8.2.5.2: Decoding process for gaps in frame_num +} StdVideoDecodeH264ReferenceInfoFlags; + +typedef struct StdVideoDecodeH264ReferenceInfo { + // FrameNum = is_long_term ? long_term_frame_idx : frame_num + uint16_t FrameNum; // 7.4.3.3 Decoded reference picture marking semantics + uint16_t reserved; // for structure members 32-bit packing/alignment + int32_t PicOrderCnt[2]; // TopFieldOrderCnt and BottomFieldOrderCnt fields. + StdVideoDecodeH264ReferenceInfoFlags flags; +} StdVideoDecodeH264ReferenceInfo; + +typedef struct StdVideoDecodeH264MvcElementFlags { + uint32_t non_idr:1; + uint32_t anchor_pic:1; + uint32_t inter_view:1; +} StdVideoDecodeH264MvcElementFlags; + +typedef struct StdVideoDecodeH264MvcElement { + StdVideoDecodeH264MvcElementFlags flags; + uint16_t viewOrderIndex; + uint16_t viewId; + uint16_t temporalId; // move out? + uint16_t priorityId; // move out? 
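+ // Note (an inference from the array sizes below, not stated in this header): each numOf*Refs count gives the number of valid leading entries in the matching viewIdOf*Refs array, so at most 15 per list.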
+ uint16_t numOfAnchorRefsInL0; + uint16_t viewIdOfAnchorRefsInL0[15]; + uint16_t numOfAnchorRefsInL1; + uint16_t viewIdOfAnchorRefsInL1[15]; + uint16_t numOfNonAnchorRefsInL0; + uint16_t viewIdOfNonAnchorRefsInL0[15]; + uint16_t numOfNonAnchorRefsInL1; + uint16_t viewIdOfNonAnchorRefsInL1[15]; +} StdVideoDecodeH264MvcElement; + +typedef struct StdVideoDecodeH264Mvc { + uint32_t viewId0; + uint32_t mvcElementCount; + StdVideoDecodeH264MvcElement* pMvcElements; +} StdVideoDecodeH264Mvc; + + +#ifdef __cplusplus +} +#endif + +#endif // VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ diff --git a/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_encode.h b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_encode.h new file mode 100755 index 000000000..718456203 --- /dev/null +++ b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h264std_encode.h @@ -0,0 +1,94 @@ +/* +** Copyright (c) 2019-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +#include "vk_video/vulkan_video_codec_h264std.h" + +// ************************************************* +// Video H.264 Encode related parameters: +// ************************************************* + +typedef struct StdVideoEncodeH264SliceHeaderFlags { + uint32_t idr_flag:1; + uint32_t is_reference_flag:1; + uint32_t num_ref_idx_active_override_flag:1; + uint32_t no_output_of_prior_pics_flag:1; + uint32_t long_term_reference_flag:1; + uint32_t adaptive_ref_pic_marking_mode_flag:1; + uint32_t no_prior_references_available_flag:1; +} StdVideoEncodeH264SliceHeaderFlags; + +typedef struct StdVideoEncodeH264PictureInfoFlags { + uint32_t idr_flag:1; + uint32_t is_reference_flag:1; + uint32_t long_term_reference_flag:1; +} StdVideoEncodeH264PictureInfoFlags; + +typedef struct StdVideoEncodeH264RefMgmtFlags { + uint32_t ref_pic_list_modification_l0_flag:1; + uint32_t ref_pic_list_modification_l1_flag:1; +} StdVideoEncodeH264RefMgmtFlags; + +typedef struct StdVideoEncodeH264RefListModEntry { + StdVideoH264ModificationOfPicNumsIdc modification_of_pic_nums_idc; + uint16_t abs_diff_pic_num_minus1; + uint16_t long_term_pic_num; +} StdVideoEncodeH264RefListModEntry; + +typedef struct StdVideoEncodeH264RefPicMarkingEntry { + StdVideoH264MemMgmtControlOp operation; + uint16_t difference_of_pic_nums_minus1; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint16_t max_long_term_frame_idx_plus1; +} StdVideoEncodeH264RefPicMarkingEntry; + +typedef struct StdVideoEncodeH264RefMemMgmtCtrlOperations { + StdVideoEncodeH264RefMgmtFlags flags; + uint8_t refList0ModOpCount; + StdVideoEncodeH264RefListModEntry* pRefList0ModOperations; + uint8_t refList1ModOpCount; + StdVideoEncodeH264RefListModEntry* pRefList1ModOperations; + uint8_t refPicMarkingOpCount; + StdVideoEncodeH264RefPicMarkingEntry* pRefPicMarkingOperations; +} StdVideoEncodeH264RefMemMgmtCtrlOperations; + +typedef struct StdVideoEncodeH264PictureInfo { + StdVideoEncodeH264PictureInfoFlags flags; + StdVideoH264PictureType pictureType; + uint32_t frameNum; + uint32_t pictureOrderCount; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; +} StdVideoEncodeH264PictureInfo; + +typedef struct StdVideoEncodeH264SliceHeader { + StdVideoEncodeH264SliceHeaderFlags flags; + StdVideoH264SliceType slice_type; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint16_t idr_pic_id; + uint8_t 
num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + StdVideoH264CabacInitIdc cabac_init_idc; + StdVideoH264DisableDeblockingFilterIdc disable_deblocking_filter_idc; + int8_t slice_alpha_c0_offset_div2; + int8_t slice_beta_offset_div2; + StdVideoEncodeH264RefMemMgmtCtrlOperations* pMemMgmtCtrlOperations; +} StdVideoEncodeH264SliceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif // VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ diff --git a/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std.h b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std.h new file mode 100755 index 000000000..185b55041 --- /dev/null +++ b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std.h @@ -0,0 +1,341 @@ +/* +** Copyright (c) 2019-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +#ifndef VULKAN_VIDEO_CODEC_H265STD_H_ +#define VULKAN_VIDEO_CODEC_H265STD_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +#include "vk_video/vulkan_video_codecs_common.h" + +// Vulkan 0.5 version number WIP +#define VK_STD_VULKAN_VIDEO_CODEC_H265_API_VERSION_0_5 VK_MAKE_VIDEO_STD_VERSION(0, 5, 0) // Patch version should always be set to 0 + +// Format must be in the form XX.XX where the first two digits are the major and the second two, the minor. +#define VK_STD_VULKAN_VIDEO_CODEC_H265_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_API_VERSION_0_5 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265" + +typedef enum StdVideoH265ChromaFormatIdc { + std_video_h265_chroma_format_idc_monochrome = 0, + std_video_h265_chroma_format_idc_420 = 1, + std_video_h265_chroma_format_idc_422 = 2, + std_video_h265_chroma_format_idc_444 = 3, +} StdVideoH265ChromaFormatIdc; + +typedef enum StdVideoH265ProfileIdc { + std_video_h265_profile_idc_main = 1, + std_video_h265_profile_idc_main_10 = 2, + std_video_h265_profile_idc_main_still_picture = 3, + std_video_h265_profile_idc_format_range_extensions = 4, + std_video_h265_profile_idc_scc_extensions = 9, + std_video_h265_profile_idc_invalid = 0x7FFFFFFF +} StdVideoH265ProfileIdc; + +typedef enum StdVideoH265Level { + std_video_h265_level_1_0 = 0, + std_video_h265_level_2_0 = 1, + std_video_h265_level_2_1 = 2, + std_video_h265_level_3_0 = 3, + std_video_h265_level_3_1 = 4, + std_video_h265_level_4_0 = 5, + std_video_h265_level_4_1 = 6, + std_video_h265_level_5_0 = 7, + std_video_h265_level_5_1 = 8, + std_video_h265_level_5_2 = 9, + std_video_h265_level_6_0 = 10, + std_video_h265_level_6_1 = 11, + std_video_h265_level_6_2 = 12, + std_video_h265_level_invalid = 0x7FFFFFFF +} StdVideoH265Level; + + +typedef struct StdVideoH265DecPicBufMgr +{ + uint32_t max_latency_increase_plus1[7]; + uint8_t max_dec_pic_buffering_minus1[7]; + uint8_t max_num_reorder_pics[7]; +} StdVideoH265DecPicBufMgr; + +typedef struct StdVideoH265SubLayerHrdParameters { + uint32_t bit_rate_value_minus1[32]; + uint32_t cpb_size_value_minus1[32]; + uint32_t cpb_size_du_value_minus1[32]; + uint32_t bit_rate_du_value_minus1[32]; + uint32_t cbr_flag; // each bit represents a range of CpbCounts (bit 0 - cpb_cnt_minus1) per sub-layer +} StdVideoH265SubLayerHrdParameters; + +typedef struct StdVideoH265HrdFlags { + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint8_t fixed_pic_rate_general_flag; // each bit represents a sublayer, bit 0 - vps_max_sub_layers_minus1 + 
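// Worked example (illustrative only): with vps_max_sub_layers_minus1 == 2, marking sub-layers 0 and 2 as fixed-rate would be encoded as + // fixed_pic_rate_general_flag = (uint8_t)((1u << 0) | (1u << 2)), i.e. 0x05. +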
uint8_t fixed_pic_rate_within_cvs_flag; // each bit represents a sublayer, bit 0 - vps_max_sub_layers_minus1 + uint8_t low_delay_hrd_flag; // each bit represents a sublayer, bit 0 - vps_max_sub_layers_minus1 +} StdVideoH265HrdFlags; + +typedef struct StdVideoH265HrdParameters { + uint8_t tick_divisor_minus2; + uint8_t du_cpb_removal_delay_increment_length_minus1; + uint8_t dpb_output_delay_du_length_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t cpb_size_du_scale; + uint8_t initial_cpb_removal_delay_length_minus1; + uint8_t au_cpb_removal_delay_length_minus1; + uint8_t dpb_output_delay_length_minus1; + uint8_t cpb_cnt_minus1[7]; + uint16_t elemental_duration_in_tc_minus1[7]; + StdVideoH265SubLayerHrdParameters* SubLayerHrdParametersNal[7]; + StdVideoH265SubLayerHrdParameters* SubLayerHrdParametersVcl[7]; + StdVideoH265HrdFlags flags; +} StdVideoH265HrdParameters; + +typedef struct StdVideoH265VpsFlags { + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; +} StdVideoH265VpsFlags; + +typedef struct StdVideoH265VideoParameterSet +{ + uint8_t vps_video_parameter_set_id; + uint8_t vps_max_sub_layers_minus1; + uint32_t vps_num_units_in_tick; + uint32_t vps_time_scale; + uint32_t vps_num_ticks_poc_diff_one_minus1; + StdVideoH265DecPicBufMgr* pDecPicBufMgr; + StdVideoH265HrdParameters* hrd_parameters; + StdVideoH265VpsFlags flags; +} StdVideoH265VideoParameterSet; + +typedef struct StdVideoH265ScalingLists +{ + uint8_t ScalingList4x4[6][16]; // ScalingList[ 0 ][ MatrixID ][ i ] (sizeID = 0) + uint8_t ScalingList8x8[6][64]; // ScalingList[ 1 ][ MatrixID ][ i ] (sizeID = 1) + uint8_t ScalingList16x16[6][64]; // ScalingList[ 2 ][ MatrixID ][ i ] (sizeID = 2) + uint8_t ScalingList32x32[2][64]; // ScalingList[ 3 ][ MatrixID ][ i ] (sizeID = 3) + uint8_t ScalingListDCCoef16x16[6]; // scaling_list_dc_coef_minus8[ sizeID - 2 ][ matrixID ] + 8, sizeID = 2 + uint8_t ScalingListDCCoef32x32[2]; // scaling_list_dc_coef_minus8[ sizeID - 2 ][ matrixID ] + 8. 
sizeID = 3 +} StdVideoH265ScalingLists; + +typedef struct StdVideoH265SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; +} StdVideoH265SpsVuiFlags; + +typedef struct StdVideoH265SequenceParameterSetVui { + uint8_t aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coeffs; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint16_t def_disp_win_left_offset; + uint16_t def_disp_win_right_offset; + uint16_t def_disp_win_top_offset; + uint16_t def_disp_win_bottom_offset; + uint32_t vui_num_units_in_tick; + uint32_t vui_time_scale; + uint32_t vui_num_ticks_poc_diff_one_minus1; + StdVideoH265HrdParameters* hrd_parameters; + uint16_t min_spatial_segmentation_idc; + uint8_t max_bytes_per_pic_denom; + uint8_t max_bits_per_min_cu_denom; + uint8_t log2_max_mv_length_horizontal; + uint8_t log2_max_mv_length_vertical; + StdVideoH265SpsVuiFlags flags; +} StdVideoH265SequenceParameterSetVui; + +typedef struct StdVideoH265PredictorPaletteEntries +{ + uint16_t PredictorPaletteEntries[3][128]; +} StdVideoH265PredictorPaletteEntries; + + +typedef struct StdVideoH265SpsFlags { + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + + // extension SPS flags, valid when std_video_h265_profile_idc_format_range_extensions is set + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + + // extension SPS flags, valid when std_video_h265_profile_idc_scc_extensions is set + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializer_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; +} StdVideoH265SpsFlags; + +typedef struct StdVideoH265SequenceParameterSet +{ + 
StdVideoH265ProfileIdc profile_idc; + StdVideoH265Level level_idc; + uint32_t pic_width_in_luma_samples; + uint32_t pic_height_in_luma_samples; + uint8_t sps_video_parameter_set_id; + uint8_t sps_max_sub_layers_minus1; + uint8_t sps_seq_parameter_set_id; + uint8_t chroma_format_idc; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t sps_max_dec_pic_buffering_minus1; + uint8_t log2_min_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_luma_coding_block_size; + uint8_t log2_min_luma_transform_block_size_minus2; + uint8_t log2_diff_max_min_luma_transform_block_size; + uint8_t max_transform_hierarchy_depth_inter; + uint8_t max_transform_hierarchy_depth_intra; + uint8_t num_short_term_ref_pic_sets; + uint8_t num_long_term_ref_pics_sps; + uint8_t pcm_sample_bit_depth_luma_minus1; + uint8_t pcm_sample_bit_depth_chroma_minus1; + uint8_t log2_min_pcm_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size; + uint32_t conf_win_left_offset; + uint32_t conf_win_right_offset; + uint32_t conf_win_top_offset; + uint32_t conf_win_bottom_offset; + StdVideoH265DecPicBufMgr* pDecPicBufMgr; + StdVideoH265SpsFlags flags; + StdVideoH265ScalingLists* pScalingLists; // Must be a valid pointer if sps_scaling_list_data_present_flag is set + StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui; // Must be a valid pointer if StdVideoH265SpsFlags::vui_parameters_present_flag is set + + // extension SPS flags, valid when std_video_h265_profile_idc_scc_extensions is set + uint8_t palette_max_size; + uint8_t delta_palette_max_predictor_size; + uint8_t motion_vector_resolution_control_idc; + uint8_t sps_num_palette_predictor_initializer_minus1; + StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; // Must be a valid pointer if sps_palette_predictor_initializer_present_flag is set +} StdVideoH265SequenceParameterSet; + + +typedef struct StdVideoH265PpsFlags { + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + + // extension PPS flags, valid when std_video_h265_profile_idc_format_range_extensions is set + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + + // extension PPS flags, valid when std_video_h265_profile_idc_scc_extensions is set + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + 
uint32_t pps_palette_predictor_initializer_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; +} StdVideoH265PpsFlags; + +typedef struct StdVideoH265PictureParameterSet +{ + uint8_t pps_pic_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t num_extra_slice_header_bits; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + int8_t init_qp_minus26; + uint8_t diff_cu_qp_delta_depth; + int8_t pps_cb_qp_offset; + int8_t pps_cr_qp_offset; + uint8_t num_tile_columns_minus1; + uint8_t num_tile_rows_minus1; + uint16_t column_width_minus1[19]; + uint16_t row_height_minus1[21]; + int8_t pps_beta_offset_div2; + int8_t pps_tc_offset_div2; + uint8_t log2_parallel_merge_level_minus2; + StdVideoH265PpsFlags flags; + StdVideoH265ScalingLists* pScalingLists; // Must be a valid pointer if pps_scaling_list_data_present_flag is set + + // extension PPS, valid when std_video_h265_profile_idc_format_range_extensions is set + uint8_t log2_max_transform_skip_block_size_minus2; + uint8_t diff_cu_chroma_qp_offset_depth; + uint8_t chroma_qp_offset_list_len_minus1; + int8_t cb_qp_offset_list[6]; + int8_t cr_qp_offset_list[6]; + uint8_t log2_sao_offset_scale_luma; + uint8_t log2_sao_offset_scale_chroma; + + // extension PPS, valid when std_video_h265_profile_idc_scc_extensions is set + int8_t pps_act_y_qp_offset_plus5; + int8_t pps_act_cb_qp_offset_plus5; + int8_t pps_act_cr_qp_offset_plus5; + uint8_t pps_num_palette_predictor_initializer; + uint8_t luma_bit_depth_entry_minus8; + uint8_t chroma_bit_depth_entry_minus8; + StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; // Must be a valid pointer if pps_palette_predictor_initializer_present_flag is set +} StdVideoH265PictureParameterSet; + +#ifdef __cplusplus +} +#endif + +#endif // VULKAN_VIDEO_CODEC_H265STD_H_ diff --git a/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std_decode.h b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std_decode.h new file mode 100755 index 000000000..4be8b5f1c --- /dev/null +++ b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codec_h265std_decode.h @@ -0,0 +1,59 @@ +/* +** Copyright (c) 2019-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +#include "vk_video/vulkan_video_codec_h265std.h" + +// ************************************************* +// Video h265 Decode related parameters: +// ************************************************* + +typedef struct StdVideoDecodeH265PictureInfoFlags { + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; +} StdVideoDecodeH265PictureInfoFlags; + +typedef struct StdVideoDecodeH265PictureInfo { + uint8_t vps_video_parameter_set_id; + uint8_t sps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t num_short_term_ref_pic_sets; + int32_t PicOrderCntVal; + uint16_t NumBitsForSTRefPicSetInSlice; // number of bits used in st_ref_pic_set() + //when short_term_ref_pic_set_sps_flag is 0; otherwise set to 0. 
+ uint8_t NumDeltaPocsOfRefRpsIdx; // NumDeltaPocs[ RefRpsIdx ] when short_term_ref_pic_set_sps_flag = 1, otherwise 0 + uint8_t RefPicSetStCurrBefore[8]; // slotIndex as used in VkVideoReferenceSlotKHR structures representing + //pReferenceSlots in VkVideoDecodeInfoKHR, 0xff for invalid slotIndex + uint8_t RefPicSetStCurrAfter[8]; // slotIndex as used in VkVideoReferenceSlotKHR structures representing + //pReferenceSlots in VkVideoDecodeInfoKHR, 0xff for invalid slotIndex + uint8_t RefPicSetLtCurr[8]; // slotIndex as used in VkVideoReferenceSlotKHR structures representing + //pReferenceSlots in VkVideoDecodeInfoKHR, 0xff for invalid slotIndex + StdVideoDecodeH265PictureInfoFlags flags; +} StdVideoDecodeH265PictureInfo; + +typedef struct StdVideoDecodeH265ReferenceInfoFlags { + uint32_t is_long_term : 1; + uint32_t is_non_existing : 1; +} StdVideoDecodeH265ReferenceInfoFlags; + +typedef struct StdVideoDecodeH265ReferenceInfo { + int32_t PicOrderCntVal; + StdVideoDecodeH265ReferenceInfoFlags flags; +} StdVideoDecodeH265ReferenceInfo; + +#ifdef __cplusplus +} +#endif + +#endif // VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ diff --git a/externals/Vulkan-Headers/include/vk_video/vulkan_video_codecs_common.h b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codecs_common.h new file mode 100755 index 000000000..8cc227a63 --- /dev/null +++ b/externals/Vulkan-Headers/include/vk_video/vulkan_video_codecs_common.h @@ -0,0 +1,21 @@ +/* +** Copyright (c) 2019-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +#ifndef VULKAN_VIDEO_CODEC_COMMON_H_ +#define VULKAN_VIDEO_CODEC_COMMON_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + +#ifdef __cplusplus +} +#endif + +#endif // VULKAN_VIDEO_CODEC_COMMON_H_ diff --git a/externals/Vulkan-Headers/include/vulkan/vk_icd.h b/externals/Vulkan-Headers/include/vulkan/vk_icd.h index 5dff59a16..ae006d06d 100755 --- a/externals/Vulkan-Headers/include/vulkan/vk_icd.h +++ b/externals/Vulkan-Headers/include/vulkan/vk_icd.h @@ -41,17 +41,45 @@ // that if the loader is older, it should automatically fail a // call for any API version > 1.0. Otherwise, the loader will // manually determine if it can support the expected version. -#define CURRENT_LOADER_ICD_INTERFACE_VERSION 5 +// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. +#define CURRENT_LOADER_ICD_INTERFACE_VERSION 6 #define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 #define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 -typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); +// Old typedefs that don't follow a proper naming convention but are preserved for compatibility +typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); // This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this // file directly, it won't be found. 
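// Illustrative sketch (a common pattern, not mandated by this header): an ICD built against interface version 6 can negotiate by clamping the version requested by the loader to the newest version the ICD supports, e.g.: // VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pVersion) { //     if (*pVersion > CURRENT_LOADER_ICD_INTERFACE_VERSION) *pVersion = CURRENT_LOADER_ICD_INTERFACE_VERSION; //     return VK_SUCCESS; // }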
#ifndef PFN_GetPhysicalDeviceProcAddr typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName); #endif +// Typedefs for loader/ICD interface +typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) +typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif + +// Prototypes for loader/ICD interface +#if !defined(VK_NO_PROTOTYPES) +#ifdef __cplusplus +extern "C" { +#endif + VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif +#ifdef __cplusplus +} +#endif +#endif + /* * The ICD must reserve space for a pointer for the loader's dispatch * table, at the start of . @@ -91,6 +119,10 @@ typedef enum { VK_ICD_WSI_PLATFORM_DISPLAY, VK_ICD_WSI_PLATFORM_HEADLESS, VK_ICD_WSI_PLATFORM_METAL, + VK_ICD_WSI_PLATFORM_DIRECTFB, + VK_ICD_WSI_PLATFORM_VI, + VK_ICD_WSI_PLATFORM_GGP, + VK_ICD_WSI_PLATFORM_SCREEN, } VkIcdWsiPlatform; typedef struct { @@ -137,6 +169,14 @@ typedef struct { } VkIcdSurfaceXlib; #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +typedef struct { + VkIcdSurfaceBase base; + IDirectFB *dfb; + IDirectFBSurface *surface; +} VkIcdSurfaceDirectFB; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + #ifdef VK_USE_PLATFORM_ANDROID_KHR typedef struct { + VkIcdSurfaceBase base; @@ -158,6 +198,13 @@ typedef struct { } VkIcdSurfaceIOS; #endif // VK_USE_PLATFORM_IOS_MVK +#ifdef VK_USE_PLATFORM_GGP +typedef struct { + VkIcdSurfaceBase base; + GgpStreamDescriptor streamDescriptor; +} VkIcdSurfaceGgp; +#endif // VK_USE_PLATFORM_GGP + typedef struct { VkIcdSurfaceBase base; VkDisplayModeKHR displayMode; @@ -180,4 +227,19 @@ typedef struct { } VkIcdSurfaceMetal; #endif // VK_USE_PLATFORM_METAL_EXT +#ifdef VK_USE_PLATFORM_VI_NN +typedef struct { + VkIcdSurfaceBase base; + void *window; +} VkIcdSurfaceVi; +#endif // VK_USE_PLATFORM_VI_NN + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +typedef struct { + VkIcdSurfaceBase base; + struct _screen_context *context; + struct _screen_window *window; +} VkIcdSurfaceScreen; +#endif // VK_USE_PLATFORM_SCREEN_QNX + #endif // VKICD_H diff --git a/externals/Vulkan-Headers/include/vulkan/vk_layer.h b/externals/Vulkan-Headers/include/vulkan/vk_layer.h index fa7652008..0651870c7 100755 --- a/externals/Vulkan-Headers/include/vulkan/vk_layer.h +++ b/externals/Vulkan-Headers/include/vulkan/vk_layer.h @@ -83,7 +83,8 @@ typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device); typedef enum VkLayerFunction_ { VK_LAYER_LINK_INFO = 0, VK_LOADER_DATA_CALLBACK = 1, - VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2 + VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2, + VK_LOADER_FEATURES = 3, } 
VkLayerFunction; typedef struct VkLayerInstanceLink_ { @@ -111,6 +112,12 @@ typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device, typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA); typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction); + +typedef enum VkLoaderFeatureFlagBits { + VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001, +} VkLoaderFlagBits; +typedef VkFlags VkLoaderFeatureFlags; + typedef struct { VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO const void *pNext; @@ -119,9 +126,10 @@ typedef struct { VkLayerInstanceLink *pLayerInfo; PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData; struct { - PFN_vkLayerCreateDevice pfnLayerCreateDevice; - PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; - } layerDevice; + PFN_vkLayerCreateDevice pfnLayerCreateDevice; + PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; + } layerDevice; + VkLoaderFeatureFlags loaderFeatures; } u; } VkLayerInstanceCreateInfo; diff --git a/externals/Vulkan-Headers/include/vulkan/vk_platform.h b/externals/Vulkan-Headers/include/vulkan/vk_platform.h index 048322d93..18b913abc 100755 --- a/externals/Vulkan-Headers/include/vulkan/vk_platform.h +++ b/externals/Vulkan-Headers/include/vulkan/vk_platform.h @@ -2,7 +2,7 @@ // File: vk_platform.h // /* -** Copyright (c) 2014-2020 The Khronos Group Inc. +** Copyright 2014-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -58,7 +58,9 @@ extern "C" #define VKAPI_PTR #endif -#include +#if !defined(VK_NO_STDDEF_H) + #include +#endif // !defined(VK_NO_STDDEF_H) #if !defined(VK_NO_STDINT_H) #if defined(_MSC_VER) && (_MSC_VER < 1600) diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan.h b/externals/Vulkan-Headers/include/vulkan/vulkan.h index cb6535863..b187c9c17 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan.h @@ -2,7 +2,7 @@ #define VULKAN_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -61,6 +61,12 @@ #endif +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +#include +#include "vulkan_directfb.h" +#endif + + #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT #include #include @@ -74,6 +80,12 @@ #endif +#ifdef VK_USE_PLATFORM_SCREEN_QNX +#include +#include "vulkan_screen.h" +#endif + + #ifdef VK_ENABLE_BETA_EXTENSIONS #include "vulkan_beta.h" #endif diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan.hpp b/externals/Vulkan-Headers/include/vulkan/vulkan.hpp index fd0d449af..54fae4fc2 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan.hpp +++ b/externals/Vulkan-Headers/include/vulkan/vulkan.hpp @@ -1,7 +1,7 @@ -// Copyright (c) 2015-2020 The Khronos Group Inc. -// +// Copyright 2015-2021 The Khronos Group Inc. +// // SPDX-License-Identifier: Apache-2.0 OR MIT -// +// // This header is generated from the Khronos Vulkan XML API Registry. 
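// For reference (inferred from the reordered version checks in the next hunk): a compiler reporting __cplusplus == 202002L now selects VULKAN_HPP_CPP_VERSION 20, 201703L selects 17, 201402L selects 14, and anything at or below 199711L reaches the #error branch that replaces the old static_assert diagnostic.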
@@ -14,16 +14,16 @@ # define VULKAN_HPP_CPLUSPLUS __cplusplus #endif -#if VULKAN_HPP_CPLUSPLUS < 201103L -static_assert( false, "vulkan.hpp needs at least c++ standard version 11" ); -#elif VULKAN_HPP_CPLUSPLUS < 201402L -# define VULKAN_HPP_CPP_VERSION 11 -#elif VULKAN_HPP_CPLUSPLUS < 201703L -# define VULKAN_HPP_CPP_VERSION 14 -#elif VULKAN_HPP_CPLUSPLUS < 202002L -# define VULKAN_HPP_CPP_VERSION 17 -#else +#if 201703L < VULKAN_HPP_CPLUSPLUS # define VULKAN_HPP_CPP_VERSION 20 +#elif 201402L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 17 +#elif 201103L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 14 +#elif 199711L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 11 +#else +# error "vulkan.hpp needs at least c++ standard version 11" #endif #include @@ -33,6 +33,7 @@ static_assert( false, "vulkan.hpp needs at least c++ standard version 11" ); #include #include #include +#include #include #include #include @@ -40,127 +41,149 @@ static_assert( false, "vulkan.hpp needs at least c++ standard version 11" ); #include #if 17 <= VULKAN_HPP_CPP_VERSION -#include +# include #endif -#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) -# if !defined(VULKAN_HPP_NO_SMART_HANDLE) -# define VULKAN_HPP_NO_SMART_HANDLE -# endif +#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) +# define VULKAN_HPP_NO_SMART_HANDLE +# endif #else -# include -# include +# include +# include #endif -#if !defined(VULKAN_HPP_ASSERT) -# include -# define VULKAN_HPP_ASSERT assert +#if !defined( VULKAN_HPP_ASSERT ) +# include +# define VULKAN_HPP_ASSERT assert #endif -#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL) -# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 +#if !defined( VULKAN_HPP_ASSERT_ON_RESULT ) +# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT +#endif + +#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) +# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 #endif #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 -# if defined(__linux__) || defined(__APPLE__) -# include +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) +# include +# elif defined( _WIN32 ) +typedef struct HINSTANCE__ * HINSTANCE; +# if defined( _WIN64 ) +typedef int64_t( __stdcall * FARPROC )(); +# else +typedef int( __stdcall * FARPROC )(); +# endif +extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName ); +extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule ); +extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName ); # endif #endif -#if 201711 <= __cpp_impl_three_way_comparison -# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR -#endif -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) -# include +#if !defined( __has_include ) +# define __has_include( x ) false #endif +#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR ) +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +#endif +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +# include +#endif -static_assert( VK_HEADER_VERSION == 145 , "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 180, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. 
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION -#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) -# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) -# define VULKAN_HPP_TYPESAFE_CONVERSION -# endif +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif #endif // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) // which is an expression in a constructor initializer list. -#if defined(major) - #undef major +#if defined( major ) +# undef major #endif -#if defined(minor) - #undef minor +#if defined( minor ) +# undef minor #endif // Windows defines MemoryBarrier which is deprecated and collides // with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. -#if defined(MemoryBarrier) - #undef MemoryBarrier +#if defined( MemoryBarrier ) +# undef MemoryBarrier #endif -#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS) -# if defined(__clang__) -# if __has_feature(cxx_unrestricted_unions) -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) +# if defined( __clang__ ) +# if __has_feature( cxx_unrestricted_unions ) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( __GNUC__ ) +# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( _MSC_VER ) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif # endif -# elif defined(__GNUC__) -# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) -# if 40600 <= GCC_VERSION -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS -# endif -# elif defined(_MSC_VER) -# if 1900 <= _MSC_VER -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS -# endif -# endif #endif -#if !defined(VULKAN_HPP_INLINE) -# if defined(__clang__) -# if __has_attribute(always_inline) -# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +#if !defined( VULKAN_HPP_INLINE ) +# if defined( __clang__ ) +# if __has_attribute( always_inline ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined( __GNUC__ ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# elif defined( _MSC_VER ) +# define VULKAN_HPP_INLINE inline # else -# define VULKAN_HPP_INLINE inline +# define VULKAN_HPP_INLINE inline # endif -# elif defined(__GNUC__) -# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ -# elif defined(_MSC_VER) -# define VULKAN_HPP_INLINE inline -# else -# define VULKAN_HPP_INLINE inline -# endif #endif -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) -# define VULKAN_HPP_TYPESAFE_EXPLICIT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_EXPLICIT #else -# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit #endif -#if defined(__cpp_constexpr) -# define VULKAN_HPP_CONSTEXPR constexpr -# if __cpp_constexpr >= 201304 -# define VULKAN_HPP_CONSTEXPR_14 constexpr -# else +#if defined( __cpp_constexpr ) +# define VULKAN_HPP_CONSTEXPR constexpr +# if __cpp_constexpr >= 201304 +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 +# endif +# 
define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR # define VULKAN_HPP_CONSTEXPR_14 -# endif -# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr -#else -# define VULKAN_HPP_CONSTEXPR -# define VULKAN_HPP_CONSTEXPR_14 -# define VULKAN_HPP_CONST_OR_CONSTEXPR const +# define VULKAN_HPP_CONST_OR_CONSTEXPR const #endif -#if !defined(VULKAN_HPP_NOEXCEPT) -# if defined(_MSC_VER) && (_MSC_VER <= 1800) -# define VULKAN_HPP_NOEXCEPT -# else -# define VULKAN_HPP_NOEXCEPT noexcept -# define VULKAN_HPP_HAS_NOEXCEPT 1 -# endif +#if !defined( VULKAN_HPP_NOEXCEPT ) +# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) +# define VULKAN_HPP_NOEXCEPT +# else +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept +# else +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS +# endif +# endif #endif #if 14 <= VULKAN_HPP_CPP_VERSION @@ -169,122 +192,146 @@ static_assert( VK_HEADER_VERSION == 145 , "Wrong VK_HEADER_VERSION!" ); # define VULKAN_HPP_DEPRECATED( msg ) #endif -#if !defined(VULKAN_HPP_NAMESPACE) -#define VULKAN_HPP_NAMESPACE vk +#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) +# define VULKAN_HPP_NODISCARD [[nodiscard]] +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] +# else +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +# endif +#else +# define VULKAN_HPP_NODISCARD +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS #endif -#define VULKAN_HPP_STRINGIFY2(text) #text -#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text) -#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE) +#if !defined( VULKAN_HPP_NAMESPACE ) +# define VULKAN_HPP_NAMESPACE vk +#endif + +#define VULKAN_HPP_STRINGIFY2( text ) #text +#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text ) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE ) namespace VULKAN_HPP_NAMESPACE { - -#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template class ArrayProxy { public: VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT - : m_count(0) - , m_ptr(nullptr) + : m_count( 0 ) + , m_ptr( nullptr ) {} - VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) VULKAN_HPP_NOEXCEPT - : m_count(0) - , m_ptr(nullptr) + VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) {} - ArrayProxy(T & value) VULKAN_HPP_NOEXCEPT - : m_count(1) - , m_ptr(&value) + ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) {} - template::value, int>::type = 0> - ArrayProxy(typename std::remove_const::type & value) VULKAN_HPP_NOEXCEPT - : m_count(1) - , m_ptr(&value) + template ::value, int>::type = 0> + ArrayProxy( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) {} - ArrayProxy(uint32_t count, T * ptr) VULKAN_HPP_NOEXCEPT - : m_count(count) - , m_ptr(ptr) + ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) {} - template::value, int>::type = 0> - ArrayProxy(uint32_t count, typename std::remove_const::type * ptr) VULKAN_HPP_NOEXCEPT - : m_count(count) - , m_ptr(ptr) + template ::value, int>::type = 0> + ArrayProxy( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) {} - 
ArrayProxy(std::initializer_list const& list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) +# if __GNUC__ >= 9 +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Winit-list-lifetime" +# endif + + ArrayProxy( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template::value, int>::type = 0> - ArrayProxy(std::initializer_list::type> const& list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - ArrayProxy(std::initializer_list & list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) + ArrayProxy( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template::value, int>::type = 0> - ArrayProxy(std::initializer_list::type> & list) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(list.size())) - , m_ptr(list.begin()) + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} +# if __GNUC__ >= 9 +# pragma GCC diagnostic pop +# endif + template - ArrayProxy(std::array const & data) VULKAN_HPP_NOEXCEPT - : m_count(N) - , m_ptr(data.data()) + ArrayProxy( std::array const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) {} template ::value, int>::type = 0> - ArrayProxy(std::array::type, N> const & data) VULKAN_HPP_NOEXCEPT - : m_count(N) - , m_ptr(data.data()) + ArrayProxy( std::array::type, N> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) {} template - ArrayProxy(std::array & data) VULKAN_HPP_NOEXCEPT - : m_count(N) - , m_ptr(data.data()) + ArrayProxy( std::array & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) {} template ::value, int>::type = 0> - ArrayProxy(std::array::type, N> & data) VULKAN_HPP_NOEXCEPT - : m_count(N) - , m_ptr(data.data()) + ArrayProxy( std::array::type, N> & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) {} template ::type>> - ArrayProxy(std::vector const & data) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(data.size())) - , m_ptr(data.data()) + ArrayProxy( std::vector const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) {} - template ::type>, typename B = T, typename std::enable_if::value, int>::type = 0> - ArrayProxy(std::vector::type, Allocator> const& data) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(data.size())) - , m_ptr(data.data()) + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxy( std::vector::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) {} template ::type>> - ArrayProxy(std::vector & data) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(data.size())) - , m_ptr(data.data()) + ArrayProxy( std::vector & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) {} - template ::type>, typename B = T, typename std::enable_if::value, int>::type = 0> - ArrayProxy(std::vector::type, Allocator> & data) VULKAN_HPP_NOEXCEPT - : m_count(static_cast(data.size())) - , m_ptr(data.data()) + template ::type>, + typename B = T, 
+ typename std::enable_if::value, int>::type = 0> + ArrayProxy( std::vector::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) {} const T * begin() const VULKAN_HPP_NOEXCEPT @@ -299,19 +346,19 @@ namespace VULKAN_HPP_NAMESPACE const T & front() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); + VULKAN_HPP_ASSERT( m_count && m_ptr ); return *m_ptr; } const T & back() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); - return *(m_ptr + m_count - 1); + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); } bool empty() const VULKAN_HPP_NOEXCEPT { - return (m_count == 0); + return ( m_count == 0 ); } uint32_t size() const VULKAN_HPP_NOEXCEPT @@ -325,54 +372,248 @@ namespace VULKAN_HPP_NAMESPACE } private: - uint32_t m_count; - T * m_ptr; + uint32_t m_count; + T * m_ptr; + }; + + template + class ArrayProxyNoTemporaries + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + ArrayProxyNoTemporaries( T && value ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( typename std::remove_const::type && value ) = delete; + + ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const && list ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> const & list ) + VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> const && list ) = delete; + + ArrayProxyNoTemporaries( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list && list ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> && list ) = delete; + + template + ArrayProxyNoTemporaries( std::array const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template + ArrayProxyNoTemporaries( std::array const && data ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> const & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> const && data ) = delete; + + template + 
ArrayProxyNoTemporaries( std::array & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template + ArrayProxyNoTemporaries( std::array && data ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> & data ) VULKAN_HPP_NOEXCEPT + : m_count( N ) + , m_ptr( data.data() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::array::type, N> && data ) = delete; + + template ::type>> + ArrayProxyNoTemporaries( std::vector const & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>> + ArrayProxyNoTemporaries( std::vector const && data ) = delete; + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> const & data ) + VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> const && data ) = delete; + + template ::type>> + ArrayProxyNoTemporaries( std::vector & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>> + ArrayProxyNoTemporaries( std::vector && data ) = delete; + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( data.size() ) ) + , m_ptr( data.data() ) + {} + + template ::type>, + typename B = T, + typename std::enable_if::value, int>::type = 0> + ArrayProxyNoTemporaries( std::vector::type, Allocator> && data ) = delete; + + const T * begin() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + const T * end() const VULKAN_HPP_NOEXCEPT + { + return m_ptr + m_count; + } + + const T & front() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *m_ptr; + } + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); + } + + bool empty() const VULKAN_HPP_NOEXCEPT + { + return ( m_count == 0 ); + } + + uint32_t size() const VULKAN_HPP_NOEXCEPT + { + return m_count; + } + + T * data() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; }; #endif template - class ArrayWrapper1D : public std::array + class ArrayWrapper1D : public std::array { public: - VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT - : std::array() + VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array() {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array const & data ) VULKAN_HPP_NOEXCEPT : std::array( data ) {} - VULKAN_HPP_CONSTEXPR ArrayWrapper1D(std::array const& data) VULKAN_HPP_NOEXCEPT - : std::array(data) - {} - -#if defined(_WIN32) && !defined(_WIN64) - VULKAN_HPP_CONSTEXPR T const& operator[](int index) const VULKAN_HPP_NOEXCEPT +#if defined( _WIN32 ) && !defined( _WIN64 ) + VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT { - return std::array::operator[](index); + return std::array::operator[]( index ); } - VULKAN_HPP_CONSTEXPR T & operator[](int index) VULKAN_HPP_NOEXCEPT + T & operator[]( int index ) VULKAN_HPP_NOEXCEPT { - return std::array::operator[](index); + return std::array::operator[]( index ); } #endif - operator T const* () const VULKAN_HPP_NOEXCEPT + operator T 
const *() const VULKAN_HPP_NOEXCEPT
     {
       return this->data();
     }

-    operator T * () VULKAN_HPP_NOEXCEPT
+    operator T *() VULKAN_HPP_NOEXCEPT
     {
       return this->data();
     }

     template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
-    operator std::string const () const VULKAN_HPP_NOEXCEPT
+    operator std::string() const
     {
       return std::string( this->data() );
     }

 #if 17 <= VULKAN_HPP_CPP_VERSION
     template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
-    operator std::string_view const () const VULKAN_HPP_NOEXCEPT
+    operator std::string_view() const
     {
       return std::string_view( this->data() );
     }
@@ -417,57 +658,59 @@ namespace VULKAN_HPP_NAMESPACE

   // specialization of relational operators between std::string and arrays of chars
   template <size_t N>
-  bool operator<(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  bool operator<( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
   {
     return lhs < rhs.data();
   }

   template <size_t N>
-  bool operator<=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  bool operator<=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
   {
     return lhs <= rhs.data();
   }

   template <size_t N>
-  bool operator>(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  bool operator>( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
   {
     return lhs > rhs.data();
   }

   template <size_t N>
-  bool operator>=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  bool operator>=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
   {
     return lhs >= rhs.data();
   }

   template <size_t N>
-  bool operator==(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  bool operator==( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
   {
     return lhs == rhs.data();
   }

   template <size_t N>
-  bool operator!=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
+  bool operator!=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
   {
     return lhs != rhs.data();
   }

   template <typename T, size_t N, size_t M>
-  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>,N>
+  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N>
   {
   public:
-    VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT
-      : std::array<ArrayWrapper1D<T, M>, N>()
-    {}
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array<ArrayWrapper1D<T, M>, N>() {}

-    VULKAN_HPP_CONSTEXPR ArrayWrapper2D(std::array<std::array<T, M>,N> const& data) VULKAN_HPP_NOEXCEPT
-      : std::array<ArrayWrapper1D<T, M>, N>(*reinterpret_cast<std::array<ArrayWrapper1D<T, M>,N> const*>(&data))
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) )
     {}
   };

-  template <typename FlagBitsType> struct FlagTraits
+  template <typename FlagBitsType>
+  struct FlagTraits
   {
-    enum { allFlags = 0 };
+    enum
+    {
+      allFlags = 0
+    };
   };

   template <typename BitType>
@@ -477,52 +720,44 @@ namespace VULKAN_HPP_NAMESPACE
     using MaskType = typename std::underlying_type<BitType>::type;

     // constructors
-    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
-      : m_mask(0)
-    {}
+    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {}

-    VULKAN_HPP_CONSTEXPR Flags(BitType bit) VULKAN_HPP_NOEXCEPT
-      : m_mask(static_cast<MaskType>(bit))
-    {}
+    VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast<MaskType>( bit ) ) {}

-    VULKAN_HPP_CONSTEXPR Flags(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
-      : m_mask(rhs.m_mask)
-    {}
+    VULKAN_HPP_CONSTEXPR Flags( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    VULKAN_HPP_CONSTEXPR explicit Flags(MaskType flags) VULKAN_HPP_NOEXCEPT
-      : m_mask(flags)
-    {}
+    VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {}

     // relational operators
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>(Flags<BitType> const&) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Flags<BitType> const & ) const = default;
 #else
-    VULKAN_HPP_CONSTEXPR bool operator<(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR bool operator<( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return m_mask < rhs.m_mask;
     }

-    VULKAN_HPP_CONSTEXPR bool operator<=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR bool operator<=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return m_mask <= rhs.m_mask;
     }

-    VULKAN_HPP_CONSTEXPR bool operator>(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR bool operator>( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return m_mask > rhs.m_mask;
     }

-    VULKAN_HPP_CONSTEXPR bool operator>=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR bool operator>=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return m_mask >= rhs.m_mask;
     }

-    VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR bool operator==( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return m_mask == rhs.m_mask;
     }

-    VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR bool operator!=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return m_mask != rhs.m_mask;
     }
@@ -535,46 +770,42 @@ namespace VULKAN_HPP_NAMESPACE
     }

     // bitwise operators
-    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return Flags<BitType>(m_mask & rhs.m_mask);
+      return Flags<BitType>( m_mask & rhs.m_mask );
     }

-    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return Flags<BitType>(m_mask | rhs.m_mask);
+      return Flags<BitType>( m_mask | rhs.m_mask );
     }

-    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return Flags<BitType>(m_mask ^ rhs.m_mask);
+      return Flags<BitType>( m_mask ^ rhs.m_mask );
     }

     VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
     {
-      return Flags<BitType>(m_mask ^ FlagTraits<BitType>::allFlags);
+      return Flags<BitType>( m_mask ^ FlagTraits<BitType>::allFlags );
     }

     // assignment operators
-    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
-    {
-      m_mask = rhs.m_mask;
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       m_mask |= rhs.m_mask;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       m_mask &= rhs.m_mask;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
     {
       m_mask ^= rhs.m_mask;
       return *this;
@@ -588,96 +819,125 @@ namespace VULKAN_HPP_NAMESPACE

     explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
     {
-        return m_mask;
+      return m_mask;
     }

+#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC )
+  public:
+#else
   private:
-    MaskType  m_mask;
+#endif
+    MaskType m_mask;
   };

-#if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
   // relational operators only needed for pre C++20
   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags > bit;
+    return flags.operator>( bit );
   }

   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags >= bit;
+    return flags.operator>=( bit );
   }

   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags < bit;
+    return flags.operator<( bit );
  }

   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags <= bit;
+    return flags.operator<=( bit );
   }

   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags == bit;
+    return flags.operator==( bit );
   }

   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags != bit;
+    return flags.operator!=( bit );
   }
 #endif

   // bitwise operators
   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const & flags)VULKAN_HPP_NOEXCEPT
   {
-    return flags & bit;
+    return flags.operator&( bit );
   }

   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags | bit;
+    return flags.operator|( bit );
   }

   template <typename BitType>
-  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
   {
-    return flags ^ bit;
+    return flags.operator^( bit );
   }

   template <typename RefType>
   class Optional
   {
   public:
-    Optional(RefType & reference) VULKAN_HPP_NOEXCEPT { m_ptr = &reference; }
-    Optional(RefType * ptr) VULKAN_HPP_NOEXCEPT { m_ptr = ptr; }
-    Optional(std::nullptr_t) VULKAN_HPP_NOEXCEPT { m_ptr = nullptr; }
+    Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = &reference;
+    }
+    Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = ptr;
+    }
+    Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = nullptr;
+    }

-    operator RefType*() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
-    RefType const* operator->() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT { return !!m_ptr; }
+    operator RefType *() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+    RefType const * operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return !!m_ptr;
+    }

   private:
-    RefType *m_ptr;
+    RefType * m_ptr;
   };

-  template <typename X, typename Y> struct StructExtends { enum { value = false }; };
+  template <typename X, typename Y>
+  struct StructExtends
+  {
+
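
Flags<BitType> above is vulkan.hpp's type-safe bitmask: a scoped enum supplies the bits, the wrapper stores the underlying integer mask, and only flag-like operators are exposed, with the non-member overloads making `bit op flags` work by forwarding to the member operators. A reduced sketch of the pattern with assumed names (not the vulkan.hpp code):

    #include <cstdint>
    #include <type_traits>

    enum class QueueBits : uint32_t { Graphics = 1, Compute = 2, Transfer = 4 };

    // Minimal Flags-like wrapper: stores the underlying mask of a scoped enum.
    template <typename BitType>
    class Bitmask
    {
    public:
      using MaskType = typename std::underlying_type<BitType>::type;

      constexpr Bitmask() noexcept : m_mask( 0 ) {}
      constexpr Bitmask( BitType bit ) noexcept : m_mask( static_cast<MaskType>( bit ) ) {}

      constexpr Bitmask operator|( Bitmask rhs ) const noexcept { return Bitmask( m_mask | rhs.m_mask ); }
      constexpr Bitmask operator&( Bitmask rhs ) const noexcept { return Bitmask( m_mask & rhs.m_mask ); }
      constexpr explicit operator bool() const noexcept { return m_mask != 0; }

    private:
      constexpr explicit Bitmask( MaskType mask ) noexcept : m_mask( mask ) {}
      MaskType m_mask;
    };

    // Scoped enums have no built-in operator|, so bit | bit needs this overload.
    constexpr Bitmask<QueueBits> operator|( QueueBits lhs, QueueBits rhs ) noexcept
    {
      return Bitmask<QueueBits>( lhs ) | rhs;
    }

    int main()
    {
      Bitmask<QueueBits> wanted = QueueBits::Graphics | QueueBits::Compute;
      return ( wanted & QueueBits::Compute ) ? 0 : 1;  // bit is set -> exit code 0
    }
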
enum + { + value = false + }; + }; - template + template struct IsPartOfStructureChain { static const bool valid = false; }; - template + template struct IsPartOfStructureChain { static const bool valid = std::is_same::value || IsPartOfStructureChain::valid; @@ -686,14 +946,16 @@ namespace VULKAN_HPP_NAMESPACE template struct StructureChainContains { - static const bool value = std::is_same>::type>::value || - StructureChainContains::value; + static const bool value = + std::is_same>::type>::value || + StructureChainContains::value; }; template struct StructureChainContains<0, T, ChainElements...> { - static const bool value = std::is_same>::type>::value; + static const bool value = + std::is_same>::type>::value; }; template @@ -754,16 +1016,18 @@ namespace VULKAN_HPP_NAMESPACE StructureChain & operator=( StructureChain && rhs ) = delete; - template + template >::type, size_t Which = 0> T & get() VULKAN_HPP_NOEXCEPT { - return std::get::value>( *this ); + return std::get::value>( + static_cast &>( *this ) ); } - template + template >::type, size_t Which = 0> T const & get() const VULKAN_HPP_NOEXCEPT { - return std::get::value>( *this ); + return std::get::value>( + static_cast const &>( *this ) ); } template @@ -779,31 +1043,53 @@ namespace VULKAN_HPP_NAMESPACE } template - void relink() VULKAN_HPP_NOEXCEPT + typename std::enable_if< + std::is_same>::type>::value && + ( Which == 0 ), + bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT + { + return true; + } + + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, + "Can't unlink Structure that's not part of this StructureChain!" ); + return isLinked( reinterpret_cast( &get() ) ); + } + + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + void>::type + relink() VULKAN_HPP_NOEXCEPT { static_assert( IsPartOfStructureChain::valid, "Can't relink Structure that's not part of this StructureChain!" ); - static_assert( - !std::is_same>::type>::value || (Which != 0), - "It's not allowed to have the first element unlinked!" ); - auto pNext = reinterpret_cast( &get() ); VULKAN_HPP_ASSERT( !isLinked( pNext ) ); - auto & headElement = std::get<0>( *this ); - pNext->pNext = reinterpret_cast(headElement.pNext); + auto & headElement = std::get<0>( static_cast &>( *this ) ); + pNext->pNext = reinterpret_cast( headElement.pNext ); headElement.pNext = pNext; } template - void unlink() VULKAN_HPP_NOEXCEPT + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + void>::type + unlink() VULKAN_HPP_NOEXCEPT { static_assert( IsPartOfStructureChain::valid, "Can't unlink Structure that's not part of this StructureChain!" ); - static_assert( - !std::is_same>::type>::value || (Which != 0), - "It's not allowed to unlink the first element!" 
); - - unlink( reinterpret_cast( &get() ) ); + unlink( reinterpret_cast( &get() ) ); } private: @@ -838,9 +1124,10 @@ namespace VULKAN_HPP_NAMESPACE Types...> : std::integral_constant {}; - bool isLinked( VkBaseInStructure const * pNext ) + bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT { - VkBaseInStructure const * elementPtr = reinterpret_cast(&std::get<0>( *this )); + VkBaseInStructure const * elementPtr = reinterpret_cast( + &std::get<0>( static_cast const &>( *this ) ) ); while ( elementPtr ) { if ( elementPtr->pNext == pNext ) @@ -855,8 +1142,8 @@ namespace VULKAN_HPP_NAMESPACE template typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT { - auto & x = std::get( *this ); - x.pNext = &std::get( *this ); + auto & x = std::get( static_cast &>( *this ) ); + x.pNext = &std::get( static_cast &>( *this ) ); link(); } @@ -864,27 +1151,17 @@ namespace VULKAN_HPP_NAMESPACE typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT {} - template - typename std::enable_if::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT + void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT { - auto & element = std::get( *this ); - if ( element.pNext == pNext ) + VkBaseOutStructure * elementPtr = + reinterpret_cast( &std::get<0>( static_cast &>( *this ) ) ); + while ( elementPtr && ( elementPtr->pNext != pNext ) ) { - element.pNext = pNext->pNext; + elementPtr = elementPtr->pNext; } - else + if ( elementPtr ) { - unlink( pNext ); - } - } - - template - typename std::enable_if::type unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT - { - auto & element = std::get<0>( *this ); - if ( element.pNext == pNext ) - { - element.pNext = pNext->pNext; + elementPtr->pNext = pNext->pNext; } else { @@ -893,46 +1170,47 @@ namespace VULKAN_HPP_NAMESPACE } }; -#if !defined(VULKAN_HPP_NO_SMART_HANDLE) - template class UniqueHandleTraits; +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + template + class UniqueHandleTraits; template - class UniqueHandle : public UniqueHandleTraits::deleter + class UniqueHandle : public UniqueHandleTraits::deleter { private: - using Deleter = typename UniqueHandleTraits::deleter; + using Deleter = typename UniqueHandleTraits::deleter; public: using element_type = Type; - UniqueHandle() - : Deleter() - , m_value() - {} + UniqueHandle() : Deleter(), m_value() {} - explicit UniqueHandle( Type const& value, Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT - : Deleter( deleter) + explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : Deleter( deleter ) , m_value( value ) {} - UniqueHandle( UniqueHandle const& ) = delete; + UniqueHandle( UniqueHandle const & ) = delete; UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT - : Deleter( std::move( static_cast( other ) ) ) + : Deleter( std::move( static_cast( other ) ) ) , m_value( other.release() ) {} ~UniqueHandle() VULKAN_HPP_NOEXCEPT { - if ( m_value ) this->destroy( m_value ); + if ( m_value ) + { + this->destroy( m_value ); + } } - UniqueHandle & operator=( UniqueHandle const& ) = delete; + UniqueHandle & operator=( UniqueHandle const & ) = delete; UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT { reset( other.release() ); - *static_cast(this) = std::move( static_cast(other) ); + *static_cast( this ) = std::move( static_cast( other ) ); return *this; } @@ -941,7 +1219,7 @@ namespace VULKAN_HPP_NAMESPACE return m_value.operator bool(); } - Type const* operator->() const VULKAN_HPP_NOEXCEPT + 
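
The StructureChain hunks above revolve around one invariant: element 0 is the head of a pNext list and every other element is spliced in behind it, and the rewritten unlink() now walks that list generically instead of recursing over the tuple. The underlying idiom, reduced to plain structs (all names below are made up, and the casts ignore strict-aliasing niceties exactly the way the real pNext mechanism does):

    #include <cassert>
    #include <cstdint>

    // The pNext idiom that StructureChain automates: every struct starts with
    // ( sType, pNext ), so extension structs form a singly linked list.
    struct BaseOut
    {
      uint32_t  sType;
      BaseOut * pNext;
    };

    struct Features   { uint32_t sType = 1; BaseOut * pNext = nullptr; };
    struct ExtFeature { uint32_t sType = 2; BaseOut * pNext = nullptr; bool enabled = false; };

    // Generic unlink, as in the rewritten StructureChain::unlink: walk from the
    // head until the node whose pNext is the target, then splice it out.
    static void unlink( BaseOut * head, BaseOut * target )
    {
      while ( head && head->pNext != target )
        head = head->pNext;
      assert( head && "target was not part of the chain" );
      head->pNext = target->pNext;
    }

    int main()
    {
      Features   features;
      ExtFeature ext;
      features.pNext = reinterpret_cast<BaseOut *>( &ext );  // link

      unlink( reinterpret_cast<BaseOut *>( &features ), reinterpret_cast<BaseOut *>( &ext ) );
      assert( features.pNext == nullptr );
    }
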
Type const * operator->() const VULKAN_HPP_NOEXCEPT { return &m_value; } @@ -951,7 +1229,7 @@ namespace VULKAN_HPP_NAMESPACE return &m_value; } - Type const& operator*() const VULKAN_HPP_NOEXCEPT + Type const & operator*() const VULKAN_HPP_NOEXCEPT { return m_value; } @@ -971,11 +1249,14 @@ namespace VULKAN_HPP_NAMESPACE return m_value; } - void reset( Type const& value = Type() ) VULKAN_HPP_NOEXCEPT + void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT { if ( m_value != value ) { - if ( m_value ) this->destroy( m_value ); + if ( m_value ) + { + this->destroy( m_value ); + } m_value = value; } } @@ -983,235 +1264,1769 @@ namespace VULKAN_HPP_NAMESPACE Type release() VULKAN_HPP_NOEXCEPT { Type value = m_value; - m_value = nullptr; + m_value = nullptr; return value; } - void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT { - std::swap(m_value, rhs.m_value); - std::swap(static_cast(*this), static_cast(rhs)); + std::swap( m_value, rhs.m_value ); + std::swap( static_cast( *this ), static_cast( rhs ) ); } private: - Type m_value; + Type m_value; }; template - VULKAN_HPP_INLINE std::vector uniqueToRaw(std::vector const& handles) + VULKAN_HPP_INLINE std::vector + uniqueToRaw( std::vector const & handles ) { - std::vector newBuffer(handles.size()); - std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const& handle) { return handle.get(); }); + std::vector newBuffer( handles.size() ); + std::transform( + handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } ); return newBuffer; } template - VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, + UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT { lhs.swap( rhs ); } #endif -#if !defined(VK_NO_PROTOTYPES) +#if !defined( VK_NO_PROTOTYPES ) class DispatchLoaderStatic { public: - VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT + //=== VK_VERSION_1_0 === + + VkResult vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); } - VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyInstance( instance, pAllocator ); + } + + VkResult vkEnumeratePhysicalDevices( VkInstance instance, + uint32_t * pPhysicalDeviceCount, + VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } + + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice 
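
UniqueHandle above follows the classic unique-ownership pattern: the deleter comes from UniqueHandleTraits and is inherited rather than stored, moves transfer the raw handle via release(), and reset() destroys the old value before adopting the new one; uniqueToRaw simply copies the raw handles back out. A self-contained sketch of the same mechanics (Handle and FreeDeleter are stand-ins, not vulkan.hpp types):

    #include <cstdlib>
    #include <utility>

    using Handle = void *;  // stand-in for a raw Vulkan handle

    struct FreeDeleter
    {
      void destroy( Handle h ) const noexcept { std::free( h ); }
    };

    // Unique-ownership wrapper; inheriting the deleter means an empty deleter
    // adds no storage (the layout trick UniqueHandle relies on).
    class UniqueHandle : private FreeDeleter
    {
    public:
      UniqueHandle() noexcept : m_value( nullptr ) {}
      explicit UniqueHandle( Handle value ) noexcept : m_value( value ) {}
      UniqueHandle( UniqueHandle const & ) = delete;
      UniqueHandle( UniqueHandle && other ) noexcept : m_value( other.release() ) {}
      ~UniqueHandle() { reset(); }

      UniqueHandle & operator=( UniqueHandle const & ) = delete;
      UniqueHandle & operator=( UniqueHandle && other ) noexcept
      {
        reset( other.release() );
        return *this;
      }

      Handle release() noexcept { return std::exchange( m_value, nullptr ); }

      void reset( Handle value = nullptr ) noexcept
      {
        if ( m_value && m_value != value )
          this->destroy( m_value );  // provided by the deleter base class
        m_value = value;
      }

    private:
      Handle m_value;
    };

    int main()
    {
      UniqueHandle a{ std::malloc( 16 ) };
      UniqueHandle b = std::move( a );  // a releases, b owns
    }  // b frees the allocation exactly once
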
physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( + physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } + + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } + + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceProcAddr( device, pName ); + } + + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } + + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDevice( device, pAllocator ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); } - VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); } - VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumerateInstanceVersion( pApiVersion ); + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT + void vkGetDeviceQueue( VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue * 
pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); + } + + VkResult vkQueueSubmit( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } + + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } + + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeviceWaitIdle( device ); + } + + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + } + + void vkFreeMemory( VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeMemory( device, memory, pAllocator ); + } + + VkResult vkMapMemory( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } + + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, memory ); + } + + VkResult vkFlushMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + void vkGetDeviceMemoryCommitment( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + } + + VkResult vkBindBufferMemory( VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } + + VkResult vkBindImageMemory( VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } + + void vkGetBufferMemoryRequirements( VkDevice device, + VkBuffer buffer, + VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + } + + void vkGetImageMemoryRequirements( VkDevice device, + VkImage image, + VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements( + device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice 
physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( + physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } + + VkResult vkQueueBindSparse( VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo * pBindInfo, + VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } + + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + } + + void vkDestroyFence( VkDevice device, + VkFence fence, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFence( device, fence, pAllocator ); + } + + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } + + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceStatus( device, fence ); + } + + VkResult vkWaitForFences( VkDevice device, + uint32_t fenceCount, + const VkFence * pFences, + VkBool32 waitAll, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } + + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + } + + void vkDestroySemaphore( VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySemaphore( device, semaphore, pAllocator ); + } + + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + } + + void vkDestroyEvent( VkDevice device, + VkEvent event, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyEvent( device, event, pAllocator ); + } + + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEventStatus( device, event ); + } + + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } + + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetEvent( device, event ); + } + + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + } + + void vkDestroyQueryPool( VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + } + + VkResult vkGetQueryPoolResults( VkDevice device, 
+ VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } + + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + } + + void vkDestroyBuffer( VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBuffer( device, buffer, pAllocator ); + } + + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyBufferView( VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferView( device, bufferView, pAllocator ); + } + + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + } + + void vkDestroyImage( VkDevice device, + VkImage image, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImage( device, image, pAllocator ); + } + + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + } + + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyImageView( VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImageView( device, imageView, pAllocator ); + } + + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + } + + void vkDestroyShaderModule( VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + } + + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + } + + void vkDestroyPipelineCache( VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + } + + VkResult vkGetPipelineCacheData( VkDevice 
device, + VkPipelineCache pipelineCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } + + VkResult vkMergePipelineCaches( VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateGraphicsPipelines( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + void vkDestroyPipeline( VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipeline( device, pipeline, pAllocator ); + } + + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + } + + void vkDestroyPipelineLayout( VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + } + + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + } + + void vkDestroySampler( VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySampler( device, sampler, pAllocator ); + } + + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + } + + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + } + + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + } + + void vkDestroyDescriptorPool( VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorPool( 
device, descriptorPool, pAllocator ); + } + + VkResult vkResetDescriptorPool( VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } + + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } + + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + } + + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( + device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } + + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + } + + void vkDestroyFramebuffer( VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + } + + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkDestroyRenderPass( VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + } + + void vkGetRenderAreaGranularity( VkDevice device, + VkRenderPass renderPass, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } + + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + } + + void vkDestroyCommandPool( VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + } + + VkResult vkResetCommandPool( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandPool( device, commandPool, flags ); + } + + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } + + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * 
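
Every DispatchLoaderStatic member above and below is a one-line forwarder to the statically linked C entry point of the same name. The reason these forwarders exist is that vulkan.hpp wrappers take their dispatcher as a template parameter, so identical wrapper code can also run over a table of function pointers resolved at runtime. A toy version of that dispatch scheme (every name here is invented, not the vulkan.hpp API):

    #include <iostream>

    // Stand-in for a statically linked C entry point.
    extern "C" inline int vkDoWork( int value ) { return value * 2; }

    // Static dispatcher: forwards straight to the linked symbol.
    struct DispatchStatic
    {
      int vkDoWork( int value ) const { return ::vkDoWork( value ); }
    };

    // Dynamic dispatcher: holds a function pointer filled in at load time.
    struct DispatchDynamic
    {
      int ( *pfnDoWork )( int ) = nullptr;
      int vkDoWork( int value ) const { return pfnDoWork( value ); }
    };

    // Wrapper in the vulkan.hpp style: the dispatcher is a template parameter,
    // so the call site picks static or dynamic dispatch with no code change.
    template <typename Dispatch = DispatchStatic>
    int doWork( int value, Dispatch const & d = Dispatch() )
    {
      return d.vkDoWork( value );
    }

    int main()
    {
      std::cout << doWork( 21 ) << '\n';      // static dispatch -> 42

      DispatchDynamic dyn;
      dyn.pfnDoWork = &vkDoWork;              // in real use: dlsym / vkGetInstanceProcAddr
      std::cout << doWork( 21, dyn ) << '\n'; // dynamic dispatch -> 42
    }
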
pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); + } + + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); } - void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + return ::vkEndCommandBuffer( commandBuffer ); } - void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + return ::vkResetCommandBuffer( commandBuffer, flags ); } - void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); - } - - void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); - } - - void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); - } - - void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } - - void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } - - void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } - - void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); - } - - void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); - } - - void 
vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); } - void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); } - void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetScissor( VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); } - void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); } - void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, + const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + } + + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + } + + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + } + + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + } + + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + } + + void vkCmdBindDescriptorSets( VkCommandBuffer 
commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( commandBuffer, + pipelineBindPoint, + layout, + firstSet, + descriptorSetCount, + pDescriptorSets, + dynamicOffsetCount, + pDynamicOffsets ); + } + + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } + + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); } - void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides ) const VULKAN_HPP_NOEXCEPT + void vkCmdDraw( VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); } - void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdBuildAccelerationStructureIndirectKHR( VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBuildAccelerationStructureIndirectKHR( commandBuffer, pInfo, indirectBuffer, indirectOffset, indirectStride ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdBuildAccelerationStructureKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructureKHR( commandBuffer, infoCount, pInfos, ppOffsetInfos ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize 
instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const VULKAN_HPP_NOEXCEPT + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT + void vkCmdDispatch( VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); } - void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); } - void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, 
VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } + + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } - - void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); } - void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } + + void vkCmdClearColorImage( VkCommandBuffer 
+    void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue * pColor, uint32_t rangeCount, const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
+    }

+    void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
+    }

+    void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment * pAttachments, uint32_t rectCount, const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
+    }

+    void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }

+    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
+    }

+    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
+    }

+    void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }

+    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }

+    void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
+    }
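vkCmdPipelineBarrier, wrapped just above, is the workhorse for image layout transitions. A minimal sketch of the common transfer-write to shader-read transition, assuming `cmd` and `image` are valid handles created elsewhere:

#include <vulkan/vulkan.h>

// Sketch only: transitions a color image from TRANSFER_DST_OPTIMAL to
// SHADER_READ_ONLY_OPTIMAL so a fragment shader can sample it.
void transitionForSampling( VkCommandBuffer cmd, VkImage image )
{
  VkImageMemoryBarrier barrier{};
  barrier.sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  barrier.srcAccessMask       = VK_ACCESS_TRANSFER_WRITE_BIT;
  barrier.dstAccessMask       = VK_ACCESS_SHADER_READ_BIT;
  barrier.oldLayout           = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  barrier.newLayout           = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.image               = image;
  barrier.subresourceRange    = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
  vkCmdPipelineBarrier( cmd, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                        0, 0, nullptr, 0, nullptr, 1, &barrier );
}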
+    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
+    }

+    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
+    }

+    void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
+    }

+    void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
+    }

+    void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
+    }

+    void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
+    }

+    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass( commandBuffer, contents );
+    }

+    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass( commandBuffer );
+    }

+    void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
+    }

+    //=== VK_VERSION_1_1 ===

+    VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceVersion( pApiVersion );
+    }

+    VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
+    }

+    VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
+    }

+    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }

+    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
+    }
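The render pass wrappers above must be recorded in a fixed begin/end order. A minimal sketch of that order, assuming the render pass, framebuffer and extent come from setup code not shown in this patch:

#include <vulkan/vulkan.h>

// Sketch only: the canonical vkCmdBeginRenderPass / vkCmdEndRenderPass
// bracketing; the draw calls in between are elided.
void recordRenderPass( VkCommandBuffer cmd, VkRenderPass renderPass, VkFramebuffer framebuffer, VkExtent2D extent )
{
  VkClearValue clear{};
  clear.color = { { 0.0f, 0.0f, 0.0f, 1.0f } };

  VkRenderPassBeginInfo beginInfo{};
  beginInfo.sType             = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  beginInfo.renderPass        = renderPass;
  beginInfo.framebuffer       = framebuffer;
  beginInfo.renderArea.extent = extent;
  beginInfo.clearValueCount   = 1;
  beginInfo.pClearValues      = &clear;

  vkCmdBeginRenderPass( cmd, &beginInfo, VK_SUBPASS_CONTENTS_INLINE );
  // ... bind pipelines and record draws here ...
  vkCmdEndRenderPass( cmd );
}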
+    void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }

+    VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t * pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }

+    void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }

+    void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }

+    void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }

+    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
+    }

+    void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
+    }

+    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
+    }

+    VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }

+    void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }

+    void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
+    }

+    void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
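The *2 query wrappers above exist so that extension feature structures can be chained through pNext and filled in a single call. A minimal sketch, assuming `physicalDevice` is a valid handle; the helper name is hypothetical:

#include <vulkan/vulkan.h>

// Sketch only: chains a core-1.1 feature struct behind
// VkPhysicalDeviceFeatures2 and reads the result of one query call.
bool supportsShaderDrawParameters( VkPhysicalDevice physicalDevice )
{
  VkPhysicalDeviceShaderDrawParametersFeatures drawParams{};
  drawParams.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;

  VkPhysicalDeviceFeatures2 features2{};
  features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
  features2.pNext = &drawParams;

  vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );
  return drawParams.shaderDrawParameters == VK_TRUE;
}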
+    void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPool( device, commandPool, flags );
+    }

+    void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
+    }

+    VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }

+    void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
+    }

+    VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }

+    void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
+    }

+    void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }

+    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }

+    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }

+    void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }

+    void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
+    }
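vkGetDescriptorSetLayoutSupport, wrapped directly above, lets a caller test whether a descriptor set layout would be creatable before committing to it. A minimal sketch with hypothetical inputs:

#include <vulkan/vulkan.h>

// Sketch only: asks the implementation whether the proposed bindings fit
// within its limits. `device` and `bindings` are assumed inputs.
bool layoutIsSupported( VkDevice device, const VkDescriptorSetLayoutBinding * bindings, uint32_t bindingCount )
{
  VkDescriptorSetLayoutCreateInfo createInfo{};
  createInfo.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  createInfo.bindingCount = bindingCount;
  createInfo.pBindings    = bindings;

  VkDescriptorSetLayoutSupport support{};
  support.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT;

  vkGetDescriptorSetLayoutSupport( device, &createInfo, &support );
  return support.supported == VK_TRUE;
}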
+    //=== VK_VERSION_1_2 ===

+    void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }

+    void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }

+    VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2 * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
+    }

+    void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }

+    void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo * pSubpassBeginInfo, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }

+    void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
+    }

+    void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
+    }

+    VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
+    }

+    VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphores( device, pWaitInfo, timeout );
+    }

+    VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphore( device, pSignalInfo );
+    }

+    VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddress( device, pInfo );
+    }

+    uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
+    }

+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
+    }

+    //=== VK_KHR_surface ===

+    void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+    }

+    VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
+    }
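vkGetPhysicalDeviceSurfaceSupportKHR, wrapped just above, is normally called once per queue family while picking queues. A minimal sketch of that loop, assuming valid handles; the helper name is hypothetical:

#include <vulkan/vulkan.h>

// Sketch only: returns the first queue family able to present to `surface`,
// or UINT32_MAX when none qualifies.
uint32_t findPresentQueueFamily( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface )
{
  uint32_t count = 0;
  vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, &count, nullptr );
  for ( uint32_t family = 0; family < count; ++family )
  {
    VkBool32 supported = VK_FALSE;
    vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, family, surface, &supported );
    if ( supported == VK_TRUE )
      return family;
  }
  return UINT32_MAX;
}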
+    VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
+    }

+    VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pSurfaceFormatCount, VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
+    }

+    VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pPresentModeCount, VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
+    }

+    //=== VK_KHR_swapchain ===

+    VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
+    }

+    void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
+    }

+    VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
+    }

+    VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
+    }

+    VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueuePresentKHR( queue, pPresentInfo );
+    }

+    VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
+    }

+    VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
+    }

+    VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pRectCount, VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
+    }

+    VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
+    }
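The VK_KHR_swapchain wrappers above are used in a fixed acquire/submit/present rhythm. A minimal sketch of one frame, assuming semaphores and the queue submission are handled by code not shown here:

#include <vulkan/vulkan.h>

// Sketch only: acquire an image, (elided) submit work that signals
// `renderFinished`, then present. Error handling is reduced to the
// out-of-date case, which callers typically answer by recreating the
// swapchain.
VkResult presentOneFrame( VkDevice device, VkQueue queue, VkSwapchainKHR swapchain,
                          VkSemaphore imageAvailable, VkSemaphore renderFinished )
{
  uint32_t imageIndex = 0;
  VkResult result = vkAcquireNextImageKHR( device, swapchain, UINT64_MAX, imageAvailable, VK_NULL_HANDLE, &imageIndex );
  if ( result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR )
    return result; // e.g. VK_ERROR_OUT_OF_DATE_KHR

  // ... record and submit work that waits on imageAvailable and signals renderFinished ...

  VkPresentInfoKHR presentInfo{};
  presentInfo.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
  presentInfo.waitSemaphoreCount = 1;
  presentInfo.pWaitSemaphores    = &renderFinished;
  presentInfo.swapchainCount     = 1;
  presentInfo.pSwapchains        = &swapchain;
  presentInfo.pImageIndices      = &imageIndex;
  return vkQueuePresentKHR( queue, &presentInfo );
}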
+    //=== VK_KHR_display ===

+    VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }

+    VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }

+    VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t * pDisplayCount, VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
+    }

+    VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t * pPropertyCount, VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
+    }

+    VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
+    }

+    VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+    }

+    VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }

+    //=== VK_KHR_display_swapchain ===

+    VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
+    }

+#  if defined( VK_USE_PLATFORM_XLIB_KHR )
+    //=== VK_KHR_xlib_surface ===

+    VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }

+    VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display * dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
+    }
+#  endif /*VK_USE_PLATFORM_XLIB_KHR*/

+#  if defined( VK_USE_PLATFORM_XCB_KHR )
+    //=== VK_KHR_xcb_surface ===

+    VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+    VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t * connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
+    }
+#  endif /*VK_USE_PLATFORM_XCB_KHR*/

+#  if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+    //=== VK_KHR_wayland_surface ===

+    VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }

+    VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display * display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
+    }
+#  endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

+#  if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    //=== VK_KHR_android_surface ===

+    VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#  endif /*VK_USE_PLATFORM_ANDROID_KHR*/

+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_win32_surface ===

+    VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }

+    VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
+    }
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

+    //=== VK_EXT_debug_report ===

+    VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
+    }

+    void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
+    }

+    void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+    }

+    //=== VK_EXT_debug_marker ===

+    VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
+    }
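The VK_EXT_debug_report wrappers above call the extension entry points directly, which mirrors the static dispatcher's assumption that the symbols resolve at link time; real applications more commonly fetch them with vkGetInstanceProcAddr. A minimal sketch of installing a callback, assuming the instance was created with the extension enabled; the helper names are hypothetical:

#include <vulkan/vulkan.h>
#include <cstdio>

// Sketch only: prints validation messages; returning VK_FALSE tells the
// implementation not to abort the call that triggered the report.
static VKAPI_ATTR VkBool32 VKAPI_CALL reportCallback( VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT,
                                                      uint64_t, size_t, int32_t, const char * pLayerPrefix,
                                                      const char * pMessage, void * )
{
  std::fprintf( stderr, "[%s] %s\n", pLayerPrefix, pMessage );
  return VK_FALSE;
}

VkResult installDebugReport( VkInstance instance, VkDebugReportCallbackEXT * pCallback )
{
  VkDebugReportCallbackCreateInfoEXT createInfo{};
  createInfo.sType       = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
  createInfo.flags       = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
  createInfo.pfnCallback = reportCallback;
  return vkCreateDebugReportCallbackEXT( instance, &createInfo, nullptr, pCallback );
}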
+    VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
+    }

+    void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
     }
@@ -1221,269 +3036,378 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdDebugMarkerEndEXT( commandBuffer );
     }

-    void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
     }

-    void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+#  if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_KHR_video_queue ===

+    VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, const VkVideoProfileKHR * pVideoProfile, VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
+      return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities );
     }

-    void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+      return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties );
     }

-    void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkCreateVideoSessionKHR( VkDevice device, const VkVideoSessionCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+      return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession );
     }

-    void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+    void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
+      return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator );
     }
-    void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t * pVideoSessionMemoryRequirementsCount, VkVideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+      return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pVideoSessionMemoryRequirementsCount, pVideoSessionMemoryRequirements );
     }

-    void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkBindVideoSessionMemoryKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t videoSessionBindMemoryCount, const VkVideoBindMemoryKHR * pVideoSessionBindMemories ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+      return ::vkBindVideoSessionMemoryKHR( device, videoSession, videoSessionBindMemoryCount, pVideoSessionBindMemories );
     }

-    void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkCreateVideoSessionParametersKHR( VkDevice device, const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
+      return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters );
     }

-    void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+      return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo );
     }

-    void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    void vkDestroyVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+      return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator );
     }

-    void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+      return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo );
     }
-    void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
+      return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo );
     }

-    void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
+      return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo );
+    }
+#  endif /*VK_ENABLE_BETA_EXTENSIONS*/

+#  if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_KHR_video_decode_queue ===

+    void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pFrameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDecodeVideoKHR( commandBuffer, pFrameInfo );
+    }
+#  endif /*VK_ENABLE_BETA_EXTENSIONS*/

+    //=== VK_EXT_transform_feedback ===

+    void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets, const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
     }

-    void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+      return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
     }

-    void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+      return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
     }
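The VK_EXT_transform_feedback wrappers above require a strict recording order: buffers are bound first, and capture happens only between the begin/end pair. A minimal sketch with hypothetical handles:

#include <vulkan/vulkan.h>

// Sketch only: binds one capture buffer and brackets the (elided) draws
// whose vertex outputs should be written to it.
void recordXfbCapture( VkCommandBuffer cmd, VkBuffer xfbBuffer, VkDeviceSize size )
{
  VkDeviceSize offset = 0;
  vkCmdBindTransformFeedbackBuffersEXT( cmd, 0, 1, &xfbBuffer, &offset, &size );
  vkCmdBeginTransformFeedbackEXT( cmd, 0, 0, nullptr, nullptr );
  // ... vkCmdDraw* calls whose outputs are captured ...
  vkCmdEndTransformFeedbackEXT( cmd, 0, 0, nullptr, nullptr );
}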
-    void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+      return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
     }

-    void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
-    }

-    void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
-    }

-    void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
-    }

-    void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdEndConditionalRenderingEXT( commandBuffer );
-    }

-    void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
-    }

-    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
-    }

-    void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
     {
       return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
     }

-    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdEndRenderPass( commandBuffer );
+      return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
     }

-    void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    //=== VK_NVX_binary_import ===

+    VkResult vkCreateCuModuleNVX( VkDevice device, const VkCuModuleCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
+      return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule );
     }

-    void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkCreateCuFunctionNVX( VkDevice device, const VkCuFunctionCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
+      return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction );
     }
-    void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+      return ::vkDestroyCuModuleNVX( device, module, pAllocator );
     }

-    void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
+      return ::vkDestroyCuFunctionNVX( device, function, pAllocator );
     }

-    void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
+      return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo );
     }

-    void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
+    //=== VK_NVX_image_view_handle ===

+    uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
+      return ::vkGetImageViewHandleNVX( device, pInfo );
     }

-    void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+      return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
     }

-    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    //=== VK_AMD_draw_indirect_count ===

+    void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdNextSubpass( commandBuffer, contents );
+      return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
     }

-    void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+      return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
     }

-    void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    //=== VK_AMD_shader_info ===

+    VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+      return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
     }

-    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+#  if defined( VK_USE_PLATFORM_GGP )
+    //=== VK_GGP_stream_descriptor_surface ===

+    VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+      return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#  endif /*VK_USE_PLATFORM_GGP*/

+    //=== VK_NV_external_memory_capabilities ===

+    VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
     }

-    void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_NV_external_memory_win32 ===

+    VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
+      return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
+    }
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

+    //=== VK_KHR_get_physical_device_properties2 ===

+    void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
     }

-    void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT
+    void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
+      return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
     }

-    void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
+    void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
+      return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
     }

-    void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
+      return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
     }

-    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
     }

-    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
+      return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
     }

-    void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const VULKAN_HPP_NOEXCEPT
+    void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
     }

-    void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
-    }

+    //=== VK_KHR_device_group ===
-    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
+    void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
-    }

-    void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
-    }

-    void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetCullModeEXT( commandBuffer, cullMode );
-    }

-    void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
-    }

-    void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
-    }

-    void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
-    }

-    void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
-    }

-    void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
-    }

-    void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
-    }

-    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
+      return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
     }

     void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
@@ -1491,1145 +3415,429 @@ namespace VULKAN_HPP_NAMESPACE
       return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
     }

-    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
+    void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
     {
-      return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
+      return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
     }
-    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
-    }

-    void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
-    }

-    void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
-    }

-    void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
-    }

-    void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
-    }

-    VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
-    }

-    VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
-    }

-    VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
-    }

-    void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
-    }

-    void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
-    }

-    void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
-    }

-    void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
-    }

-    void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
-    }

-    void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
-    }

-    void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
-    }
VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); - } - - void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); - } - - void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); - } - - void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); - } - - void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); - } - - void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, buffer, offset ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, 
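    /*
      The vkCmdTraceRaysKHR wrappers being removed here sat behind
      VK_ENABLE_BETA_EXTENSIONS, the provisional-API guard. Applications opt in
      by defining the macro before any Vulkan include; otherwise the guarded
      declarations simply do not exist:

        #define VK_ENABLE_BETA_EXTENSIONS   // must come before the include
        #include <vulkan/vulkan.h>
    */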
hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth ); - } - - void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); - } - - void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } - - void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); - } - - void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); - } - - VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEndCommandBuffer( commandBuffer ); - } - - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandBuffer( commandBuffer, flags ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); - } - - VkResult vkAcquireNextImageKHR( VkDevice device, 
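    /*
      vkAcquireNextImageKHR here is the usual head of a frame loop. A sketch
      with hypothetical 'device', 'swapchain' and 'imageAvailable' handles; the
      recreate helper is an assumption, not part of this header:

        uint32_t imageIndex = 0;
        VkResult r = vkAcquireNextImageKHR( device, swapchain, UINT64_MAX,
                                            imageAvailable, VK_NULL_HANDLE, &imageIndex );
        if ( r == VK_ERROR_OUT_OF_DATE_KHR )
          recreateSwapchain();  // surface changed; rebuild before rendering
    */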
VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); - } - - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); - } - - VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireProfilingLockKHR( device, pInfo ); - } - - VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); - } - - VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); - } - - VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkBindAccelerationStructureMemoryKHR( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindAccelerationStructureMemoryKHR( device, bindInfoCount, pBindInfos ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); - } - - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory( device, image, memory, memoryOffset ); - } - - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkBuildAccelerationStructureKHR( VkDevice device, 
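    /*
      vkBindBufferMemory2 batches what vkBindBufferMemory does one handle at a
      time. A one-binding sketch with hypothetical 'buffer' and 'memory':

        VkBindBufferMemoryInfo bind{};
        bind.sType        = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
        bind.buffer       = buffer;
        bind.memory       = memory;
        bind.memoryOffset = 0;
        vkBindBufferMemory2( device, 1, &bind );
    */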
uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildAccelerationStructureKHR( device, infoCount, pInfos, ppOffsetInfos ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCompileDeferredNV( device, pipeline, shader ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCopyAccelerationStructureKHR( VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureKHR( device, pInfo ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureToMemoryKHR( device, pInfo ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToAccelerationStructureKHR( device, pInfo ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } - - VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); - } - - VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); - } - - VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); - } - - VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); - } - - VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); - } - - VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } - - VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } - - VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); - } - - VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); - } - - VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); - } - - VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); - } - - VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); - } - - VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } - - VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* 
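    /*
      Every create/destroy pair above threads an optional VkAllocationCallbacks
      pointer through; nullptr selects the implementation's own allocator, and
      the same callbacks must be used for creation and destruction. A sketch
      using vkCreateFence from this group:

        VkFenceCreateInfo info{};
        info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        info.flags = VK_FENCE_CREATE_SIGNALED_BIT;  // start signaled for frame pacing
        VkFence fence = VK_NULL_HANDLE;
        if ( vkCreateFence( device, &info, nullptr, &fence ) == VK_SUCCESS )
          vkDestroyFence( device, fence, nullptr );
    */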
pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); - } - - VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); - } - - VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === - VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); } +# endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesKHR( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } + //=== VK_KHR_maintenance1 === - VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT + void vkTrimCommandPoolKHR( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); } - VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); - } + //=== VK_KHR_device_group_creation === - VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT { - return 
::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + return ::vkEnumeratePhysicalDeviceGroupsKHR( + instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); } - VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); - } + //=== VK_KHR_external_memory_capabilities === - VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); } - VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === - VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetMemoryWin32HandleKHR( VkDevice device, + const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } - VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); - } - - VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); - } - - VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, 
VkValidationCacheEXT* pValidationCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); - } - - VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); - } - - VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeferredOperationJoinKHR( device, operation ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); - } - - void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBuffer( device, buffer, pAllocator ); - } - - void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferView( device, bufferView, pAllocator ); - } - - void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCommandPool( device, commandPool, pAllocator ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); - } - - void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); - } - - void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); - } - - void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); - } - - void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator 
) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDevice( device, pAllocator ); - } - - void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyEvent( device, event, pAllocator ); - } - - void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFence( device, fence, pAllocator ); - } - - void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); - } - - void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImage( device, image, pAllocator ); - } - - void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImageView( device, imageView, pAllocator ); - } - - void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); - } - - void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipeline( device, pipeline, pAllocator ); - } - - void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); - } - - void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); - } - - void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); - } - - void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyQueryPool( device, queryPool, pAllocator ); - } - - void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyRenderPass( device, renderPass, pAllocator ); - } - - void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySampler( device, sampler, pAllocator ); - } - - void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); - } - - void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); - } - - void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySemaphore( device, semaphore, pAllocator ); - } - - void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); - } - - void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); - } - - void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); - } - - VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeviceWaitIdle( device ); - } - - VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); - } - - VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } - - void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); - } - - VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); - } - - void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeMemory( device, memory, pAllocator ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - void vkGetAccelerationStructureMemoryRequirementsKHR( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct 
AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); - } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddress( device, pInfo ); - } - - VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressEXT( device, pInfo ); - } - - VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressKHR( device, pInfo ); - } - - void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); - } - - void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } - - void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } - - uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); - } - - uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); - } - - VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationResultKHR( device, operation ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); - } - - void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const 
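    /*
      vkGetBufferDeviceAddress above returns a GPU pointer usable from shaders.
      Sketch with a hypothetical 'buffer'; it assumes the buffer was created
      with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT and that the
      bufferDeviceAddress feature was enabled on the device:

        VkBufferDeviceAddressInfo info{};
        info.sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
        info.buffer = buffer;
        VkDeviceAddress addr = vkGetBufferDeviceAddress( device, &info );
    */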
VkAccelerationStructureVersionKHR* version ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, version ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } - - void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } - - VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); - } - - void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); - } - - uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); - } - - uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); - } - - PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceProcAddr( device, pName ); - } - - void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); - } - - void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); - } - - VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetEventStatus( device, event ); - } - - VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); - } - - VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceStatus( device, fence ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - 
VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } - - VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); - } - - void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); - } - - void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } - - void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); - } - - VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); - } - - uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewHandleNVX( device, pInfo ); - } - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); - } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + //=== VK_KHR_external_memory_fd === - VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); } - VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); } - VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_external_semaphore_capabilities === + + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT + VkResult vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); } - VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL 
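    /*
      vkGetMemoryFdKHR is the POSIX counterpart of the Win32 handle export
      guarded above. A sketch assuming hypothetical 'memory' allocated with a
      VkExportMemoryAllocateInfo naming the opaque-fd handle type; the caller
      owns the returned file descriptor:

        VkMemoryGetFdInfoKHR getFd{};
        getFd.sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
        getFd.memory     = memory;
        getFd.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
        int fd = -1;
        vkGetMemoryFdKHR( device, &getFd, &fd );
    */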
parameter, VkPerformanceValueINTEL* pValue ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VkResult + vkImportSemaphoreFdKHR( VkDevice device, + const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); } - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); - } - - VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); - } - - VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); - } - - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); - } - - void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); - } - - VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t 
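    /*
      vkGetQueryPoolResults above copies a query range into caller memory; the
      stride and the 64-bit flag must agree with the destination layout. Sketch
      for two timestamp queries, hypothetical 'queryPool':

        uint64_t results[2] = {};
        vkGetQueryPoolResults( device, queryPool, 0, 2,
                               sizeof( results ), results, sizeof( uint64_t ),
                               VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT );
    */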
dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); - } - - void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); - } - - VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); - } - - VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); - } - - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetSemaphoreFdKHR( VkDevice device, + const VkSemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_push_descriptor === - VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + return ::vkCmdPushDescriptorSetKHR( + commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); } - VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + //=== VK_EXT_conditional_rendering === + + void vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } + + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const 
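    /*
      vkCmdPushDescriptorSetKHR above writes descriptors straight into the
      command buffer, so no descriptor set is allocated and
      VkWriteDescriptorSet::dstSet is ignored. Sketch with hypothetical 'cmd',
      'layout' and a filled VkDescriptorBufferInfo 'bufferInfo':

        VkWriteDescriptorSet write{};
        write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        write.dstBinding      = 0;
        write.descriptorCount = 1;
        write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        write.pBufferInfo     = &bufferInfo;
        vkCmdPushDescriptorSetKHR( cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout, 0, 1, &write );
    */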
VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + //=== VK_NV_clip_space_w_scaling === + + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + } + + //=== VK_EXT_direct_mode_display === + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } + + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + RROutput rrOutput, + VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } + + //=== VK_EXT_display_control === + + VkResult vkDisplayPowerControlEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + } + + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } + + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); + } + + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT { return 
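    /*
      vkRegisterDisplayEventEXT above turns a display event into an ordinary
      VkFence, which is how VK_EXT_display_control exposes vblank waits. Sketch
      with a hypothetical acquired 'display':

        VkDisplayEventInfoEXT evt{};
        evt.sType        = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT;
        evt.displayEvent = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT;
        VkFence vblank = VK_NULL_HANDLE;
        vkRegisterDisplayEventEXT( device, display, &evt, nullptr, &vblank );
        // wait on and destroy 'vblank' like any other fence
    */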
::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); } - VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + //=== VK_GOOGLE_display_timing === + + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); } + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + } + + //=== VK_EXT_discard_rectangles === + + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( + commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } + + //=== VK_EXT_hdr_metadata === + + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } + + //=== VK_KHR_create_renderpass2 === + + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } + + //=== VK_KHR_shared_presentable_image === + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSwapchainStatusKHR( device, swapchain ); } - VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_external_fence_capabilities === + + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + return 
::vkGetPhysicalDeviceExternalFencePropertiesKHR( + physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); } - VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + VkResult vkImportFenceWin32HandleKHR( + VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } + + VkResult vkGetFenceWin32HandleKHR( VkDevice device, + const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + VkResult vkImportFenceFdKHR( VkDevice device, + const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { - return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_performance_query === - VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT { - return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); } - VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT { - return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); } - VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, 
VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT + VkResult vkAcquireProfilingLockKHR( VkDevice device, + const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkMapMemory( device, memory, offset, size, flags, ppData ); - } - - VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); - } - - VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); - } - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + return ::vkAcquireProfilingLockKHR( device, pInfo ); } void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT @@ -2637,687 +3845,1625 @@ namespace VULKAN_HPP_NAMESPACE return ::vkReleaseProfilingLockKHR( device ); } - VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandPool( device, commandPool, flags ); - } + //=== VK_KHR_get_surface_capabilities2 === - VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetDescriptorPool( device, descriptorPool, flags ); - } - - VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetEvent( device, event ); - } - - VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetFences( device, fenceCount, pFences ); - } - - void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); - } - - void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); - } - - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo 
); - } - - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); - } - - VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetEvent( device, event ); - } - - void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); - } - - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); - } - - VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); - } - - VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphore( device, pSignalInfo ); - } - - VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphoreKHR( device, pSignalInfo ); - } - - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPool( device, commandPool, flags ); - } - - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPoolKHR( device, commandPool, flags ); - } - - void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUninitializePerformanceApiINTEL( device ); - } - - void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory( device, memory ); - } - - void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); - } - - VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); - } - - VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphores( device, pWaitInfo, timeout 
); - } - - VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); - } - -#ifdef VK_ENABLE_BETA_EXTENSIONS - VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); - } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); - } - - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); - } - - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } - - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } - -#ifdef VK_USE_PLATFORM_IOS_MVK - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#ifdef VK_USE_PLATFORM_FUCHSIA - VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#ifdef VK_USE_PLATFORM_MACOS_MVK - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - -#ifdef VK_USE_PLATFORM_METAL_EXT - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const 
VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#ifdef VK_USE_PLATFORM_GGP - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_GGP*/ - -#ifdef VK_USE_PLATFORM_VI_NN - VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_VI_NN*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - - void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); - } - - void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); - } - - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); - } - - void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyInstance( instance, pAllocator ); - } - - void vkDestroySurfaceKHR( VkInstance instance, 
VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); - } - - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); - } - - PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetInstanceProcAddr( instance, pName ); - } - - void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); - } - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); - } -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); - } - - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); - } - - VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); - } - - VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); - } - - VkResult 
vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); - } - - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); - } - - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); - } - - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); - } - - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } - - void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } - - void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } - - void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } - - VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); - } - - void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } - - void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } - - void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); - } - - void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); - } - - void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); - } - - void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); - } - - void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties 
) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); - } - - void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); - } - - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); - } - - VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } - - VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } - - void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); - } - - void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); - } - - void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); - } - - void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); - } - - VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); - } - - void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); - } - - void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); - } - - void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); - } - 
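// Note: the DispatchLoaderStatic members being removed and re-added throughout this
// hunk are one-line forwards to the identically named global C entry points; the
// regenerated header appears only to regroup them by extension and reflow the
// signatures. A minimal usage sketch — `gpu` is an illustrative VkPhysicalDevice
// handle assumed to come from vkEnumeratePhysicalDevices, not part of this patch:
//
//   VkPhysicalDeviceProperties2 props{};
//   props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
//   vk::DispatchLoaderStatic dispatch;
//   dispatch.vkGetPhysicalDeviceProperties2( gpu, &props );
//   // Equivalent to calling ::vkGetPhysicalDeviceProperties2( gpu, &props ) directly,
//   // which is why dropping and re-emitting these wrappers is behaviour-neutral.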
- void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); - } - - void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } - - void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } - - void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } - - void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); - } - - void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } - - void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); - } - - VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); - } - - VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const 
VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( + physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); } - VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_get_display_properties2 === + + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT + VkResult + vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * 
pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); } - VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, + const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); } -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, + const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); - } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); - } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); - } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); - } -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - 
VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseDisplayEXT( physicalDevice, display ); + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); } - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); - } - - void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); } - VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); - } - void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueEndDebugUtilsLabelEXT( queue ); } - void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); } - VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueuePresentKHR( queue, pPresentInfo ); + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); } - VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } + + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } + + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + } + + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + 
VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); + } + + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sample_locations === + + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + } + + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } + + //=== VK_KHR_get_memory_requirements2 === + + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_acceleration_structure === + + VkResult + vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } + + void vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); + } + + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + 
commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } + + VkResult vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); + } + + VkResult + vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( + device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } + + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } + + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } + + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + } + + //=== VK_KHR_bind_memory2 === + + VkResult vkBindBufferMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + //=== VK_EXT_image_drm_format_modifier === + + VkResult vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + } + + //=== VK_EXT_validation_cache === + + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + } + + void vkDestroyValidationCacheEXT( VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + } + + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkGetValidationCacheDataEXT( VkDevice device, + VkValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } + + //=== VK_NV_shading_rate_image === + + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } + + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t 
viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( + commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } + + void + vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoarseSampleOrderNV( + commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } + + //=== VK_NV_ray_tracing === + + VkResult + vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + } + + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + } + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( + commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } + + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + 
callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + } + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCompileDeferredNV( device, pipeline, shader ); + } + + //=== VK_KHR_maintenance3 === + + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + } + + //=== VK_KHR_draw_indirect_count === + + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_EXT_external_memory_host === + + VkResult vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + } + + //=== VK_AMD_buffer_marker === + + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_NV_mesh_shader === + + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } + + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_NV_scissor_exclusive === + + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorNV( + commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, + uint32_t * pCheckpointDataCount, + VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_KHR_timeline_semaphore === + + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, + VkSemaphore semaphore, + uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphoresKHR( VkDevice device, + const VkSemaphoreWaitInfo * pWaitInfo, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphoreKHR( VkDevice device, + const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); + } + + //=== VK_INTEL_performance_query === + + VkResult vkInitializePerformanceApiINTEL( + VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } + + void vkUninitializePerformanceApiINTEL( VkDevice device ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } + + VkResult + vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult + vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + } + + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + } + + VkResult + vkReleasePerformanceConfigurationINTEL( VkDevice device, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } + + VkResult + vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); } - VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + VkResult vkGetPerformanceParameterINTEL( VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); } - VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + //=== VK_AMD_display_native_hdr === + + void vkSetLocalDimmingAMD( VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueueWaitIdle( queue ); + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetPhysicalDeviceFragmentShadingRatesKHR( + physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); + } + + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); + } + + //=== VK_EXT_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + } + + //=== VK_EXT_tooling_info === + + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolPropertiesEXT * pToolProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); + } + + //=== VK_NV_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_NV_coverage_reduction_mode === + + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t * pCombinationCount, + VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + physicalDevice, pCombinationCount, pCombinations ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( + physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } + + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult + vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } + + uint64_t 
vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } + + //=== VK_EXT_line_rasterization === + + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + void vkResetQueryPoolEXT( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( + commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable 
); + } + + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + //=== VK_KHR_deferred_host_operations === + + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } + + void vkDestroyDeferredOperationKHR( VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } + + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } + + VkResult vkGetDeferredOperationResultKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } + + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } + + //=== VK_KHR_pipeline_executable_properties === + + VkResult + vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } + + VkResult + vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } + + VkResult vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( + device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } + + //=== VK_NV_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } + + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, 
pGeneratedCommandsInfo ); + } + + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + + VkResult + vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } + + //=== VK_EXT_private_data === + + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlotEXT * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlotEXT( VkDevice device, + VkPrivateDataSlotEXT privateDataSlot, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateDataEXT( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateDataEXT( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfoKHR * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2KHR stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfoKHR * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, + const VkDependencyInfoKHR * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } + + void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, + 
VkPipelineStageFlags2KHR stage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2KHR( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2KHR * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); + } + + void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } + + void vkGetQueueCheckpointData2NV( VkQueue queue, + uint32_t * pCheckpointDataCount, + VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_NV_fragment_shading_rate_enums === + + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } + + //=== VK_KHR_copy_commands2 === + + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2KHR * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, + const VkCopyImageInfo2KHR * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } + + void + vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } + + void + vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, + const VkBlitImageInfo2KHR * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, + const VkResolveImageInfo2KHR * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } + + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( + device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + indirectDeviceAddress ); + } + + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } + + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } + + //=== VK_EXT_vertex_input_dynamic_state === + + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t 
vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( commandBuffer, + vertexBindingDescriptionCount, + pVertexBindingDescriptions, + vertexAttributeDescriptionCount, + pVertexAttributeDescriptions ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } + + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( + device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + VkResult vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } + + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_extended_dynamic_state2 === + + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, + uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } + + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } + + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( 
physicalDevice, queueFamilyIndex, window ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); } }; #endif class DispatchLoaderDynamic; -#if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC) -# if defined(VK_NO_PROTOTYPES) -# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 -# else -# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 -# endif +#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) +# if defined( VK_NO_PROTOTYPES ) +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 +# else +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 +# endif #endif -#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER) -# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { DispatchLoaderDynamic defaultDispatchLoaderDynamic; } - extern DispatchLoaderDynamic defaultDispatchLoaderDynamic; -# else -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic() -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE -# endif +#if !defined( VULKAN_HPP_STORAGE_API ) +# if defined( VULKAN_HPP_STORAGE_SHARED ) +# if defined( _MSC_VER ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __declspec( dllexport ) +# else +# define VULKAN_HPP_STORAGE_API __declspec( dllimport ) +# endif +# elif defined( __clang__ ) || defined( __GNUC__ ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) ) +# else +# define VULKAN_HPP_STORAGE_API +# endif +# else +# define VULKAN_HPP_STORAGE_API +# pragma warning Unknown import / export semantics +# endif +# else +# define VULKAN_HPP_STORAGE_API +# endif #endif -#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER_TYPE) -# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 - #define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic -# else -# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic -# endif +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ + namespace VULKAN_HPP_NAMESPACE \ + { \ + VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ + } + extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE +# endif +#endif + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic +# endif +#endif + +#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER ) +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT +#else +# define 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {} +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER #endif struct AllocationCallbacks; @@ -3325,76 +5471,82 @@ namespace VULKAN_HPP_NAMESPACE template <typename OwnerType, typename Dispatch> class ObjectDestroy { - public: - ObjectDestroy() - : m_owner() - , m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + public: + ObjectDestroy() = default; - ObjectDestroy( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectDestroy( OwnerType owner, + Optional<const AllocationCallbacks> allocationCallbacks + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; } - Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } - protected: - template <typename T> - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); - } + protected: + template <typename T> + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); + } - private: - OwnerType m_owner; - Optional<const AllocationCallbacks> m_allocationCallbacks; - Dispatch const* m_dispatch; + private: + OwnerType m_owner = {}; + Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; }; class NoParent; template <typename Dispatch> - class ObjectDestroy<NoParent,Dispatch> + class ObjectDestroy<NoParent, Dispatch> { - public: - ObjectDestroy() - : m_allocationCallbacks( nullptr ) - , m_dispatch( nullptr ) - {} + public: + ObjectDestroy() = default; - ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + {} - Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; } + Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } - protected: - template <typename T> - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_dispatch ); - t.destroy( m_allocationCallbacks, *m_dispatch ); - } + protected: + template <typename T> + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_dispatch ); + t.destroy( m_allocationCallbacks, *m_dispatch ); + } - private: - Optional<const AllocationCallbacks> m_allocationCallbacks; - Dispatch const* m_dispatch; + private: + Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; }; template <typename OwnerType, typename Dispatch> class ObjectFree { public: - ObjectFree() : m_owner(), m_allocationCallbacks( nullptr ), m_dispatch( nullptr ) {} + ObjectFree() = default; ObjectFree( OwnerType owner, - Optional<const AllocationCallbacks> allocationCallbacks = nullptr, - Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + Optional<const AllocationCallbacks> allocationCallbacks
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT : m_owner( owner ) , m_allocationCallbacks( allocationCallbacks ) , m_dispatch( &dispatch ) @@ -3419,2241 +5571,2514 @@ namespace VULKAN_HPP_NAMESPACE } private: - OwnerType m_owner; - Optional<const AllocationCallbacks> m_allocationCallbacks; - Dispatch const * m_dispatch; + OwnerType m_owner = {}; + Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + template <typename OwnerType, typename Dispatch> + class ObjectRelease + { + public: + ObjectRelease() = default; + + ObjectRelease( OwnerType owner, Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_dispatch( &dispatch ) + {} + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + protected: + template <typename T> + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.release( t, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Dispatch const * m_dispatch = nullptr; }; template <typename OwnerType, typename PoolType, typename Dispatch> class PoolFree { - public: - PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_pool( pool ) - , m_dispatch( &dispatch ) - {} + public: + PoolFree() = default; - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; } - PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; } + PoolFree( OwnerType owner, + PoolType pool, + Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_pool( pool ) + , m_dispatch( &dispatch ) + {} - protected: - template <typename T> - void destroy(T t) VULKAN_HPP_NOEXCEPT - { - m_owner.free( m_pool, t, *m_dispatch ); - } + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + PoolType getPool() const VULKAN_HPP_NOEXCEPT + { + return m_pool; + } - private: - OwnerType m_owner; - PoolType m_pool; - Dispatch const* m_dispatch; + protected: + template <typename T> + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + m_owner.free( m_pool, t, *m_dispatch ); + } + + private: + OwnerType m_owner = OwnerType(); + PoolType m_pool = PoolType(); + Dispatch const * m_dispatch = nullptr; }; - using Bool32 = uint32_t; + using Bool32 = uint32_t; using DeviceAddress = uint64_t; - using DeviceSize = uint64_t; - using SampleMask = uint32_t; + using DeviceSize = uint64_t; + using SampleMask = uint32_t; template <typename EnumType, EnumType value> struct CppType {}; -#ifdef VK_ENABLE_BETA_EXTENSIONS - enum class AccelerationStructureBuildTypeKHR + template <typename Type> + struct isVulkanHandleType { - eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, - eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, - eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; }; - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value ) + VULKAN_HPP_INLINE std::string toHexString( uint32_t value ) + { + std::stringstream stream; + stream << std::hex << value; + return stream.str(); + } + + //============= + //=== ENUMs === + //============= + + //=== VK_VERSION_1_0 === + + enum class Result + { + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed =
VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorUnknown = VK_ERROR_UNKNOWN, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorFragmentation = VK_ERROR_FRAGMENTATION, + eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( Result value ) { switch ( value ) { - case AccelerationStructureBuildTypeKHR::eHost : return "Host"; - case AccelerationStructureBuildTypeKHR::eDevice : return "Device"; - case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice"; - default: return "invalid"; + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory"; + case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case 
Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorUnknown: return "ErrorUnknown"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorFragmentation: return "ErrorFragmentation"; + case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; + case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case Result::eThreadIdleKHR: return "ThreadIdleKHR"; + case Result::eThreadDoneKHR: return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR: return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; + case Result::ePipelineCompileRequiredEXT: return "PipelineCompileRequiredEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; } } + + enum class StructureType + { + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, +
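// Note (editorial comment, not generator output): each StructureType enumerator + // carries exactly the value of its C VK_STRUCTURE_TYPE_* counterpart, so a + // static_cast between vk::StructureType and VkStructureType is lossless. +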
ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2 = 
VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfo = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + 
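// The *Info / *Properties values above tag structures that travel on pNext chains; vulkan.hpp's StructureChain fills sType in automatically. A minimal sketch, assuming the matching structs are in scope (illustrative only): + //   vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceMultiviewFeatures> chain; + //   // chain.get<vk::PhysicalDeviceMultiviewFeatures>().sType is pre-set to + //   // StructureType::ePhysicalDeviceMultiviewFeatures. + 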
eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, + ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, + ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, + ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, + eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + eDescriptorSetVariableDescriptorCountAllocateInfo = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + eDescriptorSetVariableDescriptorCountLayoutSupport = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + eFramebufferAttachmentImageInfo = 
VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + ePhysicalDeviceUniformBufferStandardLayoutFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + 
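// Enumerators inside VK_USE_PLATFORM_* guards exist only when the matching platform macro is defined before this header is included, e.g. (illustrative only, not part of the generated header): + //   #define VK_USE_PLATFORM_WIN32_KHR + //   #include <vulkan/vulkan.hpp> + 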
eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoProfileKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR, + eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR, + eVideoPictureResourceKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR, + eVideoGetMemoryPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR, + eVideoBindMemoryKHR = VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR, + eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR, + eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR, + eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR, + eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, + eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR, + eVideoReferenceSlotKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR, + eVideoQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR, + eVideoProfilesKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR, + ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR, + eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR, + eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - enum class AccelerationStructureMemoryRequirementsTypeKHR - { - eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, - eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, - eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, + ePipelineRasterizationStateStreamCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, + eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, + eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, + eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT, + eVideoEncodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT, + eVideoEncodeH264SessionParametersCreateInfoEXT = + 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT, + eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT, + eVideoEncodeH264NaluSliceEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT, + eVideoEncodeH264EmitPictureParametersEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT, + eVideoEncodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT, + eVideoDecodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT, + eVideoDecodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT, + eVideoDecodeH264PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT, + eVideoDecodeH264MvcEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT, + eVideoDecodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT, + eVideoDecodeH264SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoDecodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoDecodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, +#if defined( VK_USE_PLATFORM_GGP ) + eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, +#if defined( VK_USE_PLATFORM_VI_NN ) + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, +#endif /*VK_USE_PLATFORM_VI_NN*/ + ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, +#if 
defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, + eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + eCommandBufferInheritanceConditionalRenderingInfoEXT = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, + ePhysicalDeviceConditionalRenderingFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, + eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, + ePipelineRasterizationDepthClipStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, + 
ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, + eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, + ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, + eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, + ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, + ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, + eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, + eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, + eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, + eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, + eMultisamplePropertiesEXT = 
VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, + eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, + eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceAccelerationStructureFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR, + ePhysicalDeviceAccelerationStructurePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR, + ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR, + ePhysicalDeviceRayTracingPipelinePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, + ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR, + ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, + ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, + ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, + eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, + ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, + eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, + 
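// In the finalized VK_KHR_acceleration_structure, build sizes are queried via vkGetAccelerationStructureBuildSizesKHR and returned in a structure tagged eAccelerationStructureBuildSizesInfoKHR above; this is why the provisional AccelerationStructureMemoryRequirementsTypeKHR enum is dropped elsewhere in this diff. A hedged sketch (not part of the generated header): + //   vk::AccelerationStructureBuildSizesInfoKHR sizes = device.getAccelerationStructureBuildSizesKHR( + //       vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, maxPrimitiveCounts ); + 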
eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, + eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, + ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePipelineViewportShadingRateImageStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, + eAccelerationStructureMemoryRequirementsInfoNV = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, + eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, + eFilterCubicImageViewImageFormatPropertiesEXT = + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, + ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, +#if defined( 
VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT, + eVideoDecodeH265SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT, + eVideoDecodeH265SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoDecodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoDecodeH265ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT, + eVideoDecodeH265PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT, + eVideoDecodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + ePipelineVertexInputDivisorStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_GGP ) + ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, + ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceShaderImageFootprintFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, + ePipelineViewportExclusiveScissorStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, + ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, + eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, + eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, + eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, + ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, + ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, + ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, + ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, + ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, + eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, + eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + 
eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMapPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, + eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, + ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + ePipelineFragmentShadingRateStateCreateInfoKHR = + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceFragmentShadingRatePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, + ePhysicalDeviceFragmentShadingRateFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, + ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, + ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT, + ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, + ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, + eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, + eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, + ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, + ePhysicalDeviceBufferDeviceAddressFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, + ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, + eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, + ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, + eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCoverageReductionModeFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, + ePipelineCoverageReductionStateCreateInfoNV = 
VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, + eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, + ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, + ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, + ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT, + ePipelineRasterizationProvokingVertexStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT, + ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, + eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, + eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicStateFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, + ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, + ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, + ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, + ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, + ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, + 
ePhysicalDeviceInheritedViewportScissorFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV, + eCommandBufferInheritanceViewportScissorInfoNV = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV, + ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + eCommandBufferInheritanceRenderPassTransformInfoQCOM = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, + eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT, + eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT, + eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT, + ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT, + ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT, + eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT, + ePhysicalDeviceCustomBorderColorPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT, + ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, + eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, + ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR, + eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, + eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR, + eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR, + eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR, + eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, + eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR, + eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR, + eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR, + ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, + ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, + ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, + ePipelineFragmentShadingRateEnumStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, + ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2PropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, + eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, + ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, + eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, + eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, + eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, + eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, + eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, + eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, + eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, + eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, + eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, + eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, + eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, + ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, + eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, + ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, + eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, + eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, + eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, + eMemoryGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA, + eSemaphoreGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ePhysicalDeviceExtendedDynamicState2FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenSurfaceCreateInfoQNX = 
VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT, + ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT, + ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, + eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + 
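// The *KHR / *EXT enumerators in this trailing compatibility block are source-compatibility aliases for extensions later promoted to core; each compares equal to its core counterpart, e.g. (illustrative only): + //   static_assert( StructureType::eAttachmentDescription2KHR == StructureType::eAttachmentDescription2, + //                  "alias of the core enumerator" ); + 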
eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, + ePhysicalDeviceBufferDeviceAddressFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, + ePhysicalDeviceDepthStencilResolvePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + ePhysicalDeviceImagelessFramebufferFeaturesKHR = + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, + ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, + ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePipelineTessellationDomainOriginStateCreateInfoKHR = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eRenderPassInputAttachmentAspectCreateInfoKHR = + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + eSamplerYcbcrConversionCreateInfoKHR = 
+      VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
+    eSamplerYcbcrConversionImageFormatPropertiesKHR =
+      VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
+    eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
+    eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
+    eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
+    eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+    eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR
   };
-  using AccelerationStructureMemoryRequirementsTypeNV = AccelerationStructureMemoryRequirementsTypeKHR;
-  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeKHR value )
+  VULKAN_HPP_INLINE std::string to_string( StructureType value )
   {
     switch ( value )
     {
-      case AccelerationStructureMemoryRequirementsTypeKHR::eObject : return "Object";
-      case AccelerationStructureMemoryRequirementsTypeKHR::eBuildScratch : return "BuildScratch";
-      case AccelerationStructureMemoryRequirementsTypeKHR::eUpdateScratch : return "UpdateScratch";
-      default: return "invalid";
+      case StructureType::eApplicationInfo: return "ApplicationInfo";
+      case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
+      case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
+      case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
+      case StructureType::eSubmitInfo: return "SubmitInfo";
+      case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
+      case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
+      case StructureType::eBindSparseInfo: return "BindSparseInfo";
+      case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
+      case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
+      case StructureType::eEventCreateInfo: return "EventCreateInfo";
+      case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
+      case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
+      case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
+      case StructureType::eImageCreateInfo: return "ImageCreateInfo";
+      case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
+      case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
+      case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
+      case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
+      case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
+      case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
+      case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
+      case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
+      case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
+      case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
+      case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
+      case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
+      case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
+      case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
+      case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
+      case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
+      case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
+      case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
+      case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
+      case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
+      case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
+      case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
+      case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
+      case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
+      case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
+      case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
+      case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
+      case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
+      case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
+      case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
+      case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
+      case StructureType::eMemoryBarrier: return "MemoryBarrier";
+      case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
+      case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
+      case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties";
+      case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo";
+      case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo";
+      case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures";
+      case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements";
+      case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo";
+      case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo";
+      case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo";
+      case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo";
+      case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo";
+      case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo";
+      case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo";
+      case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo";
+      case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties";
+      case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo";
+      case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2";
+      case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2";
+      case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2";
+      case StructureType::eMemoryRequirements2: return "MemoryRequirements2";
+      case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2";
+      case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2";
+      case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2";
+      case StructureType::eFormatProperties2: return "FormatProperties2";
+      case StructureType::eImageFormatProperties2: return "ImageFormatProperties2";
+      case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2";
+      case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2";
+      case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2";
+      case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2";
+      case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2";
+      case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties";
+      case StructureType::eRenderPassInputAttachmentAspectCreateInfo:
+        return "RenderPassInputAttachmentAspectCreateInfo";
+      case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo";
+      case StructureType::ePipelineTessellationDomainOriginStateCreateInfo:
+        return "PipelineTessellationDomainOriginStateCreateInfo";
+      case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo";
+      case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures";
+      case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties";
+      case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures";
+      case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo";
+      case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures";
+      case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties";
+      case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2";
+      case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo";
+      case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo";
+      case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo";
+      case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo";
+      case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures:
+        return "PhysicalDeviceSamplerYcbcrConversionFeatures";
+      case StructureType::eSamplerYcbcrConversionImageFormatProperties:
+        return "SamplerYcbcrConversionImageFormatProperties";
+      case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo";
+      case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo";
+      case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties";
+      case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo";
"ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParametersFeatures: + return "PhysicalDeviceShaderDrawParametersFeatures"; + case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features"; + case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties"; + case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features"; + case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties"; + case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo"; + case StructureType::eAttachmentDescription2: return "AttachmentDescription2"; + case StructureType::eAttachmentReference2: return "AttachmentReference2"; + case StructureType::eSubpassDescription2: return "SubpassDescription2"; + case StructureType::eSubpassDependency2: return "SubpassDependency2"; + case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2"; + case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo"; + case StructureType::eSubpassEndInfo: return "SubpassEndInfo"; + case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures"; + case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties"; + case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features"; + case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features"; + case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo: + return "DescriptorSetLayoutBindingFlagsCreateInfo"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures"; + case StructureType::ePhysicalDeviceDescriptorIndexingProperties: + return "PhysicalDeviceDescriptorIndexingProperties"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo: + return "DescriptorSetVariableDescriptorCountAllocateInfo"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport: + return "DescriptorSetVariableDescriptorCountLayoutSupport"; + case StructureType::ePhysicalDeviceDepthStencilResolveProperties: + return "PhysicalDeviceDepthStencilResolveProperties"; + case 
+      case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve";
+      case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures";
+      case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo";
+      case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties:
+        return "PhysicalDeviceSamplerFilterMinmaxProperties";
+      case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo";
+      case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures";
+      case StructureType::ePhysicalDeviceImagelessFramebufferFeatures:
+        return "PhysicalDeviceImagelessFramebufferFeatures";
+      case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo";
+      case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo";
+      case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo";
+      case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures:
+        return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
+      case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures:
+        return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
+      case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures:
+        return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
+      case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout";
+      case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout";
+      case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures";
+      case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures";
+      case StructureType::ePhysicalDeviceTimelineSemaphoreProperties:
+        return "PhysicalDeviceTimelineSemaphoreProperties";
+      case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo";
+      case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo";
+      case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo";
+      case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures:
+        return "PhysicalDeviceBufferDeviceAddressFeatures";
+      case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo";
+      case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo";
+      case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo";
+      case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo";
+      case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
+      case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
+      case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR";
+      case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR";
+      case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR";
+      case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR";
+      case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR";
"DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: + return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoProfileKHR: return "VideoProfileKHR"; + case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR"; + case StructureType::eVideoPictureResourceKHR: return "VideoPictureResourceKHR"; + case StructureType::eVideoGetMemoryPropertiesKHR: return "VideoGetMemoryPropertiesKHR"; + case StructureType::eVideoBindMemoryKHR: return "VideoBindMemoryKHR"; + case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR"; + case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR"; + case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR"; + case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR"; + case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR"; + case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR"; + case StructureType::eVideoReferenceSlotKHR: return "VideoReferenceSlotKHR"; + case StructureType::eVideoQueueFamilyProperties2KHR: return "VideoQueueFamilyProperties2KHR"; + case StructureType::eVideoProfilesKHR: return "VideoProfilesKHR"; + case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR"; + case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR"; + case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: + return 
"PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: + return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: + return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX"; + case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX"; + case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX"; + case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; + case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoEncodeH264CapabilitiesEXT: return "VideoEncodeH264CapabilitiesEXT"; + case StructureType::eVideoEncodeH264SessionCreateInfoEXT: return "VideoEncodeH264SessionCreateInfoEXT"; + case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT: + return "VideoEncodeH264SessionParametersCreateInfoEXT"; + case StructureType::eVideoEncodeH264SessionParametersAddInfoEXT: + return "VideoEncodeH264SessionParametersAddInfoEXT"; + case StructureType::eVideoEncodeH264VclFrameInfoEXT: return "VideoEncodeH264VclFrameInfoEXT"; + case StructureType::eVideoEncodeH264DpbSlotInfoEXT: return "VideoEncodeH264DpbSlotInfoEXT"; + case StructureType::eVideoEncodeH264NaluSliceEXT: return "VideoEncodeH264NaluSliceEXT"; + case StructureType::eVideoEncodeH264EmitPictureParametersEXT: return "VideoEncodeH264EmitPictureParametersEXT"; + case StructureType::eVideoEncodeH264ProfileEXT: return "VideoEncodeH264ProfileEXT"; + case StructureType::eVideoDecodeH264CapabilitiesEXT: return "VideoDecodeH264CapabilitiesEXT"; + case StructureType::eVideoDecodeH264SessionCreateInfoEXT: return "VideoDecodeH264SessionCreateInfoEXT"; + case StructureType::eVideoDecodeH264PictureInfoEXT: return "VideoDecodeH264PictureInfoEXT"; + case StructureType::eVideoDecodeH264MvcEXT: return "VideoDecodeH264MvcEXT"; + case StructureType::eVideoDecodeH264ProfileEXT: return "VideoDecodeH264ProfileEXT"; + case StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT: + return "VideoDecodeH264SessionParametersCreateInfoEXT"; + case StructureType::eVideoDecodeH264SessionParametersAddInfoEXT: + return "VideoDecodeH264SessionParametersAddInfoEXT"; + case StructureType::eVideoDecodeH264DpbSlotInfoEXT: return "VideoDecodeH264DpbSlotInfoEXT"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: + return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; 
+#if defined( VK_USE_PLATFORM_VI_NN )
+      case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
+#endif /*VK_USE_PLATFORM_VI_NN*/
+      case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT:
+        return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT";
+      case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT";
+      case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR";
+      case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR";
+      case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR";
+      case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR";
+      case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR";
+      case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR";
+      case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR";
+      case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR";
+      case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR";
+      case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR:
+        return "PhysicalDevicePushDescriptorPropertiesKHR";
+      case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT:
+        return "CommandBufferInheritanceConditionalRenderingInfoEXT";
+      case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT:
+        return "PhysicalDeviceConditionalRenderingFeaturesEXT";
+      case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT";
+      case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR";
+      case StructureType::ePipelineViewportWScalingStateCreateInfoNV:
+        return "PipelineViewportWScalingStateCreateInfoNV";
+      case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
+      case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
+      case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
+      case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
+      case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
+      case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE";
+      case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX:
+        return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
+      case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT:
+        return "PhysicalDeviceDiscardRectanglePropertiesEXT";
"PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: + return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: + return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT"; + case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: + return "PipelineRasterizationDepthClipStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR: + return "PhysicalDevicePerformanceQueryFeaturesKHR"; + case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR: + return "PhysicalDevicePerformanceQueryPropertiesKHR"; + case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR"; + case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR"; + case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR"; + case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR"; + case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR"; +#if defined( VK_USE_PLATFORM_IOS_MVK ) + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case 
+      case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID";
+      case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID:
+        return "AndroidHardwareBufferFormatPropertiesANDROID";
+      case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID";
+      case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID:
+        return "MemoryGetAndroidHardwareBufferInfoANDROID";
+      case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT:
+        return "PhysicalDeviceInlineUniformBlockFeaturesEXT";
+      case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT:
+        return "PhysicalDeviceInlineUniformBlockPropertiesEXT";
+      case StructureType::eWriteDescriptorSetInlineUniformBlockEXT: return "WriteDescriptorSetInlineUniformBlockEXT";
+      case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT:
+        return "DescriptorPoolInlineUniformBlockCreateInfoEXT";
+      case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
+      case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
+      case StructureType::ePipelineSampleLocationsStateCreateInfoEXT:
+        return "PipelineSampleLocationsStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT:
+        return "PhysicalDeviceSampleLocationsPropertiesEXT";
+      case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT:
+        return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT:
+        return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
+      case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT:
+        return "PipelineColorBlendAdvancedStateCreateInfoEXT";
+      case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV";
+      case StructureType::eWriteDescriptorSetAccelerationStructureKHR:
+        return "WriteDescriptorSetAccelerationStructureKHR";
+      case StructureType::eAccelerationStructureBuildGeometryInfoKHR:
+        return "AccelerationStructureBuildGeometryInfoKHR";
+      case StructureType::eAccelerationStructureDeviceAddressInfoKHR:
+        return "AccelerationStructureDeviceAddressInfoKHR";
+      case StructureType::eAccelerationStructureGeometryAabbsDataKHR:
+        return "AccelerationStructureGeometryAabbsDataKHR";
+      case StructureType::eAccelerationStructureGeometryInstancesDataKHR:
+        return "AccelerationStructureGeometryInstancesDataKHR";
+      case StructureType::eAccelerationStructureGeometryTrianglesDataKHR:
+        return "AccelerationStructureGeometryTrianglesDataKHR";
+      case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR";
+      case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR";
+      case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR";
+      case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR";
+      case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR";
+      case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR:
+        return "PhysicalDeviceAccelerationStructureFeaturesKHR";
+      case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR:
+        return "PhysicalDeviceAccelerationStructurePropertiesKHR";
+      case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR";
+      case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR";
+      case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR:
+        return "PhysicalDeviceRayTracingPipelineFeaturesKHR";
+      case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR:
+        return "PhysicalDeviceRayTracingPipelinePropertiesKHR";
+      case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR";
+      case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR";
+      case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR";
+      case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR";
+      case StructureType::ePipelineCoverageModulationStateCreateInfoNV:
+        return "PipelineCoverageModulationStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV:
+        return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
+      case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT";
+      case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT:
+        return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
+      case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT:
+        return "ImageDrmFormatModifierExplicitCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT";
+      case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT";
+      case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR:
+        return "PhysicalDevicePortabilitySubsetFeaturesKHR";
+      case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR:
+        return "PhysicalDevicePortabilitySubsetPropertiesKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV:
+        return "PipelineViewportShadingRateImageStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV";
+      case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV:
+        return "PhysicalDeviceShadingRateImagePropertiesNV";
+      case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV:
+        return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
+      case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV";
+      case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV";
+      case StructureType::eGeometryNV: return "GeometryNV";
+      case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV";
+      case StructureType::eGeometryAabbNV: return "GeometryAabbNV";
+      case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV";
+      case StructureType::eWriteDescriptorSetAccelerationStructureNV:
+        return "WriteDescriptorSetAccelerationStructureNV";
+      case StructureType::eAccelerationStructureMemoryRequirementsInfoNV:
+        return "AccelerationStructureMemoryRequirementsInfoNV";
+      case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV";
+      case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV";
+      case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV";
+      case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV:
+        return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
+      case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV:
+        return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT:
+        return "PhysicalDeviceImageViewImageFormatInfoEXT";
+      case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT:
+        return "FilterCubicImageViewImageFormatPropertiesEXT";
+      case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT";
+      case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT";
+      case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT";
+      case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT:
+        return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
+      case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR";
+      case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD";
+      case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT";
+      case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case StructureType::eVideoDecodeH265CapabilitiesEXT: return "VideoDecodeH265CapabilitiesEXT";
+      case StructureType::eVideoDecodeH265SessionCreateInfoEXT: return "VideoDecodeH265SessionCreateInfoEXT";
+      case StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT:
+        return "VideoDecodeH265SessionParametersCreateInfoEXT";
+      case StructureType::eVideoDecodeH265SessionParametersAddInfoEXT:
+        return "VideoDecodeH265SessionParametersAddInfoEXT";
+      case StructureType::eVideoDecodeH265ProfileEXT: return "VideoDecodeH265ProfileEXT";
+      case StructureType::eVideoDecodeH265PictureInfoEXT: return "VideoDecodeH265PictureInfoEXT";
+      case StructureType::eVideoDecodeH265DpbSlotInfoEXT: return "VideoDecodeH265DpbSlotInfoEXT";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT:
+        return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
+      case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT:
+        return "PipelineVertexInputDivisorStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT:
+        return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
+#if defined( VK_USE_PLATFORM_GGP )
+      case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP";
+#endif /*VK_USE_PLATFORM_GGP*/
+      case StructureType::ePipelineCreationFeedbackCreateInfoEXT: return "PipelineCreationFeedbackCreateInfoEXT";
return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV: + return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; + case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: + return "PhysicalDeviceShaderImageFootprintFeaturesNV"; + case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: + return "PipelineViewportExclusiveScissorStateCreateInfoNV"; + case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; + case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; + case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: + return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; + case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; + case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL"; + case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL"; + case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL"; + case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL"; + case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL"; + case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT"; + case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD"; + case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR: + return "PhysicalDeviceShaderTerminateInvocationFeaturesKHR"; +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: + return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: + return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; + case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: + return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT: + return "PhysicalDeviceSubgroupSizeControlPropertiesEXT"; + case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT: + return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT: + return "PhysicalDeviceSubgroupSizeControlFeaturesEXT"; + case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR"; + case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: + return "PipelineFragmentShadingRateStateCreateInfoKHR"; + case 
+      case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR:
+        return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR:
+        return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
+      case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR";
+      case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD";
+      case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD";
+      case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT:
+        return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
+      case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT";
+      case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT";
+      case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT";
+      case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR";
+      case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV:
+        return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT:
+        return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
+      case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT";
+      case StructureType::ePhysicalDeviceToolPropertiesEXT: return "PhysicalDeviceToolPropertiesEXT";
+      case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT";
+      case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV:
+        return "PhysicalDeviceCooperativeMatrixFeaturesNV";
+      case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV:
+        return "PhysicalDeviceCooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV:
+        return "PhysicalDeviceCoverageReductionModeFeaturesNV";
+      case StructureType::ePipelineCoverageReductionStateCreateInfoNV:
+        return "PipelineCoverageReductionStateCreateInfoNV";
+      case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV";
+      case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT:
+        return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
+      case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT:
+        return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
+      case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT";
+      case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT:
+        return "PipelineRasterizationProvokingVertexStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT:
+        return "PhysicalDeviceProvokingVertexPropertiesEXT";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT";
+      case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT:
+        return "SurfaceCapabilitiesFullScreenExclusiveEXT";
+      case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT";
+      case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT:
+        return "PhysicalDeviceLineRasterizationFeaturesEXT";
+      case StructureType::ePipelineRasterizationLineStateCreateInfoEXT:
+        return "PipelineRasterizationLineStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT:
+        return "PhysicalDeviceLineRasterizationPropertiesEXT";
+      case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT:
+        return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
+      case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
+      case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT:
+        return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
+      case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR:
+        return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
+      case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR";
+      case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR";
+      case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR";
+      case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR";
+      case StructureType::ePipelineExecutableInternalRepresentationKHR:
+        return "PipelineExecutableInternalRepresentationKHR";
+      case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT:
+        return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT";
+      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV:
+        return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
+      case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV";
+      case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV";
+      case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV";
+      case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV";
+      case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV";
+      case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV:
+        return "GeneratedCommandsMemoryRequirementsInfoNV";
+      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV:
+        return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
+      case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV:
+        return "PhysicalDeviceInheritedViewportScissorFeaturesNV";
+      case StructureType::eCommandBufferInheritanceViewportScissorInfoNV:
+        return "CommandBufferInheritanceViewportScissorInfoNV";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT:
+        return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT:
+        return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT";
+      case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM:
+        return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
+      case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM";
+      case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT:
+        return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
+      case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT";
+      case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT";
"PhysicalDeviceRobustness2FeaturesEXT"; + case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT"; + case StructureType::eSamplerCustomBorderColorCreateInfoEXT: return "SamplerCustomBorderColorCreateInfoEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: + return "PhysicalDeviceCustomBorderColorPropertiesEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: + return "PhysicalDeviceCustomBorderColorFeaturesEXT"; + case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePhysicalDevicePrivateDataFeaturesEXT: return "PhysicalDevicePrivateDataFeaturesEXT"; + case StructureType::eDevicePrivateDataCreateInfoEXT: return "DevicePrivateDataCreateInfoEXT"; + case StructureType::ePrivateDataSlotCreateInfoEXT: return "PrivateDataSlotCreateInfoEXT"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT: + return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR"; + case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: + return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; + case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV"; + case StructureType::eMemoryBarrier2KHR: return "MemoryBarrier2KHR"; + case StructureType::eBufferMemoryBarrier2KHR: return "BufferMemoryBarrier2KHR"; + case StructureType::eImageMemoryBarrier2KHR: return "ImageMemoryBarrier2KHR"; + case StructureType::eDependencyInfoKHR: return "DependencyInfoKHR"; + case StructureType::eSubmitInfo2KHR: return "SubmitInfo2KHR"; + case StructureType::eSemaphoreSubmitInfoKHR: return "SemaphoreSubmitInfoKHR"; + case StructureType::eCommandBufferSubmitInfoKHR: return "CommandBufferSubmitInfoKHR"; + case StructureType::ePhysicalDeviceSynchronization2FeaturesKHR: + return "PhysicalDeviceSynchronization2FeaturesKHR"; + case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; + case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; + case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR: + return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR"; + case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR: + return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV: + return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV: + return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV"; + case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV: + return "PipelineFragmentShadingRateEnumStateCreateInfoNV"; + case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT: + return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT: + return "PhysicalDeviceFragmentDensityMap2FeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT: + return "PhysicalDeviceFragmentDensityMap2PropertiesEXT"; + case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM"; + case 
+      case StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT: return "PhysicalDeviceImageRobustnessFeaturesEXT";
+      case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR:
+        return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
+      case StructureType::eCopyBufferInfo2KHR: return "CopyBufferInfo2KHR";
+      case StructureType::eCopyImageInfo2KHR: return "CopyImageInfo2KHR";
+      case StructureType::eCopyBufferToImageInfo2KHR: return "CopyBufferToImageInfo2KHR";
+      case StructureType::eCopyImageToBufferInfo2KHR: return "CopyImageToBufferInfo2KHR";
+      case StructureType::eBlitImageInfo2KHR: return "BlitImageInfo2KHR";
+      case StructureType::eResolveImageInfo2KHR: return "ResolveImageInfo2KHR";
+      case StructureType::eBufferCopy2KHR: return "BufferCopy2KHR";
+      case StructureType::eImageCopy2KHR: return "ImageCopy2KHR";
+      case StructureType::eImageBlit2KHR: return "ImageBlit2KHR";
+      case StructureType::eBufferImageCopy2KHR: return "BufferImageCopy2KHR";
+      case StructureType::eImageResolve2KHR: return "ImageResolve2KHR";
+      case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+      case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT";
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+      case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE:
+        return "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE";
+      case StructureType::eMutableDescriptorTypeCreateInfoVALVE: return "MutableDescriptorTypeCreateInfoVALVE";
+      case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT:
+        return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT";
+      case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT";
+      case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA";
+      case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA";
+      case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA";
+      case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA";
+      case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT:
+        return "PhysicalDeviceExtendedDynamicState2FeaturesEXT";
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+      case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX";
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+      case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT:
+        return "PhysicalDeviceColorWriteEnableFeaturesEXT";
+      case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT";
+      case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT:
+        return "PhysicalDeviceGlobalPriorityQueryFeaturesEXT";
+      case StructureType::eQueueFamilyGlobalPriorityPropertiesEXT: return "QueueFamilyGlobalPriorityPropertiesEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }
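[Editorial note: the following sketch is illustrative only and is not part of the patch. It shows how the exhaustive to_string( StructureType ) overload added above might be exercised; the structure types chosen are arbitrary examples, and the out-of-range value simply demonstrates the new hex-printing default branch.]

// Minimal usage sketch, assuming vulkan.hpp is on the include path.
#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
  vk::ApplicationInfo appInfo{};      // sType is pre-initialized to eApplicationInfo
  vk::BufferCreateInfo bufferInfo{};  // sType is pre-initialized to eBufferCreateInfo
  std::cout << vk::to_string( appInfo.sType ) << '\n';     // "ApplicationInfo"
  std::cout << vk::to_string( bufferInfo.sType ) << '\n';  // "BufferCreateInfo"
  // Unrecognized values no longer print "invalid"; they fall into the new
  // default branch and are rendered as "invalid ( <hex> )":
  std::cout << vk::to_string( static_cast<vk::StructureType>( 0x7fffffff ) ) << '\n';
}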
VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR, + eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX, + eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX, + eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, + ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, + eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, + eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, + ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR }; - using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value ) + VULKAN_HPP_INLINE std::string to_string( ObjectType value ) { switch ( value ) { - case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel"; - case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel"; - default: return "invalid"; + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return "PhysicalDevice"; + case ObjectType::eDevice: return "Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case 
ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return "Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ObjectType::eVideoSessionKHR: return "VideoSessionKHR"; + case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ObjectType::eCuModuleNVX: return "CuModuleNVX"; + case ObjectType::eCuFunctionNVX: return "CuFunctionNVX"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV"; + case ObjectType::ePrivateDataSlotEXT: return "PrivateDataSlotEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; } } - enum class AccessFlagBits : VkAccessFlags + enum class VendorId { - eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, - eIndexRead = VK_ACCESS_INDEX_READ_BIT, - eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, - eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, - eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, - eShaderRead = VK_ACCESS_SHADER_READ_BIT, - eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, - eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, - eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, - eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, - eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, - eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, - eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, - eHostRead = VK_ACCESS_HOST_READ_BIT, - eHostWrite = VK_ACCESS_HOST_WRITE_BIT, - eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, - eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, - eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, - eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, - eTransformFeedbackCounterWriteEXT =
VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, - eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, - eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, - eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, - eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, - eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, - eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, - eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, - eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, - eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, - eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV + eVIV = VK_VENDOR_ID_VIV, + eVSI = VK_VENDOR_ID_VSI, + eKazan = VK_VENDOR_ID_KAZAN, + eCodeplay = VK_VENDOR_ID_CODEPLAY, + eMESA = VK_VENDOR_ID_MESA, + ePocl = VK_VENDOR_ID_POCL }; - VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) + VULKAN_HPP_INLINE std::string to_string( VendorId value ) { switch ( value ) { - case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead"; - case AccessFlagBits::eIndexRead : return "IndexRead"; - case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead"; - case AccessFlagBits::eUniformRead : return "UniformRead"; - case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead"; - case AccessFlagBits::eShaderRead : return "ShaderRead"; - case AccessFlagBits::eShaderWrite : return "ShaderWrite"; - case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead"; - case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite"; - case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead"; - case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite"; - case AccessFlagBits::eTransferRead : return "TransferRead"; - case AccessFlagBits::eTransferWrite : return "TransferWrite"; - case AccessFlagBits::eHostRead : return "HostRead"; - case AccessFlagBits::eHostWrite : return "HostWrite"; - case AccessFlagBits::eMemoryRead : return "MemoryRead"; - case AccessFlagBits::eMemoryWrite : return "MemoryWrite"; - case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT"; - case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT"; - case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT"; - case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT"; - case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT"; - case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR"; - case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR"; - case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV"; - case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT"; - case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV"; - case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV"; - default: return "invalid"; + case VendorId::eVIV: return "VIV"; + case VendorId::eVSI: return "VSI"; + case VendorId::eKazan: return "Kazan"; + case VendorId::eCodeplay: return "Codeplay"; + case VendorId::eMESA: return 
"MESA"; + case VendorId::ePocl: return "Pocl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR - {}; - - VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR ) + enum class PipelineCacheHeaderVersion { - return "(void)"; - } - - enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags - { - eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE }; - VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value ) + VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value ) { switch ( value ) { - case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias"; - default: return "invalid"; - } - } - - enum class AttachmentLoadOp - { - eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, - eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, - eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE - }; - - VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value ) - { - switch ( value ) - { - case AttachmentLoadOp::eLoad : return "Load"; - case AttachmentLoadOp::eClear : return "Clear"; - case AttachmentLoadOp::eDontCare : return "DontCare"; - default: return "invalid"; - } - } - - enum class AttachmentStoreOp - { - eStore = VK_ATTACHMENT_STORE_OP_STORE, - eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, - eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM - }; - - VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) - { - switch ( value ) - { - case AttachmentStoreOp::eStore : return "Store"; - case AttachmentStoreOp::eDontCare : return "DontCare"; - case AttachmentStoreOp::eNoneQCOM : return "NoneQCOM"; - default: return "invalid"; - } - } - - enum class BlendFactor - { - eZero = VK_BLEND_FACTOR_ZERO, - eOne = VK_BLEND_FACTOR_ONE, - eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, - eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, - eDstColor = VK_BLEND_FACTOR_DST_COLOR, - eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, - eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, - eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, - eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, - eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, - eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, - eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, - eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, - eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, - eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, - eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, - eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, - eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, - eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - }; - - VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) - { - switch ( value ) - { - case BlendFactor::eZero : return "Zero"; - case BlendFactor::eOne : return "One"; - case BlendFactor::eSrcColor : return "SrcColor"; - case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor"; - case BlendFactor::eDstColor : return "DstColor"; - case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor"; - case BlendFactor::eSrcAlpha : return "SrcAlpha"; - case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha"; - case BlendFactor::eDstAlpha : return "DstAlpha"; - case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha"; - case BlendFactor::eConstantColor : return "ConstantColor"; - case BlendFactor::eOneMinusConstantColor : return 
"OneMinusConstantColor"; - case BlendFactor::eConstantAlpha : return "ConstantAlpha"; - case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha"; - case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate"; - case BlendFactor::eSrc1Color : return "Src1Color"; - case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color"; - case BlendFactor::eSrc1Alpha : return "Src1Alpha"; - case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha"; - default: return "invalid"; - } - } - - enum class BlendOp - { - eAdd = VK_BLEND_OP_ADD, - eSubtract = VK_BLEND_OP_SUBTRACT, - eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, - eMin = VK_BLEND_OP_MIN, - eMax = VK_BLEND_OP_MAX, - eZeroEXT = VK_BLEND_OP_ZERO_EXT, - eSrcEXT = VK_BLEND_OP_SRC_EXT, - eDstEXT = VK_BLEND_OP_DST_EXT, - eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, - eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, - eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, - eDstInEXT = VK_BLEND_OP_DST_IN_EXT, - eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, - eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, - eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, - eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, - eXorEXT = VK_BLEND_OP_XOR_EXT, - eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, - eScreenEXT = VK_BLEND_OP_SCREEN_EXT, - eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, - eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, - eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, - eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, - eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, - eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, - eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, - eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, - eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, - eInvertEXT = VK_BLEND_OP_INVERT_EXT, - eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, - eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, - eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, - eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, - eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, - ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, - eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, - eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, - eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, - eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, - eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, - ePlusEXT = VK_BLEND_OP_PLUS_EXT, - ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, - ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, - ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, - eMinusEXT = VK_BLEND_OP_MINUS_EXT, - eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, - eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, - eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, - eRedEXT = VK_BLEND_OP_RED_EXT, - eGreenEXT = VK_BLEND_OP_GREEN_EXT, - eBlueEXT = VK_BLEND_OP_BLUE_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( BlendOp value ) - { - switch ( value ) - { - case BlendOp::eAdd : return "Add"; - case BlendOp::eSubtract : return "Subtract"; - case BlendOp::eReverseSubtract : return "ReverseSubtract"; - case BlendOp::eMin : return "Min"; - case BlendOp::eMax : return "Max"; - case BlendOp::eZeroEXT : return "ZeroEXT"; - case BlendOp::eSrcEXT : return "SrcEXT"; - case BlendOp::eDstEXT : return "DstEXT"; - case BlendOp::eSrcOverEXT : return "SrcOverEXT"; - case BlendOp::eDstOverEXT : return "DstOverEXT"; - case BlendOp::eSrcInEXT : return "SrcInEXT"; - case BlendOp::eDstInEXT : return "DstInEXT"; - case BlendOp::eSrcOutEXT : return "SrcOutEXT"; - case BlendOp::eDstOutEXT : return "DstOutEXT"; - case BlendOp::eSrcAtopEXT : return "SrcAtopEXT"; - case BlendOp::eDstAtopEXT : return "DstAtopEXT"; - case BlendOp::eXorEXT : return "XorEXT"; - 
case BlendOp::eMultiplyEXT : return "MultiplyEXT"; - case BlendOp::eScreenEXT : return "ScreenEXT"; - case BlendOp::eOverlayEXT : return "OverlayEXT"; - case BlendOp::eDarkenEXT : return "DarkenEXT"; - case BlendOp::eLightenEXT : return "LightenEXT"; - case BlendOp::eColordodgeEXT : return "ColordodgeEXT"; - case BlendOp::eColorburnEXT : return "ColorburnEXT"; - case BlendOp::eHardlightEXT : return "HardlightEXT"; - case BlendOp::eSoftlightEXT : return "SoftlightEXT"; - case BlendOp::eDifferenceEXT : return "DifferenceEXT"; - case BlendOp::eExclusionEXT : return "ExclusionEXT"; - case BlendOp::eInvertEXT : return "InvertEXT"; - case BlendOp::eInvertRgbEXT : return "InvertRgbEXT"; - case BlendOp::eLineardodgeEXT : return "LineardodgeEXT"; - case BlendOp::eLinearburnEXT : return "LinearburnEXT"; - case BlendOp::eVividlightEXT : return "VividlightEXT"; - case BlendOp::eLinearlightEXT : return "LinearlightEXT"; - case BlendOp::ePinlightEXT : return "PinlightEXT"; - case BlendOp::eHardmixEXT : return "HardmixEXT"; - case BlendOp::eHslHueEXT : return "HslHueEXT"; - case BlendOp::eHslSaturationEXT : return "HslSaturationEXT"; - case BlendOp::eHslColorEXT : return "HslColorEXT"; - case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT"; - case BlendOp::ePlusEXT : return "PlusEXT"; - case BlendOp::ePlusClampedEXT : return "PlusClampedEXT"; - case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT"; - case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT"; - case BlendOp::eMinusEXT : return "MinusEXT"; - case BlendOp::eMinusClampedEXT : return "MinusClampedEXT"; - case BlendOp::eContrastEXT : return "ContrastEXT"; - case BlendOp::eInvertOvgEXT : return "InvertOvgEXT"; - case BlendOp::eRedEXT : return "RedEXT"; - case BlendOp::eGreenEXT : return "GreenEXT"; - case BlendOp::eBlueEXT : return "BlueEXT"; - default: return "invalid"; - } - } - - enum class BlendOverlapEXT - { - eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, - eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value ) - { - switch ( value ) - { - case BlendOverlapEXT::eUncorrelated : return "Uncorrelated"; - case BlendOverlapEXT::eDisjoint : return "Disjoint"; - case BlendOverlapEXT::eConjoint : return "Conjoint"; - default: return "invalid"; - } - } - - enum class BorderColor - { - eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, - eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, - eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, - eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, - eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE, - eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT, - eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( BorderColor value ) - { - switch ( value ) - { - case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack"; - case BorderColor::eIntTransparentBlack : return "IntTransparentBlack"; - case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack"; - case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack"; - case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite"; - case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite"; - case BorderColor::eFloatCustomEXT : return "FloatCustomEXT"; - case BorderColor::eIntCustomEXT : return "IntCustomEXT"; - default: return "invalid"; - } - } - - enum class BufferCreateFlagBits : 
VkBufferCreateFlags - { - eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, - eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, - eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, - eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, - eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, - eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, - eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value ) - { - switch ( value ) - { - case BufferCreateFlagBits::eSparseBinding : return "SparseBinding"; - case BufferCreateFlagBits::eSparseResidency : return "SparseResidency"; - case BufferCreateFlagBits::eSparseAliased : return "SparseAliased"; - case BufferCreateFlagBits::eProtected : return "Protected"; - case BufferCreateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay"; - default: return "invalid"; - } - } - - enum class BufferUsageFlagBits : VkBufferUsageFlags - { - eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, - eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, - eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, - eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, - eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, - eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, - eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, - eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, - eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, - eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, - eRayTracingKHR = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, - eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, - eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, - eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value ) - { - switch ( value ) - { - case BufferUsageFlagBits::eTransferSrc : return "TransferSrc"; - case BufferUsageFlagBits::eTransferDst : return "TransferDst"; - case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer"; - case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer"; - case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer"; - case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer"; - case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer"; - case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer"; - case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer"; - case BufferUsageFlagBits::eShaderDeviceAddress : return "ShaderDeviceAddress"; - case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT"; - case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT"; - case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case BufferUsageFlagBits::eRayTracingKHR : return "RayTracingKHR"; - default: return "invalid"; - } - } - - enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR - { - eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, - 
eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, - ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, - ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, - eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR - }; - using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; - - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value ) - { - switch ( value ) - { - case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate"; - case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction"; - case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace"; - case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild"; - case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory"; - default: return "invalid"; - } - } - - enum class ChromaLocation - { - eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, - eMidpoint = VK_CHROMA_LOCATION_MIDPOINT - }; - using ChromaLocationKHR = ChromaLocation; - - VULKAN_HPP_INLINE std::string to_string( ChromaLocation value ) - { - switch ( value ) - { - case ChromaLocation::eCositedEven : return "CositedEven"; - case ChromaLocation::eMidpoint : return "Midpoint"; - default: return "invalid"; - } - } - - enum class CoarseSampleOrderTypeNV - { - eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, - eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, - ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, - eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV - }; - - VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value ) - { - switch ( value ) - { - case CoarseSampleOrderTypeNV::eDefault : return "Default"; - case CoarseSampleOrderTypeNV::eCustom : return "Custom"; - case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor"; - case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor"; - default: return "invalid"; - } - } - - enum class ColorComponentFlagBits : VkColorComponentFlags - { - eR = VK_COLOR_COMPONENT_R_BIT, - eG = VK_COLOR_COMPONENT_G_BIT, - eB = VK_COLOR_COMPONENT_B_BIT, - eA = VK_COLOR_COMPONENT_A_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value ) - { - switch ( value ) - { - case ColorComponentFlagBits::eR : return "R"; - case ColorComponentFlagBits::eG : return "G"; - case ColorComponentFlagBits::eB : return "B"; - case ColorComponentFlagBits::eA : return "A"; - default: return "invalid"; - } - } - - enum class ColorSpaceKHR - { - eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, - eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, - eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, - eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, - eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, - eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, - eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, - eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, - eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, - eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, - eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, - eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, - ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, - eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, - eDisplayNativeAMD = 
VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, - eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, - eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value ) - { - switch ( value ) - { - case ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear"; - case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT"; - case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT"; - case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT"; - case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT"; - case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT"; - case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT"; - case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT"; - case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT"; - case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT"; - case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT"; - case ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT"; - case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT"; - case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT"; - case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT"; - case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD"; - default: return "invalid"; - } - } - - enum class CommandBufferLevel - { - ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, - eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY - }; - - VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value ) - { - switch ( value ) - { - case CommandBufferLevel::ePrimary : return "Primary"; - case CommandBufferLevel::eSecondary : return "Secondary"; - default: return "invalid"; - } - } - - enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags - { - eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value ) - { - switch ( value ) - { - case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources"; - default: return "invalid"; - } - } - - enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags - { - eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, - eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, - eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value ) - { - switch ( value ) - { - case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit"; - case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue"; - case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse"; - default: return "invalid"; - } - } - - enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags - { - eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, - eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, - eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) - { - switch ( value ) - { - case CommandPoolCreateFlagBits::eTransient : return "Transient"; - case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer"; - case CommandPoolCreateFlagBits::eProtected : return "Protected"; - default: return "invalid"; - } - } - - enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags - { - eReleaseResources = 
VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value ) - { - switch ( value ) - { - case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources"; - default: return "invalid"; - } - } - - enum class CompareOp - { - eNever = VK_COMPARE_OP_NEVER, - eLess = VK_COMPARE_OP_LESS, - eEqual = VK_COMPARE_OP_EQUAL, - eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, - eGreater = VK_COMPARE_OP_GREATER, - eNotEqual = VK_COMPARE_OP_NOT_EQUAL, - eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, - eAlways = VK_COMPARE_OP_ALWAYS - }; - - VULKAN_HPP_INLINE std::string to_string( CompareOp value ) - { - switch ( value ) - { - case CompareOp::eNever : return "Never"; - case CompareOp::eLess : return "Less"; - case CompareOp::eEqual : return "Equal"; - case CompareOp::eLessOrEqual : return "LessOrEqual"; - case CompareOp::eGreater : return "Greater"; - case CompareOp::eNotEqual : return "NotEqual"; - case CompareOp::eGreaterOrEqual : return "GreaterOrEqual"; - case CompareOp::eAlways : return "Always"; - default: return "invalid"; - } - } - - enum class ComponentSwizzle - { - eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, - eZero = VK_COMPONENT_SWIZZLE_ZERO, - eOne = VK_COMPONENT_SWIZZLE_ONE, - eR = VK_COMPONENT_SWIZZLE_R, - eG = VK_COMPONENT_SWIZZLE_G, - eB = VK_COMPONENT_SWIZZLE_B, - eA = VK_COMPONENT_SWIZZLE_A - }; - - VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value ) - { - switch ( value ) - { - case ComponentSwizzle::eIdentity : return "Identity"; - case ComponentSwizzle::eZero : return "Zero"; - case ComponentSwizzle::eOne : return "One"; - case ComponentSwizzle::eR : return "R"; - case ComponentSwizzle::eG : return "G"; - case ComponentSwizzle::eB : return "B"; - case ComponentSwizzle::eA : return "A"; - default: return "invalid"; - } - } - - enum class ComponentTypeNV - { - eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, - eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, - eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, - eSint8 = VK_COMPONENT_TYPE_SINT8_NV, - eSint16 = VK_COMPONENT_TYPE_SINT16_NV, - eSint32 = VK_COMPONENT_TYPE_SINT32_NV, - eSint64 = VK_COMPONENT_TYPE_SINT64_NV, - eUint8 = VK_COMPONENT_TYPE_UINT8_NV, - eUint16 = VK_COMPONENT_TYPE_UINT16_NV, - eUint32 = VK_COMPONENT_TYPE_UINT32_NV, - eUint64 = VK_COMPONENT_TYPE_UINT64_NV - }; - - VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) - { - switch ( value ) - { - case ComponentTypeNV::eFloat16 : return "Float16"; - case ComponentTypeNV::eFloat32 : return "Float32"; - case ComponentTypeNV::eFloat64 : return "Float64"; - case ComponentTypeNV::eSint8 : return "Sint8"; - case ComponentTypeNV::eSint16 : return "Sint16"; - case ComponentTypeNV::eSint32 : return "Sint32"; - case ComponentTypeNV::eSint64 : return "Sint64"; - case ComponentTypeNV::eUint8 : return "Uint8"; - case ComponentTypeNV::eUint16 : return "Uint16"; - case ComponentTypeNV::eUint32 : return "Uint32"; - case ComponentTypeNV::eUint64 : return "Uint64"; - default: return "invalid"; - } - } - - enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR - { - eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, - ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, - ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, - eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value ) - { - switch ( value ) - { - case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque"; - case CompositeAlphaFlagBitsKHR::ePreMultiplied : 
return "PreMultiplied"; - case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied"; - case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit"; - default: return "invalid"; - } - } - - enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT - { - eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value ) - { - switch ( value ) - { - case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted"; - default: return "invalid"; - } - } - - enum class ConservativeRasterizationModeEXT - { - eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, - eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, - eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value ) - { - switch ( value ) - { - case ConservativeRasterizationModeEXT::eDisabled : return "Disabled"; - case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate"; - case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate"; - default: return "invalid"; - } - } - - enum class CopyAccelerationStructureModeKHR - { - eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, - eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, - eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, - eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR - }; - using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; - - VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value ) - { - switch ( value ) - { - case CopyAccelerationStructureModeKHR::eClone : return "Clone"; - case CopyAccelerationStructureModeKHR::eCompact : return "Compact"; - case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize"; - case CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize"; - default: return "invalid"; - } - } - - enum class CoverageModulationModeNV - { - eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, - eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, - eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, - eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV - }; - - VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) - { - switch ( value ) - { - case CoverageModulationModeNV::eNone : return "None"; - case CoverageModulationModeNV::eRgb : return "Rgb"; - case CoverageModulationModeNV::eAlpha : return "Alpha"; - case CoverageModulationModeNV::eRgba : return "Rgba"; - default: return "invalid"; - } - } - - enum class CoverageReductionModeNV - { - eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, - eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV - }; - - VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) - { - switch ( value ) - { - case CoverageReductionModeNV::eMerge : return "Merge"; - case CoverageReductionModeNV::eTruncate : return "Truncate"; - default: return "invalid"; - } - } - - enum class CullModeFlagBits : VkCullModeFlags - { - eNone = VK_CULL_MODE_NONE, - eFront = VK_CULL_MODE_FRONT_BIT, - eBack = VK_CULL_MODE_BACK_BIT, - eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK - }; - - VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value ) - { - switch ( value ) - { - case CullModeFlagBits::eNone : return "None"; - case CullModeFlagBits::eFront : return "Front"; - case CullModeFlagBits::eBack : return "Back"; - case CullModeFlagBits::eFrontAndBack : return 
"FrontAndBack"; - default: return "invalid"; - } - } - - enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT - { - eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, - eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, - ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, - eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, - eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value ) - { - switch ( value ) - { - case DebugReportFlagBitsEXT::eInformation : return "Information"; - case DebugReportFlagBitsEXT::eWarning : return "Warning"; - case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning"; - case DebugReportFlagBitsEXT::eError : return "Error"; - case DebugReportFlagBitsEXT::eDebug : return "Debug"; - default: return "invalid"; - } - } - - enum class DebugReportObjectTypeEXT - { - eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, - ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, - eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, - eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, - eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, - eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, - eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, - eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, - eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, - eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, - eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, - eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, - eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, - eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, - eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, - ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, - ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, - eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, - ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, - eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, - eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, - eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, - eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, - eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, - eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, - eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, - eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, - eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, - eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, - eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, - eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, - eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, - eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, - eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, - eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, - eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, - eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT - }; - - 
VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) - { - switch ( value ) - { - case DebugReportObjectTypeEXT::eUnknown : return "Unknown"; - case DebugReportObjectTypeEXT::eInstance : return "Instance"; - case DebugReportObjectTypeEXT::ePhysicalDevice : return "PhysicalDevice"; - case DebugReportObjectTypeEXT::eDevice : return "Device"; - case DebugReportObjectTypeEXT::eQueue : return "Queue"; - case DebugReportObjectTypeEXT::eSemaphore : return "Semaphore"; - case DebugReportObjectTypeEXT::eCommandBuffer : return "CommandBuffer"; - case DebugReportObjectTypeEXT::eFence : return "Fence"; - case DebugReportObjectTypeEXT::eDeviceMemory : return "DeviceMemory"; - case DebugReportObjectTypeEXT::eBuffer : return "Buffer"; - case DebugReportObjectTypeEXT::eImage : return "Image"; - case DebugReportObjectTypeEXT::eEvent : return "Event"; - case DebugReportObjectTypeEXT::eQueryPool : return "QueryPool"; - case DebugReportObjectTypeEXT::eBufferView : return "BufferView"; - case DebugReportObjectTypeEXT::eImageView : return "ImageView"; - case DebugReportObjectTypeEXT::eShaderModule : return "ShaderModule"; - case DebugReportObjectTypeEXT::ePipelineCache : return "PipelineCache"; - case DebugReportObjectTypeEXT::ePipelineLayout : return "PipelineLayout"; - case DebugReportObjectTypeEXT::eRenderPass : return "RenderPass"; - case DebugReportObjectTypeEXT::ePipeline : return "Pipeline"; - case DebugReportObjectTypeEXT::eDescriptorSetLayout : return "DescriptorSetLayout"; - case DebugReportObjectTypeEXT::eSampler : return "Sampler"; - case DebugReportObjectTypeEXT::eDescriptorPool : return "DescriptorPool"; - case DebugReportObjectTypeEXT::eDescriptorSet : return "DescriptorSet"; - case DebugReportObjectTypeEXT::eFramebuffer : return "Framebuffer"; - case DebugReportObjectTypeEXT::eCommandPool : return "CommandPool"; - case DebugReportObjectTypeEXT::eSurfaceKHR : return "SurfaceKHR"; - case DebugReportObjectTypeEXT::eSwapchainKHR : return "SwapchainKHR"; - case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; - case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR"; - case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR"; - case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT"; - case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; - case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; - case DebugReportObjectTypeEXT::eAccelerationStructureKHR : return "AccelerationStructureKHR"; - default: return "invalid"; - } - } - - enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT - { - eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, - eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, - eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, - eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) - { - switch ( value ) - { - case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose"; - case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info"; - case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning"; - case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error"; - default: return "invalid"; - } - } - - enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT - { - eGeneral = 
VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, - eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, - ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value ) - { - switch ( value ) - { - case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General"; - case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation"; - case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance"; - default: return "invalid"; - } - } - - enum class DependencyFlagBits : VkDependencyFlags - { - eByRegion = VK_DEPENDENCY_BY_REGION_BIT, - eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, - eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, - eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, - eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value ) - { - switch ( value ) - { - case DependencyFlagBits::eByRegion : return "ByRegion"; - case DependencyFlagBits::eDeviceGroup : return "DeviceGroup"; - case DependencyFlagBits::eViewLocal : return "ViewLocal"; - default: return "invalid"; - } - } - - enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags - { - eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, - eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, - ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, - eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT - }; - using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; - - VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value ) - { - switch ( value ) - { - case DescriptorBindingFlagBits::eUpdateAfterBind : return "UpdateAfterBind"; - case DescriptorBindingFlagBits::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending"; - case DescriptorBindingFlagBits::ePartiallyBound : return "PartiallyBound"; - case DescriptorBindingFlagBits::eVariableDescriptorCount : return "VariableDescriptorCount"; - default: return "invalid"; - } - } - - enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags - { - eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, - eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, - eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value ) - { - switch ( value ) - { - case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet"; - case DescriptorPoolCreateFlagBits::eUpdateAfterBind : return "UpdateAfterBind"; - default: return "invalid"; - } - } - - enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags - { - eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, - ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, - eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value ) - { - switch ( value ) - { - case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool : return "UpdateAfterBindPool"; - case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR"; - default: return "invalid"; - } - } - - enum class DescriptorType - { - eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, - eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 
- eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, - eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, - eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, - eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, - eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, - eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, - eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, - eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV - }; - - VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) - { - switch ( value ) - { - case DescriptorType::eSampler : return "Sampler"; - case DescriptorType::eCombinedImageSampler : return "CombinedImageSampler"; - case DescriptorType::eSampledImage : return "SampledImage"; - case DescriptorType::eStorageImage : return "StorageImage"; - case DescriptorType::eUniformTexelBuffer : return "UniformTexelBuffer"; - case DescriptorType::eStorageTexelBuffer : return "StorageTexelBuffer"; - case DescriptorType::eUniformBuffer : return "UniformBuffer"; - case DescriptorType::eStorageBuffer : return "StorageBuffer"; - case DescriptorType::eUniformBufferDynamic : return "UniformBufferDynamic"; - case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic"; - case DescriptorType::eInputAttachment : return "InputAttachment"; - case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT"; - case DescriptorType::eAccelerationStructureKHR : return "AccelerationStructureKHR"; - default: return "invalid"; - } - } - - enum class DescriptorUpdateTemplateType - { - eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR - }; - using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; - - VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value ) - { - switch ( value ) - { - case DescriptorUpdateTemplateType::eDescriptorSet : return "DescriptorSet"; - case DescriptorUpdateTemplateType::ePushDescriptorsKHR : return "PushDescriptorsKHR"; - default: return "invalid"; - } - } - - enum class DeviceCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits ) - { - return "(void)"; - } - - enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV - { - eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, - eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, - eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV - }; - - VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value ) - { - switch ( value ) - { - case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo : return "EnableShaderDebugInfo"; - case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking : return "EnableResourceTracking"; - case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints : return "EnableAutomaticCheckpoints"; - default: return "invalid"; - } - } - - enum class DeviceEventTypeEXT - { - eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value ) - { - 
switch ( value ) - { - case DeviceEventTypeEXT::eDisplayHotplug : return "DisplayHotplug"; - default: return "invalid"; - } - } - - enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR - { - eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, - eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, - eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, - eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value ) - { - switch ( value ) - { - case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local"; - case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote"; - case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum"; - case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice"; - default: return "invalid"; - } - } - - enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags - { - eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value ) - { - switch ( value ) - { - case DeviceQueueCreateFlagBits::eProtected : return "Protected"; - default: return "invalid"; - } - } - - enum class DiscardRectangleModeEXT - { - eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, - eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value ) - { - switch ( value ) - { - case DiscardRectangleModeEXT::eInclusive : return "Inclusive"; - case DiscardRectangleModeEXT::eExclusive : return "Exclusive"; - default: return "invalid"; - } - } - - enum class DisplayEventTypeEXT - { - eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value ) - { - switch ( value ) - { - case DisplayEventTypeEXT::eFirstPixelOut : return "FirstPixelOut"; - default: return "invalid"; - } - } - - enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR - { - eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, - eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, - ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, - ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value ) - { - switch ( value ) - { - case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque"; - case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global"; - case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel"; - case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied"; - default: return "invalid"; - } - } - - enum class DisplayPowerStateEXT - { - eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, - eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, - eOn = VK_DISPLAY_POWER_STATE_ON_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value ) - { - switch ( value ) - { - case DisplayPowerStateEXT::eOff : return "Off"; - case DisplayPowerStateEXT::eSuspend : return "Suspend"; - case DisplayPowerStateEXT::eOn : return "On"; - default: return "invalid"; - } - } - - enum class DriverId - { - eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, - eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, - eMesaRadv = VK_DRIVER_ID_MESA_RADV, - eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, - eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, - eIntelOpenSourceMESA = 
- enum class DriverId
- {
- eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY,
- eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE,
- eMesaRadv = VK_DRIVER_ID_MESA_RADV,
- eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
- eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
- eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
- eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
- eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
- eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY,
- eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
- eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY,
- eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
- eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE,
- eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR
- };
- using DriverIdKHR = DriverId;
-
- VULKAN_HPP_INLINE std::string to_string( DriverId value )
- {
- switch ( value )
- {
- case DriverId::eAmdProprietary : return "AmdProprietary";
- case DriverId::eAmdOpenSource : return "AmdOpenSource";
- case DriverId::eMesaRadv : return "MesaRadv";
- case DriverId::eNvidiaProprietary : return "NvidiaProprietary";
- case DriverId::eIntelProprietaryWindows : return "IntelProprietaryWindows";
- case DriverId::eIntelOpenSourceMESA : return "IntelOpenSourceMESA";
- case DriverId::eImaginationProprietary : return "ImaginationProprietary";
- case DriverId::eQualcommProprietary : return "QualcommProprietary";
- case DriverId::eArmProprietary : return "ArmProprietary";
- case DriverId::eGoogleSwiftshader : return "GoogleSwiftshader";
- case DriverId::eGgpProprietary : return "GgpProprietary";
- case DriverId::eBroadcomProprietary : return "BroadcomProprietary";
- case DriverId::eMesaLlvmpipe : return "MesaLlvmpipe";
- default: return "invalid";
- }
- }
-
- enum class DynamicState
- {
- eViewport = VK_DYNAMIC_STATE_VIEWPORT,
- eScissor = VK_DYNAMIC_STATE_SCISSOR,
- eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
- eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
- eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
- eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
- eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
- eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
- eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
- eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
- eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
- eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
- eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
- eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
- eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
- eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
- eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
- eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
- ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
- eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT,
- eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
- eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
- eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
- eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
- eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
- eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
- eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
- eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DynamicState value )
- {
- switch ( value )
- {
- case DynamicState::eViewport : return "Viewport";
- case DynamicState::eScissor : return "Scissor";
- case DynamicState::eLineWidth : return "LineWidth";
- case DynamicState::eDepthBias : return "DepthBias";
- case DynamicState::eBlendConstants : return "BlendConstants";
- case DynamicState::eDepthBounds : return "DepthBounds";
- case DynamicState::eStencilCompareMask : return "StencilCompareMask";
- case DynamicState::eStencilWriteMask : return "StencilWriteMask";
- case DynamicState::eStencilReference : return "StencilReference";
- case DynamicState::eViewportWScalingNV : return "ViewportWScalingNV";
- case DynamicState::eDiscardRectangleEXT : return "DiscardRectangleEXT";
- case DynamicState::eSampleLocationsEXT : return "SampleLocationsEXT";
- case DynamicState::eViewportShadingRatePaletteNV : return "ViewportShadingRatePaletteNV";
- case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV";
- case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV";
- case DynamicState::eLineStippleEXT : return "LineStippleEXT";
- case DynamicState::eCullModeEXT : return "CullModeEXT";
- case DynamicState::eFrontFaceEXT : return "FrontFaceEXT";
- case DynamicState::ePrimitiveTopologyEXT : return "PrimitiveTopologyEXT";
- case DynamicState::eViewportWithCountEXT : return "ViewportWithCountEXT";
- case DynamicState::eScissorWithCountEXT : return "ScissorWithCountEXT";
- case DynamicState::eVertexInputBindingStrideEXT : return "VertexInputBindingStrideEXT";
- case DynamicState::eDepthTestEnableEXT : return "DepthTestEnableEXT";
- case DynamicState::eDepthWriteEnableEXT : return "DepthWriteEnableEXT";
- case DynamicState::eDepthCompareOpEXT : return "DepthCompareOpEXT";
- case DynamicState::eDepthBoundsTestEnableEXT : return "DepthBoundsTestEnableEXT";
- case DynamicState::eStencilTestEnableEXT : return "StencilTestEnableEXT";
- case DynamicState::eStencilOpEXT : return "StencilOpEXT";
- default: return "invalid";
- }
- }
-
- enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
- {
- eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
- eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
- };
- using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalFenceFeatureFlagBits::eExportable : return "Exportable";
- case ExternalFenceFeatureFlagBits::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
- enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
- {
- eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
- eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
- eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
- eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
- };
- using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
- case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd";
- default: return "invalid";
- }
- }
-
- enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
- {
- eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
- eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
- eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
- };
- using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly";
- case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable";
- case ExternalMemoryFeatureFlagBits::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
- enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
- {
- eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
- eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
- eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
- {
- switch ( value )
- {
- case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly";
- case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable";
- case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
- enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
- {
- eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
- eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
- eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
- eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
- eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
- eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
- eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
- eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
- eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
- eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
- eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT
- };
- using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
- case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture";
- case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt";
- case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap";
- case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource";
- case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT";
- case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID";
- case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT";
- case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT";
- default: return "invalid";
- }
- }
-
- enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
- {
- eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
- eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
- eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
- eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
- {
- switch ( value )
- {
- case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image";
- case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt";
- default: return "invalid";
- }
- }
-
- enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
- {
- eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
- eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
- };
- using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable";
- case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
- enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
- {
- eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
- eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
- eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
- eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
- eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT
- };
- using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence";
- case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd";
- default: return "invalid";
- }
- }
-
- enum class FenceCreateFlagBits : VkFenceCreateFlags
- {
- eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
- {
- switch ( value )
- {
- case FenceCreateFlagBits::eSignaled : return "Signaled";
- default: return "invalid";
- }
- }
-
- enum class FenceImportFlagBits : VkFenceImportFlags
- {
- eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
- };
- using FenceImportFlagBitsKHR = FenceImportFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
- {
- switch ( value )
- {
- case FenceImportFlagBits::eTemporary : return "Temporary";
- default: return "invalid";
- }
- }
-
- enum class Filter
- {
- eNearest = VK_FILTER_NEAREST,
- eLinear = VK_FILTER_LINEAR,
- eCubicIMG = VK_FILTER_CUBIC_IMG,
- eCubicEXT = VK_FILTER_CUBIC_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( Filter value )
- {
- switch ( value )
- {
- case Filter::eNearest : return "Nearest";
- case Filter::eLinear : return "Linear";
- case Filter::eCubicIMG : return "CubicIMG";
- default: return "invalid";
+ case PipelineCacheHeaderVersion::eOne: return "One";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
 }
 }
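// Illustrative sketch only, not part of the upstream patch: the one behavioral
// change in these to_string() hunks is the default branch, which now reports
// unknown enumerants as "invalid ( <hex> )" instead of a bare "invalid".
// VULKAN_HPP_NAMESPACE::toHexString is the header's own helper; a rough
// standalone equivalent (the name to_hex_string is ours) looks like:
//
//   #include <cstdint>
//   #include <sstream>
//   #include <string>
//
//   std::string to_hex_string( uint32_t value )
//   {
//     std::stringstream stream;
//     stream << std::hex << value;  // hex digits, no "0x" prefix
//     return stream.str();
//   }
//
//   // default: return "invalid ( " + to_hex_string( static_cast<uint32_t>( value ) ) + " )";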
 enum class Format
 {
- eUndefined = VK_FORMAT_UNDEFINED,
- eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
- eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
- eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
- eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
- eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
- eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
- eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
- eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
- eR8Unorm = VK_FORMAT_R8_UNORM,
- eR8Snorm = VK_FORMAT_R8_SNORM,
- eR8Uscaled = VK_FORMAT_R8_USCALED,
- eR8Sscaled = VK_FORMAT_R8_SSCALED,
- eR8Uint = VK_FORMAT_R8_UINT,
- eR8Sint = VK_FORMAT_R8_SINT,
- eR8Srgb = VK_FORMAT_R8_SRGB,
- eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
- eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
- eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
- eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
- eR8G8Uint = VK_FORMAT_R8G8_UINT,
- eR8G8Sint = VK_FORMAT_R8G8_SINT,
- eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
- eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
- eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
- eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
- eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
- eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
- eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
- eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
- eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
- eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
- eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
- eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
- eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
- eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
- eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
- eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
- eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
- eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
- eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
- eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
- eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
- eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
- eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
- eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
- eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
- eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
- eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
- eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
- eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
- eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
- eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
- eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
- eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
- eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
- eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
- eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
- eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
- eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
- eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
- eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
- eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
- eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
- eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
- eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
- eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
- eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
- eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
- eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
- eR16Unorm = VK_FORMAT_R16_UNORM,
- eR16Snorm = VK_FORMAT_R16_SNORM,
- eR16Uscaled = VK_FORMAT_R16_USCALED,
- eR16Sscaled = VK_FORMAT_R16_SSCALED,
- eR16Uint = VK_FORMAT_R16_UINT,
- eR16Sint = VK_FORMAT_R16_SINT,
- eR16Sfloat = VK_FORMAT_R16_SFLOAT,
- eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
- eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
- eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
- eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
- eR16G16Uint = VK_FORMAT_R16G16_UINT,
- eR16G16Sint = VK_FORMAT_R16G16_SINT,
- eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
- eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
- eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
- eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
- eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
- eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
- eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
- eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
- eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
- eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
- eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
- eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
- eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
- eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
- eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
- eR32Uint = VK_FORMAT_R32_UINT,
- eR32Sint = VK_FORMAT_R32_SINT,
- eR32Sfloat = VK_FORMAT_R32_SFLOAT,
- eR32G32Uint = VK_FORMAT_R32G32_UINT,
- eR32G32Sint = VK_FORMAT_R32G32_SINT,
- eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
- eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
- eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
- eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
- eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
- eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
- eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
- eR64Uint = VK_FORMAT_R64_UINT,
- eR64Sint = VK_FORMAT_R64_SINT,
- eR64Sfloat = VK_FORMAT_R64_SFLOAT,
- eR64G64Uint = VK_FORMAT_R64G64_UINT,
- eR64G64Sint = VK_FORMAT_R64G64_SINT,
- eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
- eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
- eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
- eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
- eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
- eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
- eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
- eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
- eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
- eD16Unorm = VK_FORMAT_D16_UNORM,
- eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
- eD32Sfloat = VK_FORMAT_D32_SFLOAT,
- eS8Uint = VK_FORMAT_S8_UINT,
- eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
- eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
- eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
- eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
- eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
- eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
- eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
- eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
- eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
- eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
- eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
- eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
- eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
- eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
- eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
- eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
- eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
- eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
- eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
- eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
- eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
- eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
- eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
- eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
- eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
- eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
- eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
- eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
- eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
- eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
- eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
- eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
- eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
- eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
- eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
- eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
- eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
- eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
- eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
- eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
- eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
- eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
- eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
- eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
- eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
- eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
- eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
- eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
- eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
- eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
- eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
- eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
- eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
- eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
- eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
- eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
- eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
- eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
- eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
- eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
- eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
- eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
- eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
- eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
- eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
- eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
- eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
- eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
- eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
- eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
- eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
- eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
- eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
- eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
- eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
- eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
- eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
- eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
- eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
- eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
- eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
- eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
- eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
- eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
- eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
- eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
- eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
- eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
- eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
- eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
- eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
- ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
- ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
- ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
- ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
- ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
- ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
- ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
- ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
- eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
- eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
- eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
- eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
- eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
- eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
- eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
- eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
- eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
- eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
- eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
- eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
- eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
- eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
- eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
- eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
- eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
- eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
- eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
+ eUndefined = VK_FORMAT_UNDEFINED,
+ eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
+ eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+ eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+ eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
+ eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
+ eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+ eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+ eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+ eR8Unorm = VK_FORMAT_R8_UNORM,
+ eR8Snorm = VK_FORMAT_R8_SNORM,
+ eR8Uscaled = VK_FORMAT_R8_USCALED,
+ eR8Sscaled = VK_FORMAT_R8_SSCALED,
+ eR8Uint = VK_FORMAT_R8_UINT,
+ eR8Sint = VK_FORMAT_R8_SINT,
+ eR8Srgb = VK_FORMAT_R8_SRGB,
+ eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
+ eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
+ eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
+ eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
+ eR8G8Uint = VK_FORMAT_R8G8_UINT,
+ eR8G8Sint = VK_FORMAT_R8G8_SINT,
+ eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
+ eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
+ eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
+ eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
+ eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
+ eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
+ eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
+ eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
+ eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
+ eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
+ eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
+ eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
+ eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
+ eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
+ eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
+ eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
+ eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
+ eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
+ eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
+ eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
+ eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
+ eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
+ eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
+ eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
+ eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
+ eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
+ eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
+ eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
+ eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
+ eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+ eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+ eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
+ eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
+ eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
+ eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
+ eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+ eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+ eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
+ eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+ eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
+ eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
+ eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
+ eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+ eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
+ eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+ eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
+ eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
+ eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
+ eR16Unorm = VK_FORMAT_R16_UNORM,
+ eR16Snorm = VK_FORMAT_R16_SNORM,
+ eR16Uscaled = VK_FORMAT_R16_USCALED,
+ eR16Sscaled = VK_FORMAT_R16_SSCALED,
+ eR16Uint = VK_FORMAT_R16_UINT,
+ eR16Sint = VK_FORMAT_R16_SINT,
+ eR16Sfloat = VK_FORMAT_R16_SFLOAT,
+ eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
+ eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
+ eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
+ eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
+ eR16G16Uint = VK_FORMAT_R16G16_UINT,
+ eR16G16Sint = VK_FORMAT_R16G16_SINT,
+ eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
+ eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
+ eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
+ eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
+ eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
+ eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
+ eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
+ eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
+ eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
+ eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
+ eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
+ eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
+ eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
+ eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
+ eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
+ eR32Uint = VK_FORMAT_R32_UINT,
+ eR32Sint = VK_FORMAT_R32_SINT,
+ eR32Sfloat = VK_FORMAT_R32_SFLOAT,
+ eR32G32Uint = VK_FORMAT_R32G32_UINT,
+ eR32G32Sint = VK_FORMAT_R32G32_SINT,
+ eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
+ eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
+ eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
+ eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
+ eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
+ eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
+ eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
+ eR64Uint = VK_FORMAT_R64_UINT,
+ eR64Sint = VK_FORMAT_R64_SINT,
+ eR64Sfloat = VK_FORMAT_R64_SFLOAT,
+ eR64G64Uint = VK_FORMAT_R64G64_UINT,
+ eR64G64Sint = VK_FORMAT_R64G64_SINT,
+ eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
+ eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
+ eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
+ eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
+ eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
+ eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
+ eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
+ eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+ eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+ eD16Unorm = VK_FORMAT_D16_UNORM,
+ eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
+ eD32Sfloat = VK_FORMAT_D32_SFLOAT,
+ eS8Uint = VK_FORMAT_S8_UINT,
+ eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
+ eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
+ eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
+ eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+ eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+ eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+ eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+ eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
+ eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
+ eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
+ eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
+ eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
+ eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
+ eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
+ eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
+ eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
+ eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
+ eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
+ eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
+ eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+ eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+ eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+ eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+ eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+ eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+ eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
+ eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
+ eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+ eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+ eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+ eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+ eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+ eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+ eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+ eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+ eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+ eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+ eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+ eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+ eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+ eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+ eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+ eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+ eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+ eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+ eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+ eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+ eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+ eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+ eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+ eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+ eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+ eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+ eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+ eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+ eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+ eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+ eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
+ eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
+ eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+ eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+ eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+ eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+ eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+ eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
+ eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+ eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+ eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+ eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+ eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+ eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+ eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+ eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+ eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+ eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
+ eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+ eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+ eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+ eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+ eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+ eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+ eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+ eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+ eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+ eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
+ eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
+ eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+ eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+ eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+ eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+ eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+ ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+ ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
+ ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
+ ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
+ ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
+ ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
+ ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
+ ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+ eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
+ eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
+ eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
+ eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
+ eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
+ eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
+ eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
+ eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
+ eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
+ eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
+ eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
+ eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
+ eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
+ eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
+ eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT,
+ eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT,
+ eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT,
+ eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT,
+ eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
+ eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
+ eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
+ eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
+ eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
+ eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
+ eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
 eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
 eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
 eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
 eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
 eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
- eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
+ eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
 eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
 eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
 eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
 eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
 eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
- eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
- eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
- eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
- eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
- eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
- eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
- eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
- eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
- eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
- eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
- eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
- eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
- eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
- eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
- eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
- eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
- eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
- eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
+ eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
+ eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
+ eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
+ eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
+ eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
+ eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
+ eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+ eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+ eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
+ eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
+ eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
+ eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
+ eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
+ eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
+ eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
+ eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
+ eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
+ eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
 };
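// Illustrative sketch only, not part of the upstream patch: besides the
// re-alignment, the new Format enum picks up the VK_EXT_ycbcr_2plane_444_formats
// and VK_EXT_4444_formats enumerants (eG8B8R82Plane444UnormEXT through
// eA4B4G4R4UnormPack16EXT). The to_string( Format ) overload below is handy for
// logging a chosen surface format, e.g. (assuming <vulkan/vulkan.hpp>):
//
//   #include <iostream>
//   #include <vulkan/vulkan.hpp>
//
//   void log_surface_format( vk::SurfaceFormatKHR chosen )
//   {
//     std::cout << vk::to_string( chosen.format ) << " / "
//               << vk::to_string( chosen.colorSpace ) << '\n';
//   }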
return "R8Sint"; - case Format::eR8Srgb : return "R8Srgb"; - case Format::eR8G8Unorm : return "R8G8Unorm"; - case Format::eR8G8Snorm : return "R8G8Snorm"; - case Format::eR8G8Uscaled : return "R8G8Uscaled"; - case Format::eR8G8Sscaled : return "R8G8Sscaled"; - case Format::eR8G8Uint : return "R8G8Uint"; - case Format::eR8G8Sint : return "R8G8Sint"; - case Format::eR8G8Srgb : return "R8G8Srgb"; - case Format::eR8G8B8Unorm : return "R8G8B8Unorm"; - case Format::eR8G8B8Snorm : return "R8G8B8Snorm"; - case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled"; - case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled"; - case Format::eR8G8B8Uint : return "R8G8B8Uint"; - case Format::eR8G8B8Sint : return "R8G8B8Sint"; - case Format::eR8G8B8Srgb : return "R8G8B8Srgb"; - case Format::eB8G8R8Unorm : return "B8G8R8Unorm"; - case Format::eB8G8R8Snorm : return "B8G8R8Snorm"; - case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled"; - case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled"; - case Format::eB8G8R8Uint : return "B8G8R8Uint"; - case Format::eB8G8R8Sint : return "B8G8R8Sint"; - case Format::eB8G8R8Srgb : return "B8G8R8Srgb"; - case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm"; - case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm"; - case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled"; - case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled"; - case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint"; - case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint"; - case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb"; - case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm"; - case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm"; - case Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled"; - case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled"; - case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint"; - case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint"; - case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb"; - case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32"; - case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32"; - case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32"; - case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32"; - case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32"; - case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32"; - case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32"; - case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32"; - case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32"; - case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32"; - case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32"; - case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32"; - case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32"; - case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32"; - case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32"; - case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32"; - case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32"; - case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32"; - case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32"; - case Format::eR16Unorm : return "R16Unorm"; - case Format::eR16Snorm : return "R16Snorm"; - case Format::eR16Uscaled : return "R16Uscaled"; - case Format::eR16Sscaled : return "R16Sscaled"; - case Format::eR16Uint : return "R16Uint"; - case Format::eR16Sint : 
return "R16Sint"; - case Format::eR16Sfloat : return "R16Sfloat"; - case Format::eR16G16Unorm : return "R16G16Unorm"; - case Format::eR16G16Snorm : return "R16G16Snorm"; - case Format::eR16G16Uscaled : return "R16G16Uscaled"; - case Format::eR16G16Sscaled : return "R16G16Sscaled"; - case Format::eR16G16Uint : return "R16G16Uint"; - case Format::eR16G16Sint : return "R16G16Sint"; - case Format::eR16G16Sfloat : return "R16G16Sfloat"; - case Format::eR16G16B16Unorm : return "R16G16B16Unorm"; - case Format::eR16G16B16Snorm : return "R16G16B16Snorm"; - case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled"; - case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled"; - case Format::eR16G16B16Uint : return "R16G16B16Uint"; - case Format::eR16G16B16Sint : return "R16G16B16Sint"; - case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat"; - case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm"; - case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm"; - case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled"; - case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled"; - case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint"; - case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint"; - case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat"; - case Format::eR32Uint : return "R32Uint"; - case Format::eR32Sint : return "R32Sint"; - case Format::eR32Sfloat : return "R32Sfloat"; - case Format::eR32G32Uint : return "R32G32Uint"; - case Format::eR32G32Sint : return "R32G32Sint"; - case Format::eR32G32Sfloat : return "R32G32Sfloat"; - case Format::eR32G32B32Uint : return "R32G32B32Uint"; - case Format::eR32G32B32Sint : return "R32G32B32Sint"; - case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat"; - case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint"; - case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint"; - case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat"; - case Format::eR64Uint : return "R64Uint"; - case Format::eR64Sint : return "R64Sint"; - case Format::eR64Sfloat : return "R64Sfloat"; - case Format::eR64G64Uint : return "R64G64Uint"; - case Format::eR64G64Sint : return "R64G64Sint"; - case Format::eR64G64Sfloat : return "R64G64Sfloat"; - case Format::eR64G64B64Uint : return "R64G64B64Uint"; - case Format::eR64G64B64Sint : return "R64G64B64Sint"; - case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat"; - case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint"; - case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint"; - case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat"; - case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32"; - case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32"; - case Format::eD16Unorm : return "D16Unorm"; - case Format::eX8D24UnormPack32 : return "X8D24UnormPack32"; - case Format::eD32Sfloat : return "D32Sfloat"; - case Format::eS8Uint : return "S8Uint"; - case Format::eD16UnormS8Uint : return "D16UnormS8Uint"; - case Format::eD24UnormS8Uint : return "D24UnormS8Uint"; - case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint"; - case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock"; - case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock"; - case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock"; - case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock"; - case Format::eBc2UnormBlock : return "Bc2UnormBlock"; - case Format::eBc2SrgbBlock : return "Bc2SrgbBlock"; - case Format::eBc3UnormBlock : return "Bc3UnormBlock"; - case 
Format::eBc3SrgbBlock : return "Bc3SrgbBlock"; - case Format::eBc4UnormBlock : return "Bc4UnormBlock"; - case Format::eBc4SnormBlock : return "Bc4SnormBlock"; - case Format::eBc5UnormBlock : return "Bc5UnormBlock"; - case Format::eBc5SnormBlock : return "Bc5SnormBlock"; - case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock"; - case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock"; - case Format::eBc7UnormBlock : return "Bc7UnormBlock"; - case Format::eBc7SrgbBlock : return "Bc7SrgbBlock"; - case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock"; - case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock"; - case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock"; - case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock"; - case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock"; - case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock"; - case Format::eEacR11UnormBlock : return "EacR11UnormBlock"; - case Format::eEacR11SnormBlock : return "EacR11SnormBlock"; - case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock"; - case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock"; - case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock"; - case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock"; - case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock"; - case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock"; - case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock"; - case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock"; - case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock"; - case Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock"; - case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock"; - case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock"; - case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock"; - case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock"; - case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock"; - case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock"; - case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock"; - case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock"; - case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock"; - case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock"; - case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock"; - case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock"; - case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock"; - case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock"; - case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock"; - case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock"; - case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock"; - case Format::eAstc12x10SrgbBlock : return "Astc12x10SrgbBlock"; - case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock"; - case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock"; - case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm"; - case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm"; - case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm"; - case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm"; - case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm"; - case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm"; - case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm"; - case Format::eR10X6UnormPack16 : return "R10X6UnormPack16"; - case 
Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16"; - case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16"; - case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; - case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; - case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16"; - case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16"; - case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16"; - case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16"; - case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16"; - case Format::eR12X4UnormPack16 : return "R12X4UnormPack16"; - case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16"; - case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16"; - case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; - case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; - case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16"; - case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16"; - case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16"; - case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 : return "G12X4B12X4R12X42Plane422Unorm3Pack16"; - case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16"; - case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm"; - case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm"; - case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm"; - case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm"; - case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm"; - case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm"; - case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm"; - case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG"; - case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG"; - case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG"; - case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG"; - case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG"; - case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG"; - case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG"; - case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG"; - case Format::eAstc4x4SfloatBlockEXT : return "Astc4x4SfloatBlockEXT"; - case Format::eAstc5x4SfloatBlockEXT : return "Astc5x4SfloatBlockEXT"; - case Format::eAstc5x5SfloatBlockEXT : return "Astc5x5SfloatBlockEXT"; - case Format::eAstc6x5SfloatBlockEXT : return "Astc6x5SfloatBlockEXT"; - case Format::eAstc6x6SfloatBlockEXT : return "Astc6x6SfloatBlockEXT"; - case Format::eAstc8x5SfloatBlockEXT : return "Astc8x5SfloatBlockEXT"; - case Format::eAstc8x6SfloatBlockEXT : return "Astc8x6SfloatBlockEXT"; - case Format::eAstc8x8SfloatBlockEXT : return "Astc8x8SfloatBlockEXT"; - case Format::eAstc10x5SfloatBlockEXT : return 
"Astc10x5SfloatBlockEXT"; - case Format::eAstc10x6SfloatBlockEXT : return "Astc10x6SfloatBlockEXT"; - case Format::eAstc10x8SfloatBlockEXT : return "Astc10x8SfloatBlockEXT"; - case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT"; - case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT"; - case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT"; - default: return "invalid"; + case Format::eUndefined: return "Undefined"; + case Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case 
Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return "R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return 
"R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return "Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case 
Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG: return 
"Pvrtc22BppUnormBlockIMG"; + case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + case Format::eAstc4x4SfloatBlockEXT: return "Astc4x4SfloatBlockEXT"; + case Format::eAstc5x4SfloatBlockEXT: return "Astc5x4SfloatBlockEXT"; + case Format::eAstc5x5SfloatBlockEXT: return "Astc5x5SfloatBlockEXT"; + case Format::eAstc6x5SfloatBlockEXT: return "Astc6x5SfloatBlockEXT"; + case Format::eAstc6x6SfloatBlockEXT: return "Astc6x6SfloatBlockEXT"; + case Format::eAstc8x5SfloatBlockEXT: return "Astc8x5SfloatBlockEXT"; + case Format::eAstc8x6SfloatBlockEXT: return "Astc8x6SfloatBlockEXT"; + case Format::eAstc8x8SfloatBlockEXT: return "Astc8x8SfloatBlockEXT"; + case Format::eAstc10x5SfloatBlockEXT: return "Astc10x5SfloatBlockEXT"; + case Format::eAstc10x6SfloatBlockEXT: return "Astc10x6SfloatBlockEXT"; + case Format::eAstc10x8SfloatBlockEXT: return "Astc10x8SfloatBlockEXT"; + case Format::eAstc10x10SfloatBlockEXT: return "Astc10x10SfloatBlockEXT"; + case Format::eAstc12x10SfloatBlockEXT: return "Astc12x10SfloatBlockEXT"; + case Format::eAstc12x12SfloatBlockEXT: return "Astc12x12SfloatBlockEXT"; + case Format::eG8B8R82Plane444UnormEXT: return "G8B8R82Plane444UnormEXT"; + case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16EXT: return "G10X6B10X6R10X62Plane444Unorm3Pack16EXT"; + case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16EXT: return "G12X4B12X4R12X42Plane444Unorm3Pack16EXT"; + case Format::eG16B16R162Plane444UnormEXT: return "G16B16R162Plane444UnormEXT"; + case Format::eA4R4G4B4UnormPack16EXT: return "A4R4G4B4UnormPack16EXT"; + case Format::eA4B4G4R4UnormPack16EXT: return "A4B4G4R4UnormPack16EXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } enum class FormatFeatureFlagBits : VkFormatFeatureFlags { - eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, - eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, - eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, - eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, - eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, - eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, - eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, - eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, - eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, - eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, - eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, - eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, - eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, - eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, - eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + 
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
     eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
-    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
-    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
-    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
-    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
-    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
-    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
     eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
-    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
-    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
-    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
-    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
-    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
     eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
-    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
-    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
-    eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
-    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
+    eSampledImageYcbcrConversionLinearFilterKHR =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
     eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
     eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
   };
@@ -5662,292 +8087,101 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case FormatFeatureFlagBits::eSampledImage : return "SampledImage";
-      case FormatFeatureFlagBits::eStorageImage : return "StorageImage";
-      case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic";
-      case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
-      case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
-      case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
-      case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer";
-      case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment";
-      case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend";
-      case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
-      case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc";
-      case FormatFeatureFlagBits::eBlitDst : return "BlitDst";
-      case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear";
-      case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc";
-      case FormatFeatureFlagBits::eTransferDst : return "TransferDst";
-      case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples";
-      case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
-      case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
-      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
-      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
-      case FormatFeatureFlagBits::eDisjoint : return "Disjoint";
-      case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
-      case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax";
-      case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG";
-      case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR";
-      case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
-      default: return "invalid";
-    }
-  }
-
-  enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
-  {
-    eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
-    eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
-  {
-    switch ( value )
-    {
-      case FramebufferCreateFlagBits::eImageless : return "Imageless";
-      default: return "invalid";
-    }
-  }
-
-  enum class FrontFace
-  {
-    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
-    eClockwise = VK_FRONT_FACE_CLOCKWISE
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( FrontFace value )
-  {
-    switch ( value )
-    {
-      case FrontFace::eCounterClockwise : return "CounterClockwise";
-      case FrontFace::eClockwise : return "Clockwise";
-      default: return "invalid";
-    }
-  }
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  enum class FullScreenExclusiveEXT
-  {
-    eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
-    eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
-    eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
-    eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
-  {
-    switch ( value )
-    {
-      case FullScreenExclusiveEXT::eDefault : return "Default";
-      case FullScreenExclusiveEXT::eAllowed : return "Allowed";
-      case FullScreenExclusiveEXT::eDisallowed : return "Disallowed";
-      case FullScreenExclusiveEXT::eApplicationControlled : return "ApplicationControlled";
-      default: return "invalid";
-    }
-  }
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
-  {
-    eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
-    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR
-  };
-  using GeometryFlagBitsNV = GeometryFlagBitsKHR;
-
-  VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
-  {
-    switch ( value )
-    {
-      case GeometryFlagBitsKHR::eOpaque : return "Opaque";
-      case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
-      default: return "invalid";
-    }
-  }
-
-  enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
-  {
-    eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
-    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
-    eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
-    eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
-    eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV
-  };
-  using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
-
-  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
-  {
-    switch ( value )
-    {
-      case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable";
-      case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
-      case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque";
-      case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque";
-      default: return "invalid";
-    }
-  }
-
-  enum class GeometryTypeKHR
-  {
-    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
-    eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
-    eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
-  };
-  using GeometryTypeNV = GeometryTypeKHR;
-
-  VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
-  {
-    switch ( value )
-    {
-      case GeometryTypeKHR::eTriangles : return "Triangles";
-      case GeometryTypeKHR::eAabbs : return "Aabbs";
-      case GeometryTypeKHR::eInstances : return "Instances";
-      default: return "invalid";
-    }
-  }
-
-  enum class ImageAspectFlagBits : VkImageAspectFlags
-  {
-    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
-    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
-    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
-    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
-    ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
-    ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
-    ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
-    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
-    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
-    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
-    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
-    ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
-    ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
-    ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
-  {
-    switch ( value )
-    {
-      case ImageAspectFlagBits::eColor : return "Color";
-      case ImageAspectFlagBits::eDepth : return "Depth";
-      case ImageAspectFlagBits::eStencil : return "Stencil";
-      case ImageAspectFlagBits::eMetadata : return "Metadata";
-      case ImageAspectFlagBits::ePlane0 : return "Plane0";
-      case ImageAspectFlagBits::ePlane1 : return "Plane1";
-      case ImageAspectFlagBits::ePlane2 : return "Plane2";
-      case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT";
-      case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT";
-      case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT";
-      case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT";
-      default: return "invalid";
+      case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
+      case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
+      case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
+      case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
+      case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
+      case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
+      case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
+      case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+      case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
+      case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
+      case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
+      case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc";
+      case FormatFeatureFlagBits::eTransferDst: return "TransferDst";
+      case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter:
+        return "SampledImageYcbcrConversionLinearFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter:
+        return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit:
+        return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
+        return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+      case FormatFeatureFlagBits::eDisjoint: return "Disjoint";
+      case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
+      case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
+      case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
+      case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+      case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
+      case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }

   enum class ImageCreateFlagBits : VkImageCreateFlags
   {
-    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
-    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
-    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
-    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
-    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
-    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
-    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
-    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
-    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
-    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
-    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
-    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
-    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
     eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
-    eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
-    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
-    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
-    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
-    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
-    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
-    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
+    eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
+    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
   };

   VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
   {
     switch ( value )
     {
-      case ImageCreateFlagBits::eSparseBinding : return "SparseBinding";
-      case ImageCreateFlagBits::eSparseResidency : return "SparseResidency";
-      case ImageCreateFlagBits::eSparseAliased : return "SparseAliased";
-      case ImageCreateFlagBits::eMutableFormat : return "MutableFormat";
-      case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible";
-      case ImageCreateFlagBits::eAlias : return "Alias";
-      case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
-      case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible";
-      case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible";
-      case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage";
-      case ImageCreateFlagBits::eProtected : return "Protected";
-      case ImageCreateFlagBits::eDisjoint : return "Disjoint";
-      case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV";
-      case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT";
-      case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
-      default: return "invalid";
-    }
-  }
-
-  enum class ImageLayout
-  {
-    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
-    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
-    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
-    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
-    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
-    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
-    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
-    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
-    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
-    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
-    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
-    eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
-    eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
-    eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
-    eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
-    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
-    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
-    eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
-    eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
-    eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
-    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
-    eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
-    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
-    eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
-    eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
-  {
-    switch ( value )
-    {
-      case ImageLayout::eUndefined : return "Undefined";
-      case ImageLayout::eGeneral : return "General";
-      case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal";
-      case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal";
-      case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal";
-      case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal";
-      case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal";
-      case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal";
-      case ImageLayout::ePreinitialized : return "Preinitialized";
-      case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal";
-      case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal";
-      case ImageLayout::eDepthAttachmentOptimal : return "DepthAttachmentOptimal";
-      case ImageLayout::eDepthReadOnlyOptimal : return "DepthReadOnlyOptimal";
-      case ImageLayout::eStencilAttachmentOptimal : return "StencilAttachmentOptimal";
-      case ImageLayout::eStencilReadOnlyOptimal : return "StencilReadOnlyOptimal";
-      case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR";
-      case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
-      case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV";
-      case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
-      default: return "invalid";
+      case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
+      case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
+      case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
+      case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
+      case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
+      case ImageCreateFlagBits::eAlias: return "Alias";
+      case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+      case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible";
+      case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible";
+      case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage";
+      case ImageCreateFlagBits::eProtected: return "Protected";
+      case ImageCreateFlagBits::eDisjoint: return "Disjoint";
+      case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV";
+      case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT";
+      case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }

   enum class ImageTiling
   {
-    eOptimal = VK_IMAGE_TILING_OPTIMAL,
-    eLinear = VK_IMAGE_TILING_LINEAR,
+    eOptimal = VK_IMAGE_TILING_OPTIMAL,
+    eLinear = VK_IMAGE_TILING_LINEAR,
     eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
   };
@@ -5955,10 +8189,10 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case ImageTiling::eOptimal : return "Optimal";
-      case ImageTiling::eLinear : return "Linear";
-      case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT";
-      default: return "invalid";
+      case ImageTiling::eOptimal: return "Optimal";
+      case ImageTiling::eLinear: return "Linear";
+      case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }
@@ -5973,168 +8207,69 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case ImageType::e1D : return "1D";
-      case ImageType::e2D : return "2D";
-      case ImageType::e3D : return "3D";
-      default: return "invalid";
+      case ImageType::e1D: return "1D";
+      case ImageType::e2D: return "2D";
+      case ImageType::e3D: return "3D";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }

   enum class ImageUsageFlagBits : VkImageUsageFlags
   {
-    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
-    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
-    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
-    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
-    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
     eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
-    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
-    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
-    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
-    eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT
+    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+    eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV
   };

   VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
   {
     switch ( value )
     {
-      case ImageUsageFlagBits::eTransferSrc : return "TransferSrc";
-      case ImageUsageFlagBits::eTransferDst : return "TransferDst";
-      case ImageUsageFlagBits::eSampled : return "Sampled";
-      case ImageUsageFlagBits::eStorage : return "Storage";
-      case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment";
-      case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
-      case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment";
-      case ImageUsageFlagBits::eInputAttachment : return "InputAttachment";
-      case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
-      case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
-      default: return "invalid";
-    }
-  }
-
-  enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
-  {
-    eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
-  {
-    switch ( value )
-    {
-      case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
-      default: return "invalid";
-    }
-  }
-
-  enum class ImageViewType
-  {
-    e1D = VK_IMAGE_VIEW_TYPE_1D,
-    e2D = VK_IMAGE_VIEW_TYPE_2D,
-    e3D = VK_IMAGE_VIEW_TYPE_3D,
-    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
-    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
-    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
-    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
-  {
-    switch ( value )
-    {
-      case ImageViewType::e1D : return "1D";
-      case ImageViewType::e2D : return "2D";
-      case ImageViewType::e3D : return "3D";
-      case ImageViewType::eCube : return "Cube";
-      case ImageViewType::e1DArray : return "1DArray";
-      case ImageViewType::e2DArray : return "2DArray";
-      case ImageViewType::eCubeArray : return "CubeArray";
-      default: return "invalid";
-    }
-  }
-
-  enum class IndexType
-  {
-    eUint16 = VK_INDEX_TYPE_UINT16,
-    eUint32 = VK_INDEX_TYPE_UINT32,
-    eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
-    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
-    eNoneNV = VK_INDEX_TYPE_NONE_NV
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( IndexType value )
-  {
-    switch ( value )
-    {
-      case IndexType::eUint16 : return "Uint16";
-      case IndexType::eUint32 : return "Uint32";
-      case IndexType::eNoneKHR : return "NoneKHR";
-      case IndexType::eUint8EXT : return "Uint8EXT";
-      default: return "invalid";
-    }
-  }
-
-  enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
-  {
-    eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
-    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
-    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
-  {
-    switch ( value )
-    {
-      case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess";
-      case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences";
-      case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences";
-      default: return "invalid";
-    }
-  }
-
-  enum class IndirectCommandsTokenTypeNV
-  {
-    eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
-    eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
-    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
-    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
-    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
-    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
-    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
-    eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
-  {
-    switch ( value )
-    {
-      case IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup";
-      case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags";
-      case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer";
-      case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer";
-      case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant";
-      case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed";
-      case IndirectCommandsTokenTypeNV::eDraw : return "Draw";
-      case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks";
-      default: return "invalid";
-    }
-  }
-
-  enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
-  {
-    eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
-  {
-    switch ( value )
-    {
-      case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface";
-      default: return "invalid";
+      case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
+      case ImageUsageFlagBits::eTransferDst: return "TransferDst";
+      case ImageUsageFlagBits::eSampled: return "Sampled";
+      case ImageUsageFlagBits::eStorage: return "Storage";
+      case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
+      case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+      case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
+      case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
+      case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
+      case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+      case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
+      case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
+      case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }

   enum class InstanceCreateFlagBits
-  {};
+  {
+  };

   VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
   {
@@ -6150,98 +8285,15 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case InternalAllocationType::eExecutable : return "Executable";
-      default: return "invalid";
-    }
-  }
-
-  enum class LineRasterizationModeEXT
-  {
-    eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
-    eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
-    eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
-    eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
-  {
-    switch ( value )
-    {
-      case LineRasterizationModeEXT::eDefault : return "Default";
-      case LineRasterizationModeEXT::eRectangular : return "Rectangular";
-      case LineRasterizationModeEXT::eBresenham : return "Bresenham";
-      case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth";
-      default: return "invalid";
-    }
-  }
-
-  enum class LogicOp
-  {
-    eClear = VK_LOGIC_OP_CLEAR,
-    eAnd = VK_LOGIC_OP_AND,
-    eAndReverse = VK_LOGIC_OP_AND_REVERSE,
-    eCopy = VK_LOGIC_OP_COPY,
-    eAndInverted = VK_LOGIC_OP_AND_INVERTED,
-    eNoOp = VK_LOGIC_OP_NO_OP,
-    eXor = VK_LOGIC_OP_XOR,
-    eOr = VK_LOGIC_OP_OR,
-    eNor = VK_LOGIC_OP_NOR,
-    eEquivalent = VK_LOGIC_OP_EQUIVALENT,
-    eInvert = VK_LOGIC_OP_INVERT,
-    eOrReverse = VK_LOGIC_OP_OR_REVERSE,
-    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
-    eOrInverted = VK_LOGIC_OP_OR_INVERTED,
-    eNand = VK_LOGIC_OP_NAND,
-    eSet = VK_LOGIC_OP_SET
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( LogicOp value )
-  {
-    switch ( value )
-    {
-      case LogicOp::eClear : return "Clear";
-      case LogicOp::eAnd : return "And";
-      case LogicOp::eAndReverse : return "AndReverse";
-      case LogicOp::eCopy : return "Copy";
-      case LogicOp::eAndInverted : return "AndInverted";
-      case LogicOp::eNoOp : return "NoOp";
-      case LogicOp::eXor : return "Xor";
-      case LogicOp::eOr : return "Or";
-      case LogicOp::eNor : return "Nor";
-      case LogicOp::eEquivalent : return "Equivalent";
-      case LogicOp::eInvert : return "Invert";
-      case LogicOp::eOrReverse : return "OrReverse";
-      case LogicOp::eCopyInverted : return "CopyInverted";
-      case LogicOp::eOrInverted : return "OrInverted";
-      case LogicOp::eNand : return "Nand";
-      case LogicOp::eSet : return "Set";
-      default: return "invalid";
-    }
-  }
-
-  enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
-  {
-    eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
-    eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
-    eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT
-  };
-  using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;
-
-  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
-  {
-    switch ( value )
-    {
-      case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask";
-      case MemoryAllocateFlagBits::eDeviceAddress : return "DeviceAddress";
-      case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
-      default: return "invalid";
+      case InternalAllocationType::eExecutable: return "Executable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }

   enum class MemoryHeapFlagBits : VkMemoryHeapFlags
   {
-    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
-    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
   };
@@ -6249,38 +8301,20 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal";
-      case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance";
-      default: return "invalid";
-    }
-  }
-
-  enum class MemoryOverallocationBehaviorAMD
-  {
-    eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
-    eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
-    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
-  {
-    switch ( value )
-    {
-      case MemoryOverallocationBehaviorAMD::eDefault : return "Default";
-      case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed";
-      case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed";
-      default: return "invalid";
+      case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
+      case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }

   enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
   {
-    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
-    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
-    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
-    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
-    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
-    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
+    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
     eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
     eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD
   };
@@ -6289,986 +8323,76 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal";
-      case MemoryPropertyFlagBits::eHostVisible : return "HostVisible";
-      case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent";
-      case MemoryPropertyFlagBits::eHostCached : return "HostCached";
-      case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated";
-      case MemoryPropertyFlagBits::eProtected : return "Protected";
-      case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD";
-      case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD";
-      default: return "invalid";
-    }
-  }
-
-  enum class ObjectType
-  {
-    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
-    eInstance = VK_OBJECT_TYPE_INSTANCE,
-    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
-    eDevice = VK_OBJECT_TYPE_DEVICE,
-    eQueue = VK_OBJECT_TYPE_QUEUE,
-    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
-    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
-    eFence = VK_OBJECT_TYPE_FENCE,
-    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
-    eBuffer = VK_OBJECT_TYPE_BUFFER,
-    eImage = VK_OBJECT_TYPE_IMAGE,
-    eEvent = VK_OBJECT_TYPE_EVENT,
-    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
-    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
-    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
-    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
-    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
-    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
-    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
-    ePipeline = VK_OBJECT_TYPE_PIPELINE,
-    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
-    eSampler = VK_OBJECT_TYPE_SAMPLER,
-    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
-    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
-    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
-    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
-    eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
-    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
-    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
-    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
-    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
-    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
-    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
-    eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
-    eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
-    eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
-    ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
-    eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
-    eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
-    ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
-    eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
-    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
-    eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( ObjectType value )
-  {
-    switch ( value )
-    {
-      case ObjectType::eUnknown : return "Unknown";
-      case ObjectType::eInstance : return "Instance";
-      case ObjectType::ePhysicalDevice : return "PhysicalDevice";
-      case ObjectType::eDevice : return "Device";
-      case ObjectType::eQueue : return "Queue";
-      case ObjectType::eSemaphore : return "Semaphore";
-      case ObjectType::eCommandBuffer : return "CommandBuffer";
-      case ObjectType::eFence : return "Fence";
-      case ObjectType::eDeviceMemory : return "DeviceMemory";
-      case ObjectType::eBuffer : return "Buffer";
-      case ObjectType::eImage : return "Image";
-      case ObjectType::eEvent : return "Event";
-      case ObjectType::eQueryPool : return "QueryPool";
-      case ObjectType::eBufferView : return "BufferView";
-      case ObjectType::eImageView : return "ImageView";
-      case ObjectType::eShaderModule : return "ShaderModule";
-      case ObjectType::ePipelineCache : return "PipelineCache";
-      case ObjectType::ePipelineLayout : return "PipelineLayout";
-      case ObjectType::eRenderPass : return "RenderPass";
-      case ObjectType::ePipeline : return "Pipeline";
-      case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout";
-      case ObjectType::eSampler : return "Sampler";
-      case ObjectType::eDescriptorPool : return "DescriptorPool";
-      case ObjectType::eDescriptorSet : return "DescriptorSet";
-      case ObjectType::eFramebuffer : return "Framebuffer";
-      case ObjectType::eCommandPool : return "CommandPool";
-      case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
-      case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
-      case ObjectType::eSurfaceKHR : return "SurfaceKHR";
-      case ObjectType::eSwapchainKHR : return "SwapchainKHR";
-      case ObjectType::eDisplayKHR : return "DisplayKHR";
-      case ObjectType::eDisplayModeKHR : return "DisplayModeKHR";
-      case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
-      case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT";
-      case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
-      case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT";
-      case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL";
-      case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR";
-      case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV";
-      case ObjectType::ePrivateDataSlotEXT : return "PrivateDataSlotEXT";
-      default: return "invalid";
-    }
-  }
-
-  template <typename T>
-  struct cpp_type
-  {};
-
-  enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
-  {
-    eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
-    eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
-    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
-    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
-  };
-  using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;
-
-  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
-  {
-    switch ( value )
-    {
-      case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc";
-      case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst";
-      case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc";
-      case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst";
-      default: return "invalid";
-    }
-  }
-
-  enum class PerformanceConfigurationTypeINTEL
-  {
-    eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
-  {
-    switch ( value )
-    {
-      case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated";
-      default: return "invalid";
-    }
-  }
-
-  enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
-  {
-    ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR,
-    eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
-  {
-    switch ( value )
-    {
-      case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting : return "PerformanceImpacting";
"PerformanceImpacting"; - case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted : return "ConcurrentlyImpacted"; - default: return "invalid"; - } - } - - enum class PerformanceCounterScopeKHR - { - eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, - eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, - eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, - eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, - eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR, - eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value ) - { - switch ( value ) - { - case PerformanceCounterScopeKHR::eCommandBuffer : return "CommandBuffer"; - case PerformanceCounterScopeKHR::eRenderPass : return "RenderPass"; - case PerformanceCounterScopeKHR::eCommand : return "Command"; - default: return "invalid"; - } - } - - enum class PerformanceCounterStorageKHR - { - eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, - eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, - eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, - eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, - eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR, - eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value ) - { - switch ( value ) - { - case PerformanceCounterStorageKHR::eInt32 : return "Int32"; - case PerformanceCounterStorageKHR::eInt64 : return "Int64"; - case PerformanceCounterStorageKHR::eUint32 : return "Uint32"; - case PerformanceCounterStorageKHR::eUint64 : return "Uint64"; - case PerformanceCounterStorageKHR::eFloat32 : return "Float32"; - case PerformanceCounterStorageKHR::eFloat64 : return "Float64"; - default: return "invalid"; - } - } - - enum class PerformanceCounterUnitKHR - { - eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, - ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, - eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, - eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, - eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR, - eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, - eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, - eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, - eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, - eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, - eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value ) - { - switch ( value ) - { - case PerformanceCounterUnitKHR::eGeneric : return "Generic"; - case PerformanceCounterUnitKHR::ePercentage : return "Percentage"; - case PerformanceCounterUnitKHR::eNanoseconds : return "Nanoseconds"; - case PerformanceCounterUnitKHR::eBytes : return "Bytes"; - case PerformanceCounterUnitKHR::eBytesPerSecond : return "BytesPerSecond"; - case PerformanceCounterUnitKHR::eKelvin : return "Kelvin"; - case PerformanceCounterUnitKHR::eWatts : return "Watts"; - case PerformanceCounterUnitKHR::eVolts : return "Volts"; - case PerformanceCounterUnitKHR::eAmps : return "Amps"; - case PerformanceCounterUnitKHR::eHertz : return "Hertz"; - case PerformanceCounterUnitKHR::eCycles : return "Cycles"; - default: return "invalid"; - } - } - - enum class PerformanceOverrideTypeINTEL - { - eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, - eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL - }; - - VULKAN_HPP_INLINE 
- enum class PerformanceOverrideTypeINTEL
- {
- eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
- eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
- };
-
- VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
- {
- switch ( value )
- {
- case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware";
- case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches";
- default: return "invalid";
- }
- }
-
- enum class PerformanceParameterTypeINTEL
- {
- eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
- eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
- };
-
- VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
- {
- switch ( value )
- {
- case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported";
- case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits";
- default: return "invalid";
- }
- }
-
- enum class PerformanceValueTypeINTEL
- {
- eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
- eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
- eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
- eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
- eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
- };
-
- VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
- {
- switch ( value )
- {
- case PerformanceValueTypeINTEL::eUint32 : return "Uint32";
- case PerformanceValueTypeINTEL::eUint64 : return "Uint64";
- case PerformanceValueTypeINTEL::eFloat : return "Float";
- case PerformanceValueTypeINTEL::eBool : return "Bool";
- case PerformanceValueTypeINTEL::eString : return "String";
- default: return "invalid";
+ case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
+ case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
+ case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
+ case MemoryPropertyFlagBits::eHostCached: return "HostCached";
+ case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
+ case MemoryPropertyFlagBits::eProtected: return "Protected";
+ case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD";
+ case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

enum class PhysicalDeviceType
{
- eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+ eOther         = VK_PHYSICAL_DEVICE_TYPE_OTHER,
eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
- eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
- eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
- eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+ eDiscreteGpu   = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+ eVirtualGpu    = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+ eCpu           = VK_PHYSICAL_DEVICE_TYPE_CPU
};

VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
{
switch ( value )
{
- case PhysicalDeviceType::eOther : return "Other";
- case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu";
- case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu";
- case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu";
- case PhysicalDeviceType::eCpu : return "Cpu";
- default: return "invalid";
- }
- }
-
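PhysicalDeviceType is what device-selection heuristics key on; a minimal sketch (pickGpu is illustrative and assumes a non-empty device list):

    // Prefer a discrete GPU, else fall back to the first enumerated device.
    vk::PhysicalDevice pickGpu( const std::vector<vk::PhysicalDevice> & gpus )
    {
      for ( vk::PhysicalDevice gpu : gpus )
        if ( gpu.getProperties().deviceType == vk::PhysicalDeviceType::eDiscreteGpu )
          return gpu;
      return gpus.front();  // assumption: caller verified gpus is non-empty
    }
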
- enum class PipelineBindPoint
- {
- eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
- eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
- eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
- eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
- {
- switch ( value )
- {
- case PipelineBindPoint::eGraphics : return "Graphics";
- case PipelineBindPoint::eCompute : return "Compute";
- case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR";
- default: return "invalid";
- }
- }
-
- enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
- {
- eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
- {
- switch ( value )
- {
- case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT : return "ExternallySynchronizedEXT";
- default: return "invalid";
- }
- }
-
- enum class PipelineCacheHeaderVersion
- {
- eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
- {
- switch ( value )
- {
- case PipelineCacheHeaderVersion::eOne : return "One";
- default: return "invalid";
- }
- }
-
- enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
- {
- return "(void)";
- }
-
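A pipeline cache built from a previously serialized blob is the usual consumer of PipelineCacheCreateFlagBits; a sketch under the assumption that `blob` was loaded from disk earlier (makeCache is illustrative):

    // An empty blob is fine: the driver then starts with a fresh cache.
    vk::UniquePipelineCache makeCache( vk::Device dev, const std::vector<char> & blob )
    {
      vk::PipelineCacheCreateInfo info;
      info.initialDataSize = blob.size();
      info.pInitialData    = blob.data();
      return dev.createPipelineCacheUnique( info );
    }
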
"RayTracingNoNullMissShadersKHR"; - case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR"; - case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR"; - case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR"; - case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV"; - case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR"; - case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR"; - case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV"; - case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR"; - case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT : return "FailOnPipelineCompileRequiredEXT"; - case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT : return "EarlyReturnOnFailureEXT"; - default: return "invalid"; - } - } - - enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT - { - eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT, - eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT, - eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value ) - { - switch ( value ) - { - case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid"; - case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit"; - case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration"; - default: return "invalid"; - } - } - - enum class PipelineExecutableStatisticFormatKHR - { - eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, - eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, - eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, - eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) - { - switch ( value ) - { - case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32"; - case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64"; - case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64"; - case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64"; - default: return "invalid"; - } - } - - enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags - { - eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, - eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) - { - switch ( value ) - { - case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT"; - case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT"; - default: return "invalid"; - } - } - - enum class PipelineStageFlagBits : VkPipelineStageFlags - { - eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, - eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, - eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, - eTessellationControlShader = 
- enum class PipelineStageFlagBits : VkPipelineStageFlags
- {
- eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
- eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
- eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
- eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
- eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
- eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
- eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
- eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
- eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
- eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
- eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
- eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
- eHost = VK_PIPELINE_STAGE_HOST_BIT,
- eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
- eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
- eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
- eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
- eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
- eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
- eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
- eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
- eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
- eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
- eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
- eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
- {
- switch ( value )
- {
- case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe";
- case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect";
- case PipelineStageFlagBits::eVertexInput : return "VertexInput";
- case PipelineStageFlagBits::eVertexShader : return "VertexShader";
- case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader";
- case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader";
- case PipelineStageFlagBits::eGeometryShader : return "GeometryShader";
- case PipelineStageFlagBits::eFragmentShader : return "FragmentShader";
- case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests";
- case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests";
- case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput";
- case PipelineStageFlagBits::eComputeShader : return "ComputeShader";
- case PipelineStageFlagBits::eTransfer : return "Transfer";
- case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe";
- case PipelineStageFlagBits::eHost : return "Host";
- case PipelineStageFlagBits::eAllGraphics : return "AllGraphics";
- case PipelineStageFlagBits::eAllCommands : return "AllCommands";
- case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
- case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
- case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR";
- case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR";
- case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
- case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV";
- case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
- case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
- case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV";
- default: return "invalid";
- }
- }
-
- enum class PointClippingBehavior
- {
- eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
- eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
- };
- using PointClippingBehaviorKHR = PointClippingBehavior;
-
- VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
- {
- switch ( value )
- {
- case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes";
- case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly";
- default: return "invalid";
- }
- }
-
- enum class PolygonMode
- {
- eFill = VK_POLYGON_MODE_FILL,
- eLine = VK_POLYGON_MODE_LINE,
- ePoint = VK_POLYGON_MODE_POINT,
- eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
- {
- switch ( value )
- {
- case PolygonMode::eFill : return "Fill";
- case PolygonMode::eLine : return "Line";
- case PolygonMode::ePoint : return "Point";
- case PolygonMode::eFillRectangleNV : return "FillRectangleNV";
- default: return "invalid";
- }
- }
-
- enum class PresentModeKHR
- {
- eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
- eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
- eFifo = VK_PRESENT_MODE_FIFO_KHR,
- eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
- eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
- eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
- {
- switch ( value )
- {
- case PresentModeKHR::eImmediate : return "Immediate";
- case PresentModeKHR::eMailbox : return "Mailbox";
- case PresentModeKHR::eFifo : return "Fifo";
- case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed";
- case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh";
- case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh";
- default: return "invalid";
- }
- }
-
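PresentModeKHR selection is the canonical swapchain decision; a minimal sketch (choosePresentMode is illustrative, not part of this patch):

    // Mailbox when available, otherwise FIFO — the only mode the spec guarantees.
    vk::PresentModeKHR choosePresentMode( vk::PhysicalDevice gpu, vk::SurfaceKHR surface )
    {
      for ( vk::PresentModeKHR m : gpu.getSurfacePresentModesKHR( surface ) )
        if ( m == vk::PresentModeKHR::eMailbox )
          return m;
      return vk::PresentModeKHR::eFifo;
    }
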
"TriangleListWithAdjacency"; - case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency"; - case PrimitiveTopology::ePatchList : return "PatchList"; - default: return "invalid"; - } - } - - enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT ) - { - return "(void)"; - } - - enum class QueryControlFlagBits : VkQueryControlFlags - { - ePrecise = VK_QUERY_CONTROL_PRECISE_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value ) - { - switch ( value ) - { - case QueryControlFlagBits::ePrecise : return "Precise"; - default: return "invalid"; - } - } - - enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags - { - eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, - eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, - eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, - eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, - eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, - eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, - eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, - eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, - eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, - eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, - eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value ) - { - switch ( value ) - { - case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices"; - case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives"; - case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations"; - case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations"; - case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives"; - case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations"; - case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives"; - case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations"; - case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches"; - case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations"; - case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations"; - default: return "invalid"; - } - } - - enum class QueryPoolCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits ) - { - return "(void)"; - } - - enum class QueryPoolSamplingModeINTEL - { - eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL - }; - - VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value ) - { - switch ( value ) - { - case QueryPoolSamplingModeINTEL::eManual : return "Manual"; - default: return "invalid"; - } - } - 
- enum class QueryResultFlagBits : VkQueryResultFlags
- {
- e64 = VK_QUERY_RESULT_64_BIT,
- eWait = VK_QUERY_RESULT_WAIT_BIT,
- eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
- ePartial = VK_QUERY_RESULT_PARTIAL_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
- {
- switch ( value )
- {
- case QueryResultFlagBits::e64 : return "64";
- case QueryResultFlagBits::eWait : return "Wait";
- case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
- case QueryResultFlagBits::ePartial : return "Partial";
- default: return "invalid";
- }
- }
-
- enum class QueryType
- {
- eOcclusion = VK_QUERY_TYPE_OCCLUSION,
- ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
- eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
- eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
- ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
- eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
- eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
- ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
- eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( QueryType value )
- {
- switch ( value )
- {
- case QueryType::eOcclusion : return "Occlusion";
- case QueryType::ePipelineStatistics : return "PipelineStatistics";
- case QueryType::eTimestamp : return "Timestamp";
- case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
- case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR";
- case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR";
- case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR";
- case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
- default: return "invalid";
+ case PhysicalDeviceType::eOther: return "Other";
+ case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
+ case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
+ case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
+ case PhysicalDeviceType::eCpu: return "Cpu";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

enum class QueueFlagBits : VkQueueFlags
{
- eGraphics = VK_QUEUE_GRAPHICS_BIT,
- eCompute = VK_QUEUE_COMPUTE_BIT,
- eTransfer = VK_QUEUE_TRANSFER_BIT,
+ eGraphics       = VK_QUEUE_GRAPHICS_BIT,
+ eCompute        = VK_QUEUE_COMPUTE_BIT,
+ eTransfer       = VK_QUEUE_TRANSFER_BIT,
eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
- eProtected = VK_QUEUE_PROTECTED_BIT
+ eProtected      = VK_QUEUE_PROTECTED_BIT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
+ eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
};

VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
{
switch ( value )
{
- case QueueFlagBits::eGraphics : return "Graphics";
- case QueueFlagBits::eCompute : return "Compute";
- case QueueFlagBits::eTransfer : return "Transfer";
- case QueueFlagBits::eSparseBinding : return "SparseBinding";
- case QueueFlagBits::eProtected : return "Protected";
- default: return "invalid";
- }
- }
-
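QueueFlagBits, including the new beta video bits, is consumed by queue-family selection; a minimal sketch (pickGraphicsFamily is illustrative; video-only families simply fall through the test):

    #include <stdexcept>

    uint32_t pickGraphicsFamily( vk::PhysicalDevice gpu )
    {
      std::vector<vk::QueueFamilyProperties> families = gpu.getQueueFamilyProperties();
      for ( uint32_t i = 0; i < families.size(); ++i )
        if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
          return i;
      throw std::runtime_error( "no graphics-capable queue family" );
    }
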
- enum class QueueGlobalPriorityEXT
- {
- eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
- eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
- eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
- eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value )
- {
- switch ( value )
- {
- case QueueGlobalPriorityEXT::eLow : return "Low";
- case QueueGlobalPriorityEXT::eMedium : return "Medium";
- case QueueGlobalPriorityEXT::eHigh : return "High";
- case QueueGlobalPriorityEXT::eRealtime : return "Realtime";
- default: return "invalid";
- }
- }
-
- enum class RasterizationOrderAMD
- {
- eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
- eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
- };
-
- VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
- {
- switch ( value )
- {
- case RasterizationOrderAMD::eStrict : return "Strict";
- case RasterizationOrderAMD::eRelaxed : return "Relaxed";
- default: return "invalid";
- }
- }
-
- enum class RayTracingShaderGroupTypeKHR
- {
- eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
- eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
- eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
- };
- using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
-
- VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
- {
- switch ( value )
- {
- case RayTracingShaderGroupTypeKHR::eGeneral : return "General";
- case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup";
- case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup";
- default: return "invalid";
- }
- }
-
- enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
- {
- eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM
- };
-
- VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
- {
- switch ( value )
- {
- case RenderPassCreateFlagBits::eTransformQCOM : return "TransformQCOM";
- default: return "invalid";
- }
- }
-
- enum class ResolveModeFlagBits : VkResolveModeFlags
- {
- eNone = VK_RESOLVE_MODE_NONE,
- eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
- eAverage = VK_RESOLVE_MODE_AVERAGE_BIT,
- eMin = VK_RESOLVE_MODE_MIN_BIT,
- eMax = VK_RESOLVE_MODE_MAX_BIT
- };
- using ResolveModeFlagBitsKHR = ResolveModeFlagBits;
-
- VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
- {
- switch ( value )
- {
- case ResolveModeFlagBits::eNone : return "None";
- case ResolveModeFlagBits::eSampleZero : return "SampleZero";
- case ResolveModeFlagBits::eAverage : return "Average";
- case ResolveModeFlagBits::eMin : return "Min";
- case ResolveModeFlagBits::eMax : return "Max";
- default: return "invalid";
- }
- }
-
- enum class Result
- {
- eSuccess = VK_SUCCESS,
- eNotReady = VK_NOT_READY,
- eTimeout = VK_TIMEOUT,
- eEventSet = VK_EVENT_SET,
- eEventReset = VK_EVENT_RESET,
- eIncomplete = VK_INCOMPLETE,
- eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
- eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
- eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
- eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
- eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
- eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
- eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
- eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
- eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
- eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
- eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
- eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
- eErrorUnknown = VK_ERROR_UNKNOWN,
- eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
- eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
- eErrorFragmentation = VK_ERROR_FRAGMENTATION,
- eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
- eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
- eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
- eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
- eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
- eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
- eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
- eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
- eErrorIncompatibleVersionKHR = VK_ERROR_INCOMPATIBLE_VERSION_KHR,
- eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
- eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
- eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
- eThreadIdleKHR = VK_THREAD_IDLE_KHR,
- eThreadDoneKHR = VK_THREAD_DONE_KHR,
- eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR,
- eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR,
- ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT,
- eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
- eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
- eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
- eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
- eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
- eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( Result value )
- {
- switch ( value )
- {
- case Result::eSuccess : return "Success";
- case Result::eNotReady : return "NotReady";
- case Result::eTimeout : return "Timeout";
- case Result::eEventSet : return "EventSet";
- case Result::eEventReset : return "EventReset";
- case Result::eIncomplete : return "Incomplete";
- case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory";
- case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory";
- case Result::eErrorInitializationFailed : return "ErrorInitializationFailed";
- case Result::eErrorDeviceLost : return "ErrorDeviceLost";
- case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed";
- case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent";
- case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent";
- case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent";
- case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver";
- case Result::eErrorTooManyObjects : return "ErrorTooManyObjects";
- case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported";
- case Result::eErrorFragmentedPool : return "ErrorFragmentedPool";
- case Result::eErrorUnknown : return "ErrorUnknown";
- case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory";
- case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle";
- case Result::eErrorFragmentation : return "ErrorFragmentation";
- case Result::eErrorInvalidOpaqueCaptureAddress : return "ErrorInvalidOpaqueCaptureAddress";
- case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR";
- case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR";
- case Result::eSuboptimalKHR : return "SuboptimalKHR";
- case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR";
- case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR";
"ErrorIncompatibleDisplayKHR"; - case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT"; - case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV"; - case Result::eErrorIncompatibleVersionKHR : return "ErrorIncompatibleVersionKHR"; - case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; - case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT"; - case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT"; - case Result::eThreadIdleKHR : return "ThreadIdleKHR"; - case Result::eThreadDoneKHR : return "ThreadDoneKHR"; - case Result::eOperationDeferredKHR : return "OperationDeferredKHR"; - case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR"; - case Result::ePipelineCompileRequiredEXT : return "PipelineCompileRequiredEXT"; - default: return "invalid"; + case QueueFlagBits::eGraphics: return "Graphics"; + case QueueFlagBits::eCompute: return "Compute"; + case QueueFlagBits::eTransfer: return "Transfer"; + case QueueFlagBits::eSparseBinding: return "SparseBinding"; + case QueueFlagBits::eProtected: return "Protected"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR"; + case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } enum class SampleCountFlagBits : VkSampleCountFlags { - e1 = VK_SAMPLE_COUNT_1_BIT, - e2 = VK_SAMPLE_COUNT_2_BIT, - e4 = VK_SAMPLE_COUNT_4_BIT, - e8 = VK_SAMPLE_COUNT_8_BIT, + e1 = VK_SAMPLE_COUNT_1_BIT, + e2 = VK_SAMPLE_COUNT_2_BIT, + e4 = VK_SAMPLE_COUNT_4_BIT, + e8 = VK_SAMPLE_COUNT_8_BIT, e16 = VK_SAMPLE_COUNT_16_BIT, e32 = VK_SAMPLE_COUNT_32_BIT, e64 = VK_SAMPLE_COUNT_64_BIT @@ -7278,365 +8402,172 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SampleCountFlagBits::e1 : return "1"; - case SampleCountFlagBits::e2 : return "2"; - case SampleCountFlagBits::e4 : return "4"; - case SampleCountFlagBits::e8 : return "8"; - case SampleCountFlagBits::e16 : return "16"; - case SampleCountFlagBits::e32 : return "32"; - case SampleCountFlagBits::e64 : return "64"; - default: return "invalid"; + case SampleCountFlagBits::e1: return "1"; + case SampleCountFlagBits::e2: return "2"; + case SampleCountFlagBits::e4: return "4"; + case SampleCountFlagBits::e8: return "8"; + case SampleCountFlagBits::e16: return "16"; + case SampleCountFlagBits::e32: return "32"; + case SampleCountFlagBits::e64: return "64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class SamplerAddressMode + enum class SystemAllocationScope { - eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, - eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, - eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, - eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, - eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE }; - VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value ) + VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value ) { switch 
{
switch ( value )
{
- case SamplerAddressMode::eRepeat : return "Repeat";
- case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat";
- case SamplerAddressMode::eClampToEdge : return "ClampToEdge";
- case SamplerAddressMode::eClampToBorder : return "ClampToBorder";
- case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge";
- default: return "invalid";
+ case SystemAllocationScope::eCommand: return "Command";
+ case SystemAllocationScope::eObject: return "Object";
+ case SystemAllocationScope::eCache: return "Cache";
+ case SystemAllocationScope::eDevice: return "Device";
+ case SystemAllocationScope::eInstance: return "Instance";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

- enum class SamplerCreateFlagBits : VkSamplerCreateFlags
+ enum class DeviceCreateFlagBits
{
- eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
- eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
};

- VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
- {
- switch ( value )
- {
- case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
- case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
- default: return "invalid";
- }
- }
-
- enum class SamplerMipmapMode
- {
- eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
- eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
- };
-
- VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
- {
- switch ( value )
- {
- case SamplerMipmapMode::eNearest : return "Nearest";
- case SamplerMipmapMode::eLinear : return "Linear";
- default: return "invalid";
- }
- }
-
- enum class SamplerReductionMode
- {
- eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
- eMin = VK_SAMPLER_REDUCTION_MODE_MIN,
- eMax = VK_SAMPLER_REDUCTION_MODE_MAX
- };
- using SamplerReductionModeEXT = SamplerReductionMode;
-
- VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
- {
- switch ( value )
- {
- case SamplerReductionMode::eWeightedAverage : return "WeightedAverage";
- case SamplerReductionMode::eMin : return "Min";
- case SamplerReductionMode::eMax : return "Max";
- default: return "invalid";
- }
- }
-
- enum class SamplerYcbcrModelConversion
- {
- eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
- eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
- eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
- eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
- eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
- };
- using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
-
- VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
- {
- switch ( value )
- {
- case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity";
- case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity";
- case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709";
- case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601";
- case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020";
- default: return "invalid";
- }
- }
-
- enum class SamplerYcbcrRange
- {
- eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
- eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
- };
- using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
-
- VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
- {
- switch ( value )
- {
- case SamplerYcbcrRange::eItuFull : return "ItuFull";
- case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow";
- default: return "invalid";
- }
- }
-
- enum class ScopeNV
- {
- eDevice = VK_SCOPE_DEVICE_NV,
- eWorkgroup = VK_SCOPE_WORKGROUP_NV,
- eSubgroup = VK_SCOPE_SUBGROUP_NV,
- eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
- {
- switch ( value )
- {
- case ScopeNV::eDevice : return "Device";
- case ScopeNV::eWorkgroup : return "Workgroup";
- case ScopeNV::eSubgroup : return "Subgroup";
- case ScopeNV::eQueueFamily : return "QueueFamily";
- default: return "invalid";
- }
- }
-
- enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags
- {};
-
- VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+ VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
{
return "(void)";
}

- enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
+ enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
{
- eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+ eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
};
- using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;

- VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+ VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
{
switch ( value )
{
- case SemaphoreImportFlagBits::eTemporary : return "Temporary";
- default: return "invalid";
+ case DeviceQueueCreateFlagBits::eProtected: return "Protected";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

- enum class SemaphoreType
+ enum class PipelineStageFlagBits : VkPipelineStageFlags
{
- eBinary = VK_SEMAPHORE_TYPE_BINARY,
- eTimeline = VK_SEMAPHORE_TYPE_TIMELINE
+ eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+ eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+ eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+ eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+ eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+ eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+ eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+ eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+ eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+ eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+ eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+ eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+ eHost = VK_PIPELINE_STAGE_HOST_BIT,
+ eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+ eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+ eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+ eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+ eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+ eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+ eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+ eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+ eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+ eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+ eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR,
+ eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+ eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+ eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV
};

- using SemaphoreTypeKHR = SemaphoreType;

- VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
{
switch ( value )
{
- case SemaphoreType::eBinary : return "Binary";
- case SemaphoreType::eTimeline : return "Timeline";
- default: return "invalid";
+ case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
+ case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
+ case PipelineStageFlagBits::eVertexInput: return "VertexInput";
+ case PipelineStageFlagBits::eVertexShader: return "VertexShader";
+ case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
+ case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+ case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
+ case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
+ case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
+ case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
+ case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
+ case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
+ case PipelineStageFlagBits::eTransfer: return "Transfer";
+ case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
+ case PipelineStageFlagBits::eHost: return "Host";
+ case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
+ case PipelineStageFlagBits::eAllCommands: return "AllCommands";
+ case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT";
+ case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+ case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
+ case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR";
+ case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV";
+ case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV";
+ case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
+ case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+ case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV";
+ case PipelineStageFlagBits::eNoneKHR: return "NoneKHR";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

- enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
+ enum class ImageAspectFlagBits : VkImageAspectFlags
{
- eAny = VK_SEMAPHORE_WAIT_ANY_BIT
+ eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+ eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+ eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+ eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+ ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+ ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+ ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+ eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
+ eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
+ eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
+ eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
+ ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
+ ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
+ ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
};
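The reshuffled PipelineStageFlagBits and ImageAspectFlagBits above are most visible in barriers; a sketch of a transfer-to-shader-read layout transition (image and cmd are assumed to have been created elsewhere):

    vk::ImageMemoryBarrier barrier;
    barrier.srcAccessMask       = vk::AccessFlagBits::eTransferWrite;
    barrier.dstAccessMask       = vk::AccessFlagBits::eShaderRead;
    barrier.oldLayout           = vk::ImageLayout::eTransferDstOptimal;
    barrier.newLayout           = vk::ImageLayout::eShaderReadOnlyOptimal;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image               = image;  // assumed created elsewhere
    barrier.subresourceRange    = { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         {}, nullptr, nullptr, barrier );
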
- using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;

- VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
+ VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
{
switch ( value )
{
- case SemaphoreWaitFlagBits::eAny : return "Any";
- default: return "invalid";
- }
- }
-
- enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
- {};
-
- VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
- {
- return "(void)";
- }
-
- enum class ShaderFloatControlsIndependence
- {
- e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
- eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
- eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE
- };
- using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;
-
- VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
- {
- switch ( value )
- {
- case ShaderFloatControlsIndependence::e32BitOnly : return "32BitOnly";
- case ShaderFloatControlsIndependence::eAll : return "All";
- case ShaderFloatControlsIndependence::eNone : return "None";
- default: return "invalid";
- }
- }
-
- enum class ShaderInfoTypeAMD
- {
- eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
- eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
- eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
- };
-
- VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
- {
- switch ( value )
- {
- case ShaderInfoTypeAMD::eStatistics : return "Statistics";
- case ShaderInfoTypeAMD::eBinary : return "Binary";
- case ShaderInfoTypeAMD::eDisassembly : return "Disassembly";
- default: return "invalid";
- }
- }
-
- enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
- {};
-
- VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class ShaderStageFlagBits : VkShaderStageFlags
- {
- eVertex = VK_SHADER_STAGE_VERTEX_BIT,
- eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
- eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
- eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
- eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
- eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
- eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
- eAll = VK_SHADER_STAGE_ALL,
- eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
- eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
- eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
- eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR,
- eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
- eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
- eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
- eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
- eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
- eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
- eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
- eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
- eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
- eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
- {
- switch ( value )
- {
- case ShaderStageFlagBits::eVertex : return "Vertex";
- case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
- case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
- case ShaderStageFlagBits::eGeometry : return "Geometry";
- case ShaderStageFlagBits::eFragment : return "Fragment";
- case ShaderStageFlagBits::eCompute : return "Compute";
- case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
- case ShaderStageFlagBits::eAll : return "All";
- case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR";
- case ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR";
- case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR";
- case ShaderStageFlagBits::eMissKHR : return "MissKHR";
- case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR";
- case ShaderStageFlagBits::eCallableKHR : return "CallableKHR";
- case ShaderStageFlagBits::eTaskNV : return "TaskNV";
- case ShaderStageFlagBits::eMeshNV : return "MeshNV";
- default: return "invalid";
- }
- }
-
- enum class ShadingRatePaletteEntryNV
- {
- eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
- e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
- e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
- e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
- e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
- e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
- e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
- e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
- e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
- e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
- e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
- e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
- {
- switch ( value )
- {
- case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations";
- case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel";
- case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
- default: return "invalid";
- }
- }
-
- enum class SharingMode
- {
- eExclusive = VK_SHARING_MODE_EXCLUSIVE,
- eConcurrent = VK_SHARING_MODE_CONCURRENT
- };
-
- VULKAN_HPP_INLINE std::string to_string( SharingMode value )
- {
- switch ( value )
- {
- case SharingMode::eExclusive : return "Exclusive";
- case SharingMode::eConcurrent : return "Concurrent";
- default: return "invalid";
+ case ImageAspectFlagBits::eColor: return "Color";
+ case ImageAspectFlagBits::eDepth: return "Depth";
+ case ImageAspectFlagBits::eStencil: return "Stencil";
+ case ImageAspectFlagBits::eMetadata: return "Metadata";
+ case ImageAspectFlagBits::ePlane0: return "Plane0";
+ case ImageAspectFlagBits::ePlane1: return "Plane1";
+ case ImageAspectFlagBits::ePlane2: return "Plane2";
+ case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT";
+ case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT";
+ case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT";
+ case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
{
- eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
- eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+ eSingleMiptail        = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+ eAlignedMipSize       = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
};

@@ -7644,10 +8575,10 @@ namespace VULKAN_HPP_NAMESPACE
{
switch ( value )
{
- case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
- case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
- case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
- default: return "invalid";
+ case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
+ case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
+ case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

@@ -7660,16 +8591,1538 @@ namespace VULKAN_HPP_NAMESPACE
{
switch ( value )
{
- case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
- default: return "invalid";
+ case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ enum class FenceCreateFlagBits : VkFenceCreateFlags
+ {
+ eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case FenceCreateFlagBits::eSignaled: return "Signaled";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ enum class EventCreateFlagBits : VkEventCreateFlags
+ {
+ eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case EventCreateFlagBits::eDeviceOnlyKHR: return "DeviceOnlyKHR";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
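FenceCreateFlagBits::eSignaled supports the common create-signaled-then-wait frame loop; a minimal sketch (device is assumed to exist):

    // Create the per-frame fence already signaled so the first wait returns at once.
    vk::UniqueFence fence = device.createFenceUnique( { vk::FenceCreateFlagBits::eSignaled } );
    (void)device.waitForFences( fence.get(), VK_TRUE, UINT64_MAX );
    device.resetFences( fence.get() );  // re-arm before submitting the next frame
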
VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value ) + { + switch ( value ) + { + case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices"; + case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives"; + case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives"; + case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations"; + case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives"; + case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches"; + case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: + return "TessellationEvaluationShaderInvocations"; + case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryPoolCreateFlagBits + { + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits ) + { + return "(void)"; + } + + enum class QueryResultFlagBits : VkQueryResultFlags + { + e64 = VK_QUERY_RESULT_64_BIT, + eWait = VK_QUERY_RESULT_WAIT_BIT, + eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, + ePartial = VK_QUERY_RESULT_PARTIAL_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value ) + { + switch ( value ) + { + case QueryResultFlagBits::e64: return "64"; + case QueryResultFlagBits::eWait: return "Wait"; + case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; + case QueryResultFlagBits::ePartial: return "Partial"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryType + { + eOcclusion = VK_QUERY_TYPE_OCCLUSION, + ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, + eTimestamp = VK_QUERY_TYPE_TIMESTAMP, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, + ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeBitstreamBufferRangeKHR = 
VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueryType value ) + { + switch ( value ) + { + case QueryType::eOcclusion: return "Occlusion"; + case QueryType::ePipelineStatistics: return "PipelineStatistics"; + case QueryType::eTimestamp: return "Timestamp"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT"; + case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR"; + case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR"; + case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR"; + case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV"; + case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryType::eVideoEncodeBitstreamBufferRangeKHR: return "VideoEncodeBitstreamBufferRangeKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BufferCreateFlagBits : VkBufferCreateFlags + { + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, + eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, + eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value ) + { + switch ( value ) + { + case BufferCreateFlagBits::eSparseBinding: return "SparseBinding"; + case BufferCreateFlagBits::eSparseResidency: return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; + case BufferCreateFlagBits::eProtected: return "Protected"; + case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BufferUsageFlagBits : VkBufferUsageFlags + { + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = 
VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, + eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, + eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, + eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, + eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value ) + { + switch ( value ) + { + case BufferUsageFlagBits::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: + return "AccelerationStructureBuildInputReadOnlyKHR"; + case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; + case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SharingMode + { + eExclusive = VK_SHARING_MODE_EXCLUSIVE, + eConcurrent = VK_SHARING_MODE_CONCURRENT + }; + + VULKAN_HPP_INLINE std::string to_string( SharingMode value ) + { + switch ( value ) + { + case SharingMode::eExclusive: return "Exclusive"; + case SharingMode::eConcurrent: return "Concurrent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageLayout + { + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = 
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, + eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, + eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, + eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, + eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageLayout value ) + { + switch ( value ) + { + case ImageLayout::eUndefined: return "Undefined"; + case ImageLayout::eGeneral: return "General"; + case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal"; + case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal"; + case ImageLayout::ePreinitialized: return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal"; + case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal"; + case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal"; + case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal"; + case ImageLayout::ePresentSrcKHR: 
return "PresentSrcKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; + case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; + case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageLayout::eReadOnlyOptimalKHR: return "ReadOnlyOptimalKHR"; + case ImageLayout::eAttachmentOptimalKHR: return "AttachmentOptimalKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ComponentSwizzle + { + eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A + }; + + VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value ) + { + switch ( value ) + { + case ComponentSwizzle::eIdentity: return "Identity"; + case ComponentSwizzle::eZero: return "Zero"; + case ComponentSwizzle::eOne: return "One"; + case ComponentSwizzle::eR: return "R"; + case ComponentSwizzle::eG: return "G"; + case ComponentSwizzle::eB: return "B"; + case ComponentSwizzle::eA: return "A"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageViewCreateFlagBits : VkImageViewCreateFlags + { + eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT, + eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value ) + { + switch ( value ) + { + case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT"; + case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageViewType + { + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewType value ) + { + switch ( value ) + { + case ImageViewType::e1D: return "1D"; + case ImageViewType::e2D: return "2D"; + case ImageViewType::e3D: return "3D"; + case ImageViewType::eCube: return "Cube"; + case ImageViewType::e1DArray: return "1DArray"; + case ImageViewType::e2DArray: return "2DArray"; + case ImageViewType::eCubeArray: return "CubeArray"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits ) + { 
+ return "(void)"; + } + + enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags + { + eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT: return "ExternallySynchronizedEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BlendFactor + { + eZero = VK_BLEND_FACTOR_ZERO, + eOne = VK_BLEND_FACTOR_ONE, + eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, + eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + eDstColor = VK_BLEND_FACTOR_DST_COLOR, + eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, + eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, + eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, + eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, + eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, + eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, + eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, + eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, + eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA + }; + + VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) + { + switch ( value ) + { + case BlendFactor::eZero: return "Zero"; + case BlendFactor::eOne: return "One"; + case BlendFactor::eSrcColor: return "SrcColor"; + case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor"; + case BlendFactor::eDstColor: return "DstColor"; + case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha: return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha: return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor: return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor"; + case BlendFactor::eConstantAlpha: return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color: return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha: return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BlendOp + { + eAdd = VK_BLEND_OP_ADD, + eSubtract = VK_BLEND_OP_SUBTRACT, + eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, + eMin = VK_BLEND_OP_MIN, + eMax = VK_BLEND_OP_MAX, + eZeroEXT = VK_BLEND_OP_ZERO_EXT, + eSrcEXT = VK_BLEND_OP_SRC_EXT, + eDstEXT = VK_BLEND_OP_DST_EXT, + eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, + eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, + eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, + eDstInEXT = VK_BLEND_OP_DST_IN_EXT, + eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, + eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, + eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, + eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, + eXorEXT = VK_BLEND_OP_XOR_EXT, + eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, + eScreenEXT = 
VK_BLEND_OP_SCREEN_EXT, + eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, + eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, + eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, + eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, + eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, + eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, + eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, + eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, + eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, + eInvertEXT = VK_BLEND_OP_INVERT_EXT, + eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, + eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, + eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BlendOp value ) + { + switch ( value ) + { + case BlendOp::eAdd: return "Add"; + case BlendOp::eSubtract: return "Subtract"; + case BlendOp::eReverseSubtract: return "ReverseSubtract"; + case BlendOp::eMin: return "Min"; + case BlendOp::eMax: return "Max"; + case BlendOp::eZeroEXT: return "ZeroEXT"; + case BlendOp::eSrcEXT: return "SrcEXT"; + case BlendOp::eDstEXT: return "DstEXT"; + case BlendOp::eSrcOverEXT: return "SrcOverEXT"; + case BlendOp::eDstOverEXT: return "DstOverEXT"; + case BlendOp::eSrcInEXT: return "SrcInEXT"; + case BlendOp::eDstInEXT: return "DstInEXT"; + case BlendOp::eSrcOutEXT: return "SrcOutEXT"; + case BlendOp::eDstOutEXT: return "DstOutEXT"; + case BlendOp::eSrcAtopEXT: return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT: return "DstAtopEXT"; + case BlendOp::eXorEXT: return "XorEXT"; + case BlendOp::eMultiplyEXT: return "MultiplyEXT"; + case BlendOp::eScreenEXT: return "ScreenEXT"; + case BlendOp::eOverlayEXT: return "OverlayEXT"; + case BlendOp::eDarkenEXT: return "DarkenEXT"; + case BlendOp::eLightenEXT: return "LightenEXT"; + case BlendOp::eColordodgeEXT: return "ColordodgeEXT"; + case BlendOp::eColorburnEXT: return "ColorburnEXT"; + case BlendOp::eHardlightEXT: return "HardlightEXT"; + case BlendOp::eSoftlightEXT: return "SoftlightEXT"; + case BlendOp::eDifferenceEXT: return "DifferenceEXT"; + case BlendOp::eExclusionEXT: return "ExclusionEXT"; + case BlendOp::eInvertEXT: return "InvertEXT"; + case BlendOp::eInvertRgbEXT: return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT: return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT: return "LinearburnEXT"; + case BlendOp::eVividlightEXT: return "VividlightEXT"; + case BlendOp::eLinearlightEXT: return "LinearlightEXT"; + case BlendOp::ePinlightEXT: return "PinlightEXT"; + case BlendOp::eHardmixEXT: return "HardmixEXT"; + case BlendOp::eHslHueEXT: return "HslHueEXT"; + case BlendOp::eHslSaturationEXT: return "HslSaturationEXT"; + case BlendOp::eHslColorEXT: return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT"; + case 
BlendOp::ePlusEXT: return "PlusEXT"; + case BlendOp::ePlusClampedEXT: return "PlusClampedEXT"; + case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT"; + case BlendOp::eMinusEXT: return "MinusEXT"; + case BlendOp::eMinusClampedEXT: return "MinusClampedEXT"; + case BlendOp::eContrastEXT: return "ContrastEXT"; + case BlendOp::eInvertOvgEXT: return "InvertOvgEXT"; + case BlendOp::eRedEXT: return "RedEXT"; + case BlendOp::eGreenEXT: return "GreenEXT"; + case BlendOp::eBlueEXT: return "BlueEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ColorComponentFlagBits : VkColorComponentFlags + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value ) + { + switch ( value ) + { + case ColorComponentFlagBits::eR: return "R"; + case ColorComponentFlagBits::eG: return "G"; + case ColorComponentFlagBits::eB: return "B"; + case ColorComponentFlagBits::eA: return "A"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CompareOp + { + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, + eAlways = VK_COMPARE_OP_ALWAYS + }; + + VULKAN_HPP_INLINE std::string to_string( CompareOp value ) + { + switch ( value ) + { + case CompareOp::eNever: return "Never"; + case CompareOp::eLess: return "Less"; + case CompareOp::eEqual: return "Equal"; + case CompareOp::eLessOrEqual: return "LessOrEqual"; + case CompareOp::eGreater: return "Greater"; + case CompareOp::eNotEqual: return "NotEqual"; + case CompareOp::eGreaterOrEqual: return "GreaterOrEqual"; + case CompareOp::eAlways: return "Always"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CullModeFlagBits : VkCullModeFlags + { + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = VK_CULL_MODE_BACK_BIT, + eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK + }; + + VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value ) + { + switch ( value ) + { + case CullModeFlagBits::eNone: return "None"; + case CullModeFlagBits::eFront: return "Front"; + case CullModeFlagBits::eBack: return "Back"; + case CullModeFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR, + 
eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, + eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, + ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, + eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, + eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, + eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, + eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, + eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, + eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, + eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, + eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, + eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT, + eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, + ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, + eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT, + eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, + eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, + ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, + eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DynamicState value ) + { + switch ( value ) + { + case DynamicState::eViewport: return "Viewport"; + case DynamicState::eScissor: return "Scissor"; + case DynamicState::eLineWidth: return "LineWidth"; + case DynamicState::eDepthBias: return "DepthBias"; + case DynamicState::eBlendConstants: return "BlendConstants"; + case DynamicState::eDepthBounds: return "DepthBounds"; + case DynamicState::eStencilCompareMask: return "StencilCompareMask"; + case DynamicState::eStencilWriteMask: return "StencilWriteMask"; + case DynamicState::eStencilReference: return "StencilReference"; + case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; + case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR"; + case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV"; + case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; + case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR"; + case DynamicState::eLineStippleEXT: return "LineStippleEXT"; + case DynamicState::eCullModeEXT: return "CullModeEXT"; + case DynamicState::eFrontFaceEXT: return "FrontFaceEXT"; + case DynamicState::ePrimitiveTopologyEXT: return "PrimitiveTopologyEXT"; + case DynamicState::eViewportWithCountEXT: return "ViewportWithCountEXT"; + case DynamicState::eScissorWithCountEXT: return "ScissorWithCountEXT"; + case DynamicState::eVertexInputBindingStrideEXT: return "VertexInputBindingStrideEXT"; + case DynamicState::eDepthTestEnableEXT: return "DepthTestEnableEXT"; + case DynamicState::eDepthWriteEnableEXT: return "DepthWriteEnableEXT"; + case DynamicState::eDepthCompareOpEXT: return 
"DepthCompareOpEXT"; + case DynamicState::eDepthBoundsTestEnableEXT: return "DepthBoundsTestEnableEXT"; + case DynamicState::eStencilTestEnableEXT: return "StencilTestEnableEXT"; + case DynamicState::eStencilOpEXT: return "StencilOpEXT"; + case DynamicState::eVertexInputEXT: return "VertexInputEXT"; + case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT"; + case DynamicState::eRasterizerDiscardEnableEXT: return "RasterizerDiscardEnableEXT"; + case DynamicState::eDepthBiasEnableEXT: return "DepthBiasEnableEXT"; + case DynamicState::eLogicOpEXT: return "LogicOpEXT"; + case DynamicState::ePrimitiveRestartEnableEXT: return "PrimitiveRestartEnableEXT"; + case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FrontFace + { + eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, + eClockwise = VK_FRONT_FACE_CLOCKWISE + }; + + VULKAN_HPP_INLINE std::string to_string( FrontFace value ) + { + switch ( value ) + { + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + VULKAN_HPP_INLINE std::string to_string( LogicOp value ) + { + switch ( value ) + { + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return "OrInverted"; + case LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineCreateFlagBits : VkPipelineCreateFlags + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + 
eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR: + return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: + return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT: return "FailOnPipelineCompileRequiredEXT"; + case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT: return "EarlyReturnOnFailureEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags + { + eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) + { + switch ( value ) + { + case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT: return "AllowVaryingSubgroupSizeEXT"; + case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT: return 
"RequireFullSubgroupsEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PolygonMode + { + eFill = VK_POLYGON_MODE_FILL, + eLine = VK_POLYGON_MODE_LINE, + ePoint = VK_POLYGON_MODE_POINT, + eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PolygonMode value ) + { + switch ( value ) + { + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PrimitiveTopology + { + ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, + ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST + }; + + VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value ) + { + switch ( value ) + { + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderStageFlagBits : VkShaderStageFlags + { + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV + }; + + 
VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) + { + switch ( value ) + { + case ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return "Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR"; + case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR: return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR: return "CallableKHR"; + case ShaderStageFlagBits::eTaskNV: return "TaskNV"; + case ShaderStageFlagBits::eMeshNV: return "MeshNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class StencilOp + { + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, + eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, + eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + }; + + VULKAN_HPP_INLINE std::string to_string( StencilOp value ) + { + switch ( value ) + { + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + VULKAN_HPP_INLINE std::string to_string( VertexInputRate value ) + { + switch ( value ) + { + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BorderColor + { + eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT, + eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BorderColor value ) + { + switch ( value ) + { + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return 
"FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; + case BorderColor::eFloatCustomEXT: return "FloatCustomEXT"; + case BorderColor::eIntCustomEXT: return "IntCustomEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class Filter + { + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, + eCubicIMG = VK_FILTER_CUBIC_IMG, + eCubicEXT = VK_FILTER_CUBIC_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( Filter value ) + { + switch ( value ) + { + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicIMG: return "CubicIMG"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerAddressMode + { + eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, + eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, + eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, + eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value ) + { + switch ( value ) + { + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerCreateFlagBits : VkSamplerCreateFlags + { + eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, + eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value ) + { + switch ( value ) + { + case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerMipmapMode + { + eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, + eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value ) + { + switch ( value ) + { + case SamplerMipmapMode::eNearest: return "Nearest"; + case SamplerMipmapMode::eLinear: return "Linear"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorPoolCreateFlagBits::eHostOnlyVALVE: return "HostOnlyVALVE"; + default: return "invalid ( " + 
VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags + { + eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE: return "HostOnlyPoolVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, + eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) + { + switch ( value ) + { + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + case DescriptorType::eInlineUniformBlockEXT: return "InlineUniformBlockEXT"; + case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case DescriptorType::eMutableVALVE: return "MutableVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccessFlagBits : VkAccessFlags + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = 
VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, + eNoneKHR = VK_ACCESS_NONE_KHR, + eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) + { + switch ( value ) + { + case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case 
AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; + case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV"; + case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV"; + case AccessFlagBits::eNoneKHR: return "NoneKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value ) + { + switch ( value ) + { + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value ) + { + switch ( value ) + { + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) + { + switch ( value ) + { + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + case AttachmentStoreOp::eNoneQCOM: return "NoneQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class DependencyFlagBits : VkDependencyFlags + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value ) + { + switch ( value ) + { + case DependencyFlagBits::eByRegion: return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags + { + eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value ) + { + switch ( value ) + { + case FramebufferCreateFlagBits::eImageless: return "Imageless"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class PipelineBindPoint + { + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + eRayTracingNV
= VK_PIPELINE_BIND_POINT_RAY_TRACING_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value ) + { + switch ( value ) + { + case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; + case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags + { + eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value ) + { + switch ( value ) + { + case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, + eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, + eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) + { + switch ( value ) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX"; + case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM"; + case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) + { + switch ( value ) + { + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags + { + eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value ) + { + switch ( value ) + { + case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class CommandBufferLevel + { + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value ) + { + switch ( value ) + { + case CommandBufferLevel::ePrimary: return "Primary"; + case CommandBufferLevel::eSecondary: return "Secondary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags + { + eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT + }; + + VULKAN_HPP_INLINE std::string
to_string( CommandBufferResetFlagBits value ) + { + switch ( value ) + { + case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags + { + eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value ) + { + switch ( value ) + { + case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class QueryControlFlagBits : VkQueryControlFlags + { + ePrecise = VK_QUERY_CONTROL_PRECISE_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value ) + { + switch ( value ) + { + case QueryControlFlagBits::ePrecise: return "Precise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + enum class IndexType + { + eUint16 = VK_INDEX_TYPE_UINT16, + eUint32 = VK_INDEX_TYPE_UINT32, + eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eUint8EXT = VK_INDEX_TYPE_UINT8_EXT, + eNoneNV = VK_INDEX_TYPE_NONE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndexType value ) + { + switch ( value ) + { + case IndexType::eUint16: return "Uint16"; + case IndexType::eUint32: return "Uint32"; + case IndexType::eNoneKHR: return "NoneKHR"; + case IndexType::eUint8EXT: return "Uint8EXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; } } enum class StencilFaceFlagBits : VkStencilFaceFlags { - eFront = VK_STENCIL_FACE_FRONT_BIT, - eBack = VK_STENCIL_FACE_BACK_BIT, - eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK, + eFront = VK_STENCIL_FACE_FRONT_BIT, + eBack = VK_STENCIL_FACE_BACK_BIT, + eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK, eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK }; @@ -7677,1075 +10130,16 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case StencilFaceFlagBits::eFront : return "Front"; - case StencilFaceFlagBits::eBack : return "Back"; - case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack"; - default: return "invalid"; - } - } - - enum class StencilOp - { - eKeep = VK_STENCIL_OP_KEEP, - eZero = VK_STENCIL_OP_ZERO, - eReplace = VK_STENCIL_OP_REPLACE, - eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, - eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, - eInvert = VK_STENCIL_OP_INVERT, - eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, - eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP - }; - - VULKAN_HPP_INLINE std::string to_string( StencilOp value ) - { - switch ( value ) - { - case StencilOp::eKeep : return "Keep"; - case StencilOp::eZero : return "Zero"; - case StencilOp::eReplace : return "Replace"; - case StencilOp::eIncrementAndClamp : return "IncrementAndClamp"; - case StencilOp::eDecrementAndClamp : return "DecrementAndClamp"; - case StencilOp::eInvert : return "Invert"; - case StencilOp::eIncrementAndWrap : return "IncrementAndWrap"; - case StencilOp::eDecrementAndWrap : return "DecrementAndWrap"; - default: return "invalid"; -
-  }
-
-  enum class StructureType
-  {
-    eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
-    eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
-    eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
-    eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
-    eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
-    eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
-    eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
-    eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
-    eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
-    eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
-    eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
-    eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
-    eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
-    eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
-    eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
-    eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
-    eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
-    ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
-    ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
-    ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
-    ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
-    ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
-    ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
-    ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
-    ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
-    ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
-    ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
-    ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
-    eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
-    eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
-    ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
-    eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
-    eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
-    eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
-    eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
-    eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
-    eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
-    eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
-    eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
-    eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
-    eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
-    eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
-    eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
-    eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
-    eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
-    eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
-    eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
-    eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
-    eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
-    ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
-    eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
-    eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
-    ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
-    eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
-    eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
-    eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
-    eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
-    eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
-    eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
-    eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
-    eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
-    eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
-    ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
-    eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
-    eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
-    eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
-    eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
-    eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
-    eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
-    ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
-    ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
-    eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
-    eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
-    ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
-    eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
-    ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
-    eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
-    ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
-    ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
-    eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
-    eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
-    ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
-    eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
-    ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
-    ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
-    ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
-    eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
-    ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
-    ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
-    eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
-    eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
-    eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
-    eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
-    eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
-    ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
-    eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
-    eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
-    ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
-    eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
-    ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
-    eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
-    ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
-    eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
-    eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
-    eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
-    ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
-    eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
-    eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
-    eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
-    ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
-    eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
-    ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
-    eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
-    ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
-    ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
-    ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
-    ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
-    ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
-    eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
-    eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
-    eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
-    eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
-    eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
-    eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
-    eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
-    eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
-    ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
-    ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
-    ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
-    ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
-    ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
-    eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
-    ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
-    ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
-    eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
-    eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
-    ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
-    eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
-    ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
-    eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
-    ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
-    eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
-    ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
-    ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
-    eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
-    eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
-    eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
-    ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
-    ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
-    ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
-    eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
-    eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
-    ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
-    ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
-    ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
-    eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
-    eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
-    eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
-    eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
-    ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
-    eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
-    eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
-    eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
-    eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
-    eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
-    ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
-    eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
-    eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
-    eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
-    eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
-    eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
-    eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
-    eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
-    eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
-    eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
-    eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
-    eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
-    eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
-    eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
-    eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
-    eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
-    ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
-    eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
-    eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
-    eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
-    eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
-    eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
-    eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
-    ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
-    ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
-    ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
-    eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
-    eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
-    eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
-    eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
-    ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
-    eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
-    eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
-    eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
-    eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
-    eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
-    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
-    eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
-    ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
-    eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
-    ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
-    eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
-    eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
-    eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
-    eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
-    eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
-    eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
-    eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
-    eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
-    eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
-    eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
-    eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
-    eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
-    eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
-    eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
-    ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
-    eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
-    ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
-    eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
-    ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
-    ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
-    eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
-    eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
-    eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
-    eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
-    eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
-    ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
-    ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
-    ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
-    ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
-    ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
-    ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
-    ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
-    ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
-    ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
-    eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
-    eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
-    eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
-    eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
-    eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
-    eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
-    eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
-    ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
-    ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
-    eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
-    ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
-    eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
-    ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
-    ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
-    ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
-    eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
-    eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
-    eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR,
-    eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR,
-    eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR,
-    eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR,
-    eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR,
-    eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
-    eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
-    eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
-    eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
-    eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
-    eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
-    eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
-    eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
-    eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
-    eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
-    eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
-    eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
-    eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
-    ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
-    ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
-    eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
-    eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
-    eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
-    eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
-    ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
-    ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
-    eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
-    ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
-    ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
-    ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
-    ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
-    eBindAccelerationStructureMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR,
-    eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
-    eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
-    eAccelerationStructureCreateGeometryTypeInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR,
-    eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
-    eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
-    eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
-    eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
-    eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
-    eAccelerationStructureMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR,
-    eAccelerationStructureVersionKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR,
-    eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
-    eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
-    eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
-    ePhysicalDeviceRayTracingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR,
-    ePhysicalDeviceRayTracingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR,
-    eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
-    eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
-    eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
-    eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
-    ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
-    ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
-    ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
-    eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
-    eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
-    ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
-    eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
-    eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
-    eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
-    eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
-    eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
-    ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
-    ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
-    ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
-    ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
-    eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
-    eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
-    eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
-    eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
-    eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
-    eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
-    ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
-    eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
-    eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
-    ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
-    ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
-    ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
-    eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
-    eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
-    eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
-    eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
-    ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
-    ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
-    ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
-    eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
-    ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
-    eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
-    ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
-    ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
-    ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
-    ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
-    ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
-    ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
-    ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
-    ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
-    ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
-    ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
-    ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
-    ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
-    eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
-    eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
-    ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
-    eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
-    eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
-    ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
-    ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
-    ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
-    ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
-    ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
-    eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
-    eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
-    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
-    eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
-    ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
-    ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
-    eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
-    ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
-    ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
-    ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
-    ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
-    ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
-    ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
-    ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
-    eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
-    eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
-    ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
-    ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
-    eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
-    ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
-    eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
-    ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
-    eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
-    ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
-    ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
-    ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
-    eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
-    ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
-    ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
-    eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
-    eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
-    eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
-    eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
-    ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
-    ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
-    ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
-    ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
-    ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
-    eDeferredOperationInfoKHR = VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR,
-    ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
-    ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
-    ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
-    ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
-    ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
-    ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
-    ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
-    ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
-    eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
-    eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
-    eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
-    eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
-    eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
-    eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
-    ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
-    ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
-    ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
-    eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
-    eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
-    ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
-    ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
-    eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
-    ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
-    ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
-    ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
-    ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
-    eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
-    ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
-    ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
-    ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
-    eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
-    eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
-    eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
-    eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
-    eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
-    eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
-    eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
-    eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
-    eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
-    eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
-    eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
-    eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
-    eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
-    eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
-    eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
-    eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
-    eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
-    eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
-    eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
-    eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
-    eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
-    eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
-    eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
-    eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
-    eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
-    eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
-    eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
-    eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
-    eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
-    eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
-    eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
-    eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
-    eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
-    eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
-    eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
-    eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
-    eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
-    eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
-    eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
-    eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
-    eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
-    eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
-    eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
-    eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
-    eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
-    eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
-    eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
-    eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
-    eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
-    eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
-    eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
-    ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
-    ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
-    ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
-    ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
-    ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
-    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
-    ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
-    ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
-    ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
-    ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
-    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
-    ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
-    ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
-    ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
-    ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
-    ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
-    ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
-    ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
-    ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
-    ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
-    ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
-    ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
-    ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
-    ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
-    ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
-    ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
-    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
-    ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
-    ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
-    ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
-    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
-    ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
-    ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
-    ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
-    ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
-    ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
-    ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
-    ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
-    ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
-    ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
-    ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
-    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
-    ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
-    eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
-    eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
-    eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
-    eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
-    eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
-    eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
-    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
-    eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
-    eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
-    eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
-    eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
-    eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
-    eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
-    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
-    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
-    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
-    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
-    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
-    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
-    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
-    eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
-    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( StructureType value )
-  {
-    switch ( value )
-    {
-      case StructureType::eApplicationInfo : return "ApplicationInfo";
-      case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo";
-      case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo";
-      case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo";
-      case StructureType::eSubmitInfo : return "SubmitInfo";
-      case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo";
-      case StructureType::eMappedMemoryRange : return "MappedMemoryRange";
-      case StructureType::eBindSparseInfo : return "BindSparseInfo";
-      case StructureType::eFenceCreateInfo : return "FenceCreateInfo";
-      case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo";
-      case StructureType::eEventCreateInfo : return "EventCreateInfo";
-      case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo";
-      case StructureType::eBufferCreateInfo : return "BufferCreateInfo";
-      case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo";
-      case StructureType::eImageCreateInfo : return "ImageCreateInfo";
-      case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo";
-      case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo";
-      case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo";
-      case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo";
-      case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo";
-      case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo";
-      case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo";
-      case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo";
-      case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo";
-      case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo";
-      case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo";
-      case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo";
-      case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo";
-      case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo";
-      case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo";
-      case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo";
-      case StructureType::eSamplerCreateInfo : return "SamplerCreateInfo";
-      case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo";
-      case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo";
-      case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo";
-      case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet";
-      case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet";
-      case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo";
-      case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo";
-      case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo";
-      case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo";
-      case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo";
-      case StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo";
-      case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo";
-      case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier";
-      case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier";
-      case StructureType::eMemoryBarrier : return "MemoryBarrier";
-      case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo";
-      case StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo";
-      case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties";
-      case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo";
-      case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo";
-      case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures";
-      case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements";
-      case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo";
-      case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo";
-      case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo";
-      case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo";
-      case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo";
-      case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo";
-      case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo";
-      case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo";
-      case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties";
-      case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo";
-      case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2";
-      case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2";
-      case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2";
-      case StructureType::eMemoryRequirements2 : return "MemoryRequirements2";
-      case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2";
-      case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2";
-      case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2";
-      case StructureType::eFormatProperties2 : return "FormatProperties2";
-      case StructureType::eImageFormatProperties2 : return "ImageFormatProperties2";
-      case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2";
-      case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2";
-      case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2";
-      case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2";
-      case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2";
-      case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties";
-      case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return
"RenderPassInputAttachmentAspectCreateInfo"; - case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo"; - case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : return "PipelineTessellationDomainOriginStateCreateInfo"; - case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo"; - case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures"; - case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties"; - case StructureType::ePhysicalDeviceVariablePointersFeatures : return "PhysicalDeviceVariablePointersFeatures"; - case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo"; - case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures"; - case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties"; - case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2"; - case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo"; - case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo"; - case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo"; - case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo"; - case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures"; - case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties"; - case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo"; - case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo"; - case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties"; - case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo"; - case StructureType::eExternalBufferProperties : return "ExternalBufferProperties"; - case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties"; - case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo"; - case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo"; - case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo"; - case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo"; - case StructureType::eExternalFenceProperties : return "ExternalFenceProperties"; - case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo"; - case StructureType::eExportSemaphoreCreateInfo : return "ExportSemaphoreCreateInfo"; - case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo"; - case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties"; - case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties"; - case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport"; - case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures"; - case StructureType::ePhysicalDeviceVulkan11Features : return "PhysicalDeviceVulkan11Features"; - case StructureType::ePhysicalDeviceVulkan11Properties : return 
"PhysicalDeviceVulkan11Properties"; - case StructureType::ePhysicalDeviceVulkan12Features : return "PhysicalDeviceVulkan12Features"; - case StructureType::ePhysicalDeviceVulkan12Properties : return "PhysicalDeviceVulkan12Properties"; - case StructureType::eImageFormatListCreateInfo : return "ImageFormatListCreateInfo"; - case StructureType::eAttachmentDescription2 : return "AttachmentDescription2"; - case StructureType::eAttachmentReference2 : return "AttachmentReference2"; - case StructureType::eSubpassDescription2 : return "SubpassDescription2"; - case StructureType::eSubpassDependency2 : return "SubpassDependency2"; - case StructureType::eRenderPassCreateInfo2 : return "RenderPassCreateInfo2"; - case StructureType::eSubpassBeginInfo : return "SubpassBeginInfo"; - case StructureType::eSubpassEndInfo : return "SubpassEndInfo"; - case StructureType::ePhysicalDevice8BitStorageFeatures : return "PhysicalDevice8BitStorageFeatures"; - case StructureType::ePhysicalDeviceDriverProperties : return "PhysicalDeviceDriverProperties"; - case StructureType::ePhysicalDeviceShaderAtomicInt64Features : return "PhysicalDeviceShaderAtomicInt64Features"; - case StructureType::ePhysicalDeviceShaderFloat16Int8Features : return "PhysicalDeviceShaderFloat16Int8Features"; - case StructureType::ePhysicalDeviceFloatControlsProperties : return "PhysicalDeviceFloatControlsProperties"; - case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo : return "DescriptorSetLayoutBindingFlagsCreateInfo"; - case StructureType::ePhysicalDeviceDescriptorIndexingFeatures : return "PhysicalDeviceDescriptorIndexingFeatures"; - case StructureType::ePhysicalDeviceDescriptorIndexingProperties : return "PhysicalDeviceDescriptorIndexingProperties"; - case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo : return "DescriptorSetVariableDescriptorCountAllocateInfo"; - case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport : return "DescriptorSetVariableDescriptorCountLayoutSupport"; - case StructureType::ePhysicalDeviceDepthStencilResolveProperties : return "PhysicalDeviceDepthStencilResolveProperties"; - case StructureType::eSubpassDescriptionDepthStencilResolve : return "SubpassDescriptionDepthStencilResolve"; - case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures : return "PhysicalDeviceScalarBlockLayoutFeatures"; - case StructureType::eImageStencilUsageCreateInfo : return "ImageStencilUsageCreateInfo"; - case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties : return "PhysicalDeviceSamplerFilterMinmaxProperties"; - case StructureType::eSamplerReductionModeCreateInfo : return "SamplerReductionModeCreateInfo"; - case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures : return "PhysicalDeviceVulkanMemoryModelFeatures"; - case StructureType::ePhysicalDeviceImagelessFramebufferFeatures : return "PhysicalDeviceImagelessFramebufferFeatures"; - case StructureType::eFramebufferAttachmentsCreateInfo : return "FramebufferAttachmentsCreateInfo"; - case StructureType::eFramebufferAttachmentImageInfo : return "FramebufferAttachmentImageInfo"; - case StructureType::eRenderPassAttachmentBeginInfo : return "RenderPassAttachmentBeginInfo"; - case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures : return "PhysicalDeviceUniformBufferStandardLayoutFeatures"; - case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures : return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures"; - case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures : return 
"PhysicalDeviceSeparateDepthStencilLayoutsFeatures"; - case StructureType::eAttachmentReferenceStencilLayout : return "AttachmentReferenceStencilLayout"; - case StructureType::eAttachmentDescriptionStencilLayout : return "AttachmentDescriptionStencilLayout"; - case StructureType::ePhysicalDeviceHostQueryResetFeatures : return "PhysicalDeviceHostQueryResetFeatures"; - case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures : return "PhysicalDeviceTimelineSemaphoreFeatures"; - case StructureType::ePhysicalDeviceTimelineSemaphoreProperties : return "PhysicalDeviceTimelineSemaphoreProperties"; - case StructureType::eSemaphoreTypeCreateInfo : return "SemaphoreTypeCreateInfo"; - case StructureType::eTimelineSemaphoreSubmitInfo : return "TimelineSemaphoreSubmitInfo"; - case StructureType::eSemaphoreWaitInfo : return "SemaphoreWaitInfo"; - case StructureType::eSemaphoreSignalInfo : return "SemaphoreSignalInfo"; - case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures : return "PhysicalDeviceBufferDeviceAddressFeatures"; - case StructureType::eBufferDeviceAddressInfo : return "BufferDeviceAddressInfo"; - case StructureType::eBufferOpaqueCaptureAddressCreateInfo : return "BufferOpaqueCaptureAddressCreateInfo"; - case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo : return "MemoryOpaqueCaptureAddressAllocateInfo"; - case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo : return "DeviceMemoryOpaqueCaptureAddressInfo"; - case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR"; - case StructureType::ePresentInfoKHR : return "PresentInfoKHR"; - case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR"; - case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR"; - case StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR"; - case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR"; - case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR"; - case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR"; - case StructureType::eDisplayModeCreateInfoKHR : return "DisplayModeCreateInfoKHR"; - case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR"; - case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR"; - case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR"; - case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR"; - case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR"; - case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR"; - case StructureType::eWin32SurfaceCreateInfoKHR : return "Win32SurfaceCreateInfoKHR"; - case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT"; - case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD"; - case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT"; - case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT"; - case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT"; - case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV"; - case StructureType::eDedicatedAllocationBufferCreateInfoNV : return 
"DedicatedAllocationBufferCreateInfoNV"; - case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV"; - case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT"; - case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT"; - case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT"; - case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX"; - case StructureType::eImageViewAddressPropertiesNVX : return "ImageViewAddressPropertiesNVX"; - case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD"; - case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP"; - case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV"; - case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV"; - case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV"; - case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV"; - case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV"; - case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV"; - case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT"; - case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN"; - case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT : return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT"; - case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT"; - case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT"; - case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR"; - case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR"; - case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR"; - case StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR"; - case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR"; - case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR"; - case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR"; - case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR"; - case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR"; - case StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR"; - case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR"; - case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR"; - case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR"; - case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR"; - case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR"; - case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT"; - case 
StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT"; - case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT"; - case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR"; - case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV"; - case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT"; - case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT"; - case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT"; - case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT"; - case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT"; - case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE"; - case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; - case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV"; - case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT"; - case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; - case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT"; - case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT"; - case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT"; - case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR"; - case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR"; - case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR"; - case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR"; - case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR"; - case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR"; - case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR : return "PhysicalDevicePerformanceQueryFeaturesKHR"; - case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR : return "PhysicalDevicePerformanceQueryPropertiesKHR"; - case StructureType::eQueryPoolPerformanceCreateInfoKHR : return "QueryPoolPerformanceCreateInfoKHR"; - case StructureType::ePerformanceQuerySubmitInfoKHR : return "PerformanceQuerySubmitInfoKHR"; - case StructureType::eAcquireProfilingLockInfoKHR : return "AcquireProfilingLockInfoKHR"; - case StructureType::ePerformanceCounterKHR : return "PerformanceCounterKHR"; - case StructureType::ePerformanceCounterDescriptionKHR : return "PerformanceCounterDescriptionKHR"; - case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR"; - case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR"; - case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR"; - case StructureType::eDisplayProperties2KHR : return 
"DisplayProperties2KHR"; - case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR"; - case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR"; - case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR"; - case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR"; - case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK"; - case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK"; - case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT"; - case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT"; - case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT"; - case StructureType::eDebugUtilsMessengerCallbackDataEXT : return "DebugUtilsMessengerCallbackDataEXT"; - case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT"; - case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID"; - case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID"; - case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID"; - case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID"; - case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID"; - case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID"; - case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT"; - case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT"; - case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT"; - case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT : return "DescriptorPoolInlineUniformBlockCreateInfoEXT"; - case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT"; - case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT"; - case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT"; - case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT"; - case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; - case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; - case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT"; - case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV"; - case StructureType::eBindAccelerationStructureMemoryInfoKHR : return "BindAccelerationStructureMemoryInfoKHR"; - case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR"; - case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR"; - case 
StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR : return "AccelerationStructureCreateGeometryTypeInfoKHR"; - case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR"; - case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR"; - case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR"; - case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR"; - case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR"; - case StructureType::eAccelerationStructureMemoryRequirementsInfoKHR : return "AccelerationStructureMemoryRequirementsInfoKHR"; - case StructureType::eAccelerationStructureVersionKHR : return "AccelerationStructureVersionKHR"; - case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR"; - case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR"; - case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR"; - case StructureType::ePhysicalDeviceRayTracingFeaturesKHR : return "PhysicalDeviceRayTracingFeaturesKHR"; - case StructureType::ePhysicalDeviceRayTracingPropertiesKHR : return "PhysicalDeviceRayTracingPropertiesKHR"; - case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR"; - case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR"; - case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR"; - case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR"; - case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV"; - case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; - case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; - case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT"; - case StructureType::eDrmFormatModifierPropertiesEXT : return "DrmFormatModifierPropertiesEXT"; - case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; - case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT"; - case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return "ImageDrmFormatModifierExplicitCreateInfoEXT"; - case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT"; - case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT"; - case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT"; - case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV"; - case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV"; - case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV"; - case 
StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; - case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV"; - case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV"; - case StructureType::eGeometryNV : return "GeometryNV"; - case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; - case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; - case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; - case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; - case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; - case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV"; - case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; - case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; - case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT"; - case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT"; - case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT"; - case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT"; - case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT"; - case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; - case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR"; - case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD"; - case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT"; - case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD"; - case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; - case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; - case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP"; - case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT"; - case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; - case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV"; - case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV"; - case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV : return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; - case 
StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV"; - case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV"; - case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV"; - case StructureType::eCheckpointDataNV : return "CheckpointDataNV"; - case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV"; - case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; - case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL : return "QueryPoolPerformanceQueryCreateInfoINTEL"; - case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL"; - case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL"; - case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL"; - case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL"; - case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL"; - case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT"; - case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD"; - case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD"; - case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA"; - case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT"; - case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; - case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; - case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT"; - case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT"; - case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"; - case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT"; - case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD"; - case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD"; - case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT"; - case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT"; - case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT"; - case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR"; - case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; - case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; - case 
StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT"; - case StructureType::ePhysicalDeviceToolPropertiesEXT : return "PhysicalDeviceToolPropertiesEXT"; - case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT"; - case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV"; - case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV"; - case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV"; - case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV"; - case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV"; - case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV"; - case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; - case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; - case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT"; - case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT"; - case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT"; - case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT"; - case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT"; - case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; - case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT : return "PhysicalDeviceExtendedDynamicStateFeaturesEXT"; - case StructureType::eDeferredOperationInfoKHR : return "DeferredOperationInfoKHR"; - case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; - case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR"; - case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR"; - case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR"; - case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR"; - case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR"; - case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; - case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; - case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV"; - case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV"; - case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV"; - 
case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV"; - case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV"; - case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV"; - case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; - case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; - case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; - case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; - case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM"; - case StructureType::ePhysicalDeviceRobustness2FeaturesEXT : return "PhysicalDeviceRobustness2FeaturesEXT"; - case StructureType::ePhysicalDeviceRobustness2PropertiesEXT : return "PhysicalDeviceRobustness2PropertiesEXT"; - case StructureType::eSamplerCustomBorderColorCreateInfoEXT : return "SamplerCustomBorderColorCreateInfoEXT"; - case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT : return "PhysicalDeviceCustomBorderColorPropertiesEXT"; - case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT : return "PhysicalDeviceCustomBorderColorFeaturesEXT"; - case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR"; - case StructureType::ePhysicalDevicePrivateDataFeaturesEXT : return "PhysicalDevicePrivateDataFeaturesEXT"; - case StructureType::eDevicePrivateDataCreateInfoEXT : return "DevicePrivateDataCreateInfoEXT"; - case StructureType::ePrivateDataSlotCreateInfoEXT : return "PrivateDataSlotCreateInfoEXT"; - case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT"; - case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; - case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV"; - default: return "invalid"; - } - } - - enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags - { - eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, - eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, - eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, - eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, - eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, - eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, - eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, - eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, - ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV - }; - - VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value ) - { - switch ( value ) - { - case SubgroupFeatureFlagBits::eBasic : return "Basic"; - case SubgroupFeatureFlagBits::eVote : return "Vote"; - case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic"; - case SubgroupFeatureFlagBits::eBallot : return "Ballot"; - case SubgroupFeatureFlagBits::eShuffle : return "Shuffle"; - case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative"; - case SubgroupFeatureFlagBits::eClustered : return "Clustered"; - case SubgroupFeatureFlagBits::eQuad : return "Quad"; - case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV"; - default: return "invalid"; + case 
StencilFaceFlagBits::eFront: return "Front"; + case StencilFaceFlagBits::eBack: return "Back"; + case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } enum class SubpassContents { - eInline = VK_SUBPASS_CONTENTS_INLINE, + eInline = VK_SUBPASS_CONTENTS_INLINE, eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS }; @@ -8753,113 +10147,98 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case SubpassContents::eInline : return "Inline"; - case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers"; - default: return "invalid"; + case SubpassContents::eInline: return "Inline"; + case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags + //=== VK_VERSION_1_1 === + + enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags { - ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, - ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, - eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, - eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM + eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, + eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, + eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, + eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, + eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, + eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, + eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, + eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, + ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV }; - VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value ) { switch ( value ) { - case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX"; - case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX"; - case SubpassDescriptionFlagBits::eFragmentRegionQCOM : return "FragmentRegionQCOM"; - case SubpassDescriptionFlagBits::eShaderResolveQCOM : return "ShaderResolveQCOM"; - default: return "invalid"; + case SubgroupFeatureFlagBits::eBasic: return "Basic"; + case SubgroupFeatureFlagBits::eVote: return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic"; + case SubgroupFeatureFlagBits::eBallot: return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle: return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered: return "Clustered"; + case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT + enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags { - eVblank = VK_SURFACE_COUNTER_VBLANK_EXT + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT }; + using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits; - VULKAN_HPP_INLINE std::string 
to_string( SurfaceCounterFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value ) { switch ( value ) { - case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank"; - default: return "invalid"; + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags { - eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, - eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, - eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, - eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, - eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, - eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, - eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, - eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, - eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT }; + using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; - VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value ) + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value ) { switch ( value ) { - case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity"; - case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90"; - case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180"; - case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270"; - case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit"; - default: return "invalid"; + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress"; + case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR + enum class PointClippingBehavior { - eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, - eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, - eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY }; + using PointClippingBehaviorKHR = PointClippingBehavior; - VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value ) + VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value ) { switch ( value ) { - case 
SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions"; - case SwapchainCreateFlagBitsKHR::eProtected : return "Protected"; - case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat"; - default: return "invalid"; - } - } - - enum class SystemAllocationScope - { - eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, - eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, - eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, - eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - }; - - VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value ) - { - switch ( value ) - { - case SystemAllocationScope::eCommand : return "Command"; - case SystemAllocationScope::eObject : return "Object"; - case SystemAllocationScope::eCache : return "Cache"; - case SystemAllocationScope::eDevice : return "Device"; - case SystemAllocationScope::eInstance : return "Instance"; - default: return "invalid"; + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8874,75 +10253,1115 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case TessellationDomainOrigin::eUpperLeft : return "UpperLeft"; - case TessellationDomainOrigin::eLowerLeft : return "LowerLeft"; - default: return "invalid"; + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class TimeDomainEXT + enum class SamplerYcbcrModelConversion { - eDevice = VK_TIME_DOMAIN_DEVICE_EXT, - eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, - eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, - eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 }; + using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion; - VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value ) + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value ) { switch ( value ) { - case TimeDomainEXT::eDevice : return "Device"; - case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic"; - case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw"; - case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter"; - default: return "invalid"; + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT + enum class SamplerYcbcrRange { - eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT, - eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT, - eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT, - 
eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT, - eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT, - eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT, - eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW }; + using SamplerYcbcrRangeKHR = SamplerYcbcrRange; - VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value ) { switch ( value ) { - case ToolPurposeFlagBitsEXT::eValidation : return "Validation"; - case ToolPurposeFlagBitsEXT::eProfiling : return "Profiling"; - case ToolPurposeFlagBitsEXT::eTracing : return "Tracing"; - case ToolPurposeFlagBitsEXT::eAdditionalFeatures : return "AdditionalFeatures"; - case ToolPurposeFlagBitsEXT::eModifyingFeatures : return "ModifyingFeatures"; - case ToolPurposeFlagBitsEXT::eDebugReporting : return "DebugReporting"; - case ToolPurposeFlagBitsEXT::eDebugMarkers : return "DebugMarkers"; - default: return "invalid"; + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class ValidationCacheHeaderVersionEXT + enum class ChromaLocation { - eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT }; + using ChromaLocationKHR = ChromaLocation; - VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value ) + VULKAN_HPP_INLINE std::string to_string( ChromaLocation value ) { switch ( value ) { - case ValidationCacheHeaderVersionEXT::eOne : return "One"; - default: return "invalid"; + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + }; + using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value ) + { + switch ( value ) + { + case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + eHostAllocationEXT = 
+    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+  };
+  using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource";
+      case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT";
+      case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable";
+      case ExternalMemoryFeatureFlagBits::eImportable: return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
+  };
+  using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+      case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceFeatureFlagBits::eExportable: return "Exportable";
+      case ExternalFenceFeatureFlagBits::eImportable: return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class FenceImportFlagBits : VkFenceImportFlags
+  {
+    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
+  };
+  using FenceImportFlagBitsKHR = FenceImportFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceImportFlagBits::eTemporary: return "Temporary";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
+  {
+    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+  };
+  using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreImportFlagBits::eTemporary: return "Temporary";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT
+  };
+  using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+      case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence";
+      case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable";
+      case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_2 === + + enum class DriverId + { + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, + eMesaRadv = VK_DRIVER_ID_MESA_RADV, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, + eMoltenvk = VK_DRIVER_ID_MOLTENVK, + eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY, + eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY + }; + using DriverIdKHR = DriverId; + + VULKAN_HPP_INLINE std::string to_string( DriverId value ) + { + switch ( value ) + { + case DriverId::eAmdProprietary: return "AmdProprietary"; + case DriverId::eAmdOpenSource: return "AmdOpenSource"; + case DriverId::eMesaRadv: return "MesaRadv"; + case DriverId::eNvidiaProprietary: return "NvidiaProprietary"; + case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows"; + case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA"; + case DriverId::eImaginationProprietary: return "ImaginationProprietary"; + case DriverId::eQualcommProprietary: return "QualcommProprietary"; + case DriverId::eArmProprietary: return "ArmProprietary"; + case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader"; + case DriverId::eGgpProprietary: return "GgpProprietary"; + case DriverId::eBroadcomProprietary: return "BroadcomProprietary"; + case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe"; + case DriverId::eMoltenvk: return "Moltenvk"; + case DriverId::eCoreaviProprietary: return "CoreaviProprietary"; + case DriverId::eJuiceProprietary: return "JuiceProprietary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderFloatControlsIndependence + { + e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE + }; + using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence; + + VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value ) + { + switch ( value ) + { + case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly"; + case ShaderFloatControlsIndependence::eAll: return "All"; + case ShaderFloatControlsIndependence::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT + }; + using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value ) + { + switch ( value ) + { + case 
+
+  enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags
+  {
+    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
+  };
+  using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
+      case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
+      case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound";
+      case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ResolveModeFlagBits : VkResolveModeFlags
+  {
+    eNone = VK_RESOLVE_MODE_NONE,
+    eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+    eAverage = VK_RESOLVE_MODE_AVERAGE_BIT,
+    eMin = VK_RESOLVE_MODE_MIN_BIT,
+    eMax = VK_RESOLVE_MODE_MAX_BIT
+  };
+  using ResolveModeFlagBitsKHR = ResolveModeFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ResolveModeFlagBits::eNone: return "None";
+      case ResolveModeFlagBits::eSampleZero: return "SampleZero";
+      case ResolveModeFlagBits::eAverage: return "Average";
+      case ResolveModeFlagBits::eMin: return "Min";
+      case ResolveModeFlagBits::eMax: return "Max";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class SamplerReductionMode
+  {
+    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+    eMin = VK_SAMPLER_REDUCTION_MODE_MIN,
+    eMax = VK_SAMPLER_REDUCTION_MODE_MAX
+  };
+  using SamplerReductionModeEXT = SamplerReductionMode;
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
+  {
+    switch ( value )
+    {
+      case SamplerReductionMode::eWeightedAverage: return "WeightedAverage";
+      case SamplerReductionMode::eMin: return "Min";
+      case SamplerReductionMode::eMax: return "Max";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class SemaphoreType
+  {
+    eBinary = VK_SEMAPHORE_TYPE_BINARY,
+    eTimeline = VK_SEMAPHORE_TYPE_TIMELINE
+  };
+  using SemaphoreTypeKHR = SemaphoreType;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
+  {
+    switch ( value )
+    {
+      case SemaphoreType::eBinary: return "Binary";
+      case SemaphoreType::eTimeline: return "Timeline";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
+  {
+    eAny = VK_SEMAPHORE_WAIT_ANY_BIT
+  };
+  using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreWaitFlagBits::eAny: return "Any";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_surface ===
+
+  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
+  {
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
+      case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
+      case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
+      case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
+      case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
+  {
+    switch ( value )
+    {
+      case PresentModeKHR::eImmediate: return "Immediate";
+      case PresentModeKHR::eMailbox: return "Mailbox";
+      case PresentModeKHR::eFifo: return "Fifo";
+      case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
+      case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
+      case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ColorSpaceKHR
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+    eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
+    eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD,
+    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
+  {
+    switch ( value )
+    {
+      case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
+      case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
+      case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
+      case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT";
+      case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
+      case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
+      case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
+      case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
+      case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
"Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT"; + case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT"; + case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR + { + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied"; + case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_swapchain === + + enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR + { + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, + eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value ) + { + switch ( value ) + { + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; + case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR + { + eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value ) + { + switch ( value ) + { + case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_display === + + enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR + { + eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, + eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, + ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, + ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case 
+
+  //=== VK_KHR_swapchain ===
+
+  enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
+  {
+    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
+    eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+      case SwapchainCreateFlagBitsKHR::eProtected: return "Protected";
+      case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
+      case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
+      case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
+      case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_display ===
+
+  enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
+      case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_debug_report ===
+
+  enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportFlagBitsEXT::eInformation: return "Information";
+      case DebugReportFlagBitsEXT::eWarning: return "Warning";
+      case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
+      case DebugReportFlagBitsEXT::eError: return "Error";
+      case DebugReportFlagBitsEXT::eDebug: return "Debug";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT,
+    eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT,
+    eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
+    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
+    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
+    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
+      case DebugReportObjectTypeEXT::eInstance: return "Instance";
+      case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
+      case DebugReportObjectTypeEXT::eDevice: return "Device";
+      case DebugReportObjectTypeEXT::eQueue: return "Queue";
+      case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
+      case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
+      case DebugReportObjectTypeEXT::eFence: return "Fence";
+      case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
+      case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
+      case DebugReportObjectTypeEXT::eImage: return "Image";
+      case DebugReportObjectTypeEXT::eEvent: return "Event";
+      case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
+      case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
+      case DebugReportObjectTypeEXT::eImageView: return "ImageView";
+      case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
+      case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
+      case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
+      case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
+      case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
+      case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
+      case DebugReportObjectTypeEXT::eSampler: return "Sampler";
+      case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
+      case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
+      case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
+      case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
+      case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR";
+      case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR";
+      case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
+      case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR";
+      case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR";
+      case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT";
+      case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
+      case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
+      case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX";
+      case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX";
+      case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
+      case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_AMD_rasterization_order ===
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
+  {
+    switch ( value )
+    {
+      case RasterizationOrderAMD::eStrict: return "Strict";
+      case RasterizationOrderAMD::eRelaxed: return "Relaxed";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_queue ===
+
+  enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR
+  {
+    eInvalid = VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR,
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT,
+    eDecodeH264EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT,
+    eDecodeH265EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCodecOperationFlagBitsKHR::eInvalid: return "Invalid";
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT: return "EncodeH264EXT";
+      case VideoCodecOperationFlagBitsKHR::eDecodeH264EXT: return "DecodeH264EXT";
+      case VideoCodecOperationFlagBitsKHR::eDecodeH265EXT: return "DecodeH265EXT";
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR
+  {
+    eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR,
+    eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR,
+    e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
+    e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR,
+    e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid";
+      case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome";
+      case VideoChromaSubsamplingFlagBitsKHR::e420: return "420";
+      case VideoChromaSubsamplingFlagBitsKHR::e422: return "422";
+      case VideoChromaSubsamplingFlagBitsKHR::e444: return "444";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR
+  {
+    eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
+    e8 = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
+    e10 = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR,
+    e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid";
+      case VideoComponentBitDepthFlagBitsKHR::e8: return "8";
+      case VideoComponentBitDepthFlagBitsKHR::e10: return "10";
+      case VideoComponentBitDepthFlagBitsKHR::e12: return "12";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoCapabilitiesFlagBitsKHR : VkVideoCapabilitiesFlagsKHR
+  {
+    eProtectedContent = VK_VIDEO_CAPABILITIES_PROTECTED_CONTENT_BIT_KHR,
+    eSeparateReferenceImages = VK_VIDEO_CAPABILITIES_SEPARATE_REFERENCE_IMAGES_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCapabilitiesFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCapabilitiesFlagBitsKHR::eProtectedContent: return "ProtectedContent";
return "ProtectedContent"; + case VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR + { + eDefault = VK_VIDEO_SESSION_CREATE_DEFAULT_KHR, + eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value ) + { + switch ( value ) + { + case VideoSessionCreateFlagBitsKHR::eDefault: return "Default"; + case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR + { + eDefault = VK_VIDEO_CODING_CONTROL_DEFAULT_KHR, + eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodingControlFlagBitsKHR::eDefault: return "Default"; + case VideoCodingControlFlagBitsKHR::eReset: return "Reset"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCodingQualityPresetFlagBitsKHR : VkVideoCodingQualityPresetFlagsKHR + { + eDefault = VK_VIDEO_CODING_QUALITY_PRESET_DEFAULT_BIT_KHR, + eNormal = VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR, + ePower = VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR, + eQuality = VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodingQualityPresetFlagBitsKHR::eDefault: return "Default"; + case VideoCodingQualityPresetFlagBitsKHR::eNormal: return "Normal"; + case VideoCodingQualityPresetFlagBitsKHR::ePower: return "Power"; + case VideoCodingQualityPresetFlagBitsKHR::eQuality: return "Quality"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryResultStatusKHR + { + eError = VK_QUERY_RESULT_STATUS_ERROR_KHR, + eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR, + eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value ) + { + switch ( value ) + { + case QueryResultStatusKHR::eError: return "Error"; + case QueryResultStatusKHR::eNotReady: return "NotReady"; + case QueryResultStatusKHR::eComplete: return "Complete"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR + { + eDefault = VK_VIDEO_DECODE_DEFAULT_KHR, + eReserved0 = VK_VIDEO_DECODE_RESERVED_0_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoDecodeFlagBitsKHR::eDefault: return "Default"; + case VideoDecodeFlagBitsKHR::eReserved0: return "Reserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h264 === + + enum class VideoEncodeH264CapabilitiesFlagBitsEXT : 
+  {
+    eVkVideoEncodeH264CapabilityCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT,
+    eVkVideoEncodeH264CapabilityCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT,
+    eVkVideoEncodeH264CapabilityWeightedBiPredImplicit =
+      VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT,
+    eVkVideoEncodeH264CapabilityTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT,
+    eVkVideoEncodeH264CapabilityChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT,
+    eVkVideoEncodeH264CapabilitySecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT,
+    eVkVideoEncodeH264CapabilityDeblockingFilterDisabled =
+      VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT,
+    eVkVideoEncodeH264CapabilityDeblockingFilterEnabled =
+      VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT,
+    eVkVideoEncodeH264CapabilityDeblockingFilterPartial =
+      VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT,
+    eVkVideoEncodeH264CapabilityMultipleSlicePerFrame =
+      VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT,
+    eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize =
+      VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilitiesFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac:
+        return "VkVideoEncodeH264CapabilityCabac";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc:
+        return "VkVideoEncodeH264CapabilityCavlc";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit:
+        return "VkVideoEncodeH264CapabilityWeightedBiPredImplicit";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8:
+        return "VkVideoEncodeH264CapabilityTransform8X8";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset:
+        return "VkVideoEncodeH264CapabilityChromaQpOffset";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset:
+        return "VkVideoEncodeH264CapabilitySecondChromaQpOffset";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled:
+        return "VkVideoEncodeH264CapabilityDeblockingFilterDisabled";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled:
+        return "VkVideoEncodeH264CapabilityDeblockingFilterEnabled";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial:
+        return "VkVideoEncodeH264CapabilityDeblockingFilterPartial";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame:
+        return "VkVideoEncodeH264CapabilityMultipleSlicePerFrame";
+      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize:
+        return "VkVideoEncodeH264CapabilityEvenlyDistributedSliceSize";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT,
+    eSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264InputModeFlagBitsEXT::eFrame: return "Frame";
+      case VideoEncodeH264InputModeFlagBitsEXT::eSlice: return "Slice";
+      case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl: return "NonVcl";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT,
+    eSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264OutputModeFlagBitsEXT::eFrame: return "Frame";
+      case VideoEncodeH264OutputModeFlagBitsEXT::eSlice: return "Slice";
+      case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl: return "NonVcl";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoEncodeH264CreateFlagBitsEXT : VkVideoEncodeH264CreateFlagsEXT
+  {
+    eDefault = VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT,
+    eReserved0 = VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264CreateFlagBitsEXT::eDefault: return "Default";
+      case VideoEncodeH264CreateFlagBitsEXT::eReserved0: return "Reserved0";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_decode_h264 ===
+
+  enum class VideoDecodeH264FieldLayoutFlagBitsEXT : VkVideoDecodeH264FieldLayoutFlagsEXT
+  {
+    eVkVideoDecodeH264ProgressivePicturesOnly = VK_VIDEO_DECODE_H264_PROGRESSIVE_PICTURES_ONLY_EXT,
+    eLineInterlacedPlane = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_LINE_INTERLACED_PLANE_BIT_EXT,
+    eSeparateInterlacedPlane = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_SEPARATE_INTERLACED_PLANE_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264FieldLayoutFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly:
+        return "VkVideoDecodeH264ProgressivePicturesOnly";
+      case VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane: return "LineInterlacedPlane";
+      case VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane: return "SeparateInterlacedPlane";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_AMD_shader_info ===
+
+  enum class ShaderInfoTypeAMD
+  {
+    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
+  {
+    switch ( value )
+    {
+      case ShaderInfoTypeAMD::eStatistics: return "Statistics";
+      case ShaderInfoTypeAMD::eBinary: return "Binary";
+      case ShaderInfoTypeAMD::eDisassembly: return "Disassembly";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
+      case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_validation_flags ===
+
   enum class ValidationCheckEXT
   {
-    eAll = VK_VALIDATION_CHECK_ALL_EXT,
+    eAll     = VK_VALIDATION_CHECK_ALL_EXT,
     eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
   };
 
@@ -8950,96 +11369,94 @@ namespace VULKAN_HPP_NAMESPACE
   {
     switch ( value )
     {
-      case ValidationCheckEXT::eAll : return "All";
-      case ValidationCheckEXT::eShaders : return "Shaders";
-      default: return "invalid";
+      case ValidationCheckEXT::eAll: return "All";
+      case ValidationCheckEXT::eShaders: return "Shaders";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }
 
-  enum class ValidationFeatureDisableEXT
+  //=== VK_EXT_conditional_rendering ===
+
+  enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
   {
-    eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
-    eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
-    eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
-    eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
-    eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
-    eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
-    eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT
+    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
   };
 
-  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
   {
     switch ( value )
     {
-      case ValidationFeatureDisableEXT::eAll : return "All";
-      case ValidationFeatureDisableEXT::eShaders : return "Shaders";
-      case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety";
-      case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters";
-      case ValidationFeatureDisableEXT::eObjectLifetimes : return "ObjectLifetimes";
-      case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks";
-      case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles";
-      default: return "invalid";
+      case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class ValidationFeatureEnableEXT + //=== VK_EXT_display_surface_counter === + + enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT { - eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, - eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, - eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, - eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT + eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT }; - VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value ) { switch ( value ) { - case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted"; - case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot"; - case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices"; - case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf"; - default: return "invalid"; + case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class VendorId + //=== VK_EXT_display_control === + + enum class DisplayPowerStateEXT { - eVIV = VK_VENDOR_ID_VIV, - eVSI = VK_VENDOR_ID_VSI, - eKazan = VK_VENDOR_ID_KAZAN, - eCodeplay = VK_VENDOR_ID_CODEPLAY, - eMESA = VK_VENDOR_ID_MESA + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT }; - VULKAN_HPP_INLINE std::string to_string( VendorId value ) + VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value ) { switch ( value ) { - case VendorId::eVIV : return "VIV"; - case VendorId::eVSI : return "VSI"; - case VendorId::eKazan : return "Kazan"; - case VendorId::eCodeplay : return "Codeplay"; - case VendorId::eMESA : return "MESA"; - default: return "invalid"; + case DisplayPowerStateEXT::eOff: return "Off"; + case DisplayPowerStateEXT::eSuspend: return "Suspend"; + case DisplayPowerStateEXT::eOn: return "On"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - enum class VertexInputRate + enum class DeviceEventTypeEXT { - eVertex = VK_VERTEX_INPUT_RATE_VERTEX, - eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT }; - VULKAN_HPP_INLINE std::string to_string( VertexInputRate value ) + VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value ) { switch ( value ) { - case VertexInputRate::eVertex : return "Vertex"; - case VertexInputRate::eInstance : return "Instance"; - default: return "invalid"; + case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value ) + { + switch ( value ) + { + case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_viewport_swizzle === + enum class ViewportCoordinateSwizzleNV { ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, @@ -9056,19 +11473,1451 @@ namespace 
   {
     switch ( value )
     {
-      case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX";
-      case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX";
-      case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY";
-      case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY";
-      case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ";
-      case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ";
-      case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW";
-      case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW";
-      default: return "invalid";
+      case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
+      case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
+      case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
+      case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
+      case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
+      case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
+      case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
+      case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
     }
   }
 
-  template
+  //=== VK_EXT_discard_rectangles ===
+
+  enum class DiscardRectangleModeEXT
+  {
+    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
+  {
+    switch ( value )
+    {
+      case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
+      case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_conservative_rasterization ===
+
+  enum class ConservativeRasterizationModeEXT
+  {
+    eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+    eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
+    eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case ConservativeRasterizationModeEXT::eDisabled: return "Disabled";
+      case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate";
+      case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_performance_query ===
+
+  enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
+  {
+    ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR,
+    eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting";
+      case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PerformanceCounterScopeKHR
+  {
+    eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+    eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+    eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+    eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
+    eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR,
+    eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer";
+      case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass";
+      case PerformanceCounterScopeKHR::eCommand: return "Command";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PerformanceCounterStorageKHR
+  {
+    eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+    eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
+    eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
+    eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
+    eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
+    eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterStorageKHR::eInt32: return "Int32";
+      case PerformanceCounterStorageKHR::eInt64: return "Int64";
+      case PerformanceCounterStorageKHR::eUint32: return "Uint32";
+      case PerformanceCounterStorageKHR::eUint64: return "Uint64";
+      case PerformanceCounterStorageKHR::eFloat32: return "Float32";
+      case PerformanceCounterStorageKHR::eFloat64: return "Float64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PerformanceCounterUnitKHR
+  {
+    eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+    ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
+    eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
+    eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
+    eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
+    eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
+    eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
+    eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
+    eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
+    eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
+    eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterUnitKHR::eGeneric: return "Generic";
+      case PerformanceCounterUnitKHR::ePercentage: return "Percentage";
+      case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds";
+      case PerformanceCounterUnitKHR::eBytes: return "Bytes";
+      case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond";
+      case PerformanceCounterUnitKHR::eKelvin: return "Kelvin";
+      case PerformanceCounterUnitKHR::eWatts: return "Watts";
+      case PerformanceCounterUnitKHR::eVolts: return "Volts";
+      case PerformanceCounterUnitKHR::eAmps: return "Amps";
+      case PerformanceCounterUnitKHR::eHertz: return "Hertz";
+      case PerformanceCounterUnitKHR::eCycles: return "Cycles";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
+  {
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_debug_utils ===
+
+  enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
+  {
+    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
+  {
+    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
+      case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
+      case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
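Illustrative aside (not from the patch): these severity and type bits are normally consumed by a VK_EXT_debug_utils messenger callback. A minimal sketch that only assumes the C callback signature from vulkan_core.h and reuses the generated to_string for a readable severity tag:

#include <cstdio>
#include <vulkan/vulkan.hpp>

VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT severity,
                                              VkDebugUtilsMessageTypeFlagsEXT /*types*/,
                                              const VkDebugUtilsMessengerCallbackDataEXT * data,
                                              void * /*userData*/ )
{
  // Tag each message with its severity name, e.g. "[Warning] ...".
  std::printf( "[%s] %s\n",
               vk::to_string( static_cast<vk::DebugUtilsMessageSeverityFlagBitsEXT>( severity ) ).c_str(),
               data->pMessage );
  return VK_FALSE;  // VK_FALSE tells the loader not to abort the triggering call
}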
+
+  enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
+  {
+    eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
+    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR
+  };
+  using GeometryFlagBitsNV = GeometryFlagBitsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryFlagBitsKHR::eOpaque: return "Opaque";
+      case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
+  {
+    eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+    eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+    eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+    eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV
+  };
+  using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable";
+      case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise: return "TriangleFrontCounterclockwise";
+      case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque";
+      case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
+  {
+    eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+    eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+    ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+    ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+    eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR
+  };
+  using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate";
+      case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction";
+      case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace";
+      case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild";
+      case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class CopyAccelerationStructureModeKHR
+  {
+    eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+    eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+    eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
+    eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR
+  };
+  using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
+  {
+    switch ( value )
+    {
+      case CopyAccelerationStructureModeKHR::eClone: return "Clone";
+      case CopyAccelerationStructureModeKHR::eCompact: return "Compact";
+      case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize";
+      case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class GeometryTypeKHR
+  {
+    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+    eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
+    eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
+  };
+  using GeometryTypeNV = GeometryTypeKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
+  {
+    switch ( value )
+    {
+      case GeometryTypeKHR::eTriangles: return "Triangles";
+      case GeometryTypeKHR::eAabbs: return "Aabbs";
+      case GeometryTypeKHR::eInstances: return "Instances";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class AccelerationStructureCompatibilityKHR
+  {
+    eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR,
+    eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible";
+      case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR
+  {
+    eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class BuildAccelerationStructureModeKHR
+  {
+    eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
+    eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value )
+  {
+    switch ( value )
+    {
+      case BuildAccelerationStructureModeKHR::eBuild: return "Build";
+      case BuildAccelerationStructureModeKHR::eUpdate: return "Update";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+
+  enum class CoverageModulationModeNV
+  {
+    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageModulationModeNV::eNone: return "None";
+      case CoverageModulationModeNV::eRgb: return "Rgb";
+      case CoverageModulationModeNV::eAlpha: return "Alpha";
+      case CoverageModulationModeNV::eRgba: return "Rgba";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_validation_cache ===
+
+  enum class ValidationCacheHeaderVersionEXT
+  {
+    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationCacheHeaderVersionEXT::eOne: return "One";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
" )"; + } + } + + //=== VK_NV_shading_rate_image === + + enum class ShadingRatePaletteEntryNV + { + eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, + e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, + e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value ) + { + switch ( value ) + { + case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations"; + case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CoarseSampleOrderTypeNV + { + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value ) + { + switch ( value ) + { + case CoarseSampleOrderTypeNV::eDefault: return "Default"; + case CoarseSampleOrderTypeNV::eCustom: return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing === + + enum class AccelerationStructureMemoryRequirementsTypeNV + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + }; + + VULKAN_HPP_INLINE std::string 
+
+  //=== VK_NV_ray_tracing ===
+
+  enum class AccelerationStructureMemoryRequirementsTypeNV
+  {
+    eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
+    eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
+    eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object";
+      case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch";
+      case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_global_priority ===
+
+  enum class QueueGlobalPriorityEXT
+  {
+    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
+    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
+    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
+    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value )
+  {
+    switch ( value )
+    {
+      case QueueGlobalPriorityEXT::eLow: return "Low";
+      case QueueGlobalPriorityEXT::eMedium: return "Medium";
+      case QueueGlobalPriorityEXT::eHigh: return "High";
+      case QueueGlobalPriorityEXT::eRealtime: return "Realtime";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_AMD_pipeline_compiler_control ===
+
+  enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
+  {
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+  enum class TimeDomainEXT
+  {
+    eDevice = VK_TIME_DOMAIN_DEVICE_EXT,
+    eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
+    eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
+    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
+  {
+    switch ( value )
+    {
+      case TimeDomainEXT::eDevice: return "Device";
+      case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic";
+      case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw";
+      case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+
+  enum class MemoryOverallocationBehaviorAMD
+  {
+    eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
+    eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
+    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
+  {
+    switch ( value )
+    {
+      case MemoryOverallocationBehaviorAMD::eDefault: return "Default";
+      case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed";
+      case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_pipeline_creation_feedback ===
+
+  enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT
+  {
+    eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
+    eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
+    eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case PipelineCreationFeedbackFlagBitsEXT::eValid: return "Valid";
+      case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit";
+      case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration: return "BasePipelineAcceleration";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
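+  // [editor's illustrative sketch -- not part of the generated header] These
+  // flag bits are read back from a vk::PipelineCreationFeedbackEXT struct; the
+  // `feedback` variable below is hypothetical:
+  //
+  //   vk::PipelineCreationFeedbackEXT feedback = /* filled in at pipeline creation */;
+  //   if ( feedback.flags & vk::PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit )
+  //     std::puts( "pipeline came from the application's pipeline cache" );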
+
+  //=== VK_INTEL_performance_query ===
+
+  enum class PerformanceConfigurationTypeINTEL
+  {
+    eCommandQueueMetricsDiscoveryActivated =
+      VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated:
+        return "CommandQueueMetricsDiscoveryActivated";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class QueryPoolSamplingModeINTEL
+  {
+    eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
+  {
+    switch ( value )
+    {
+      case QueryPoolSamplingModeINTEL::eManual: return "Manual";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PerformanceOverrideTypeINTEL
+  {
+    eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
+    eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware";
+      case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PerformanceParameterTypeINTEL
+  {
+    eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
+    eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported";
+      case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class PerformanceValueTypeINTEL
+  {
+    eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
+    eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
+    eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
+    eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
+    eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceValueTypeINTEL::eUint32: return "Uint32";
+      case PerformanceValueTypeINTEL::eUint64: return "Uint64";
+      case PerformanceValueTypeINTEL::eFloat: return "Float";
+      case PerformanceValueTypeINTEL::eBool: return "Bool";
+      case PerformanceValueTypeINTEL::eString: return "String";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+  enum class FragmentShadingRateCombinerOpKHR
+  {
+    eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+    eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR,
+    eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR,
+    eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR,
+    eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value )
+  {
+    switch ( value )
+    {
+      case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep";
+      case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace";
+      case FragmentShadingRateCombinerOpKHR::eMin: return "Min";
+      case FragmentShadingRateCombinerOpKHR::eMax: return "Max";
+      case FragmentShadingRateCombinerOpKHR::eMul: return "Mul";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_AMD_shader_core_properties2 ===
+
+  enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
+  {
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_tooling_info ===
+
+  enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT
+  {
+    eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT,
+    eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT,
+    eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT,
+    eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT,
+    eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT,
+    eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
+    eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ToolPurposeFlagBitsEXT::eValidation: return "Validation";
+      case ToolPurposeFlagBitsEXT::eProfiling: return "Profiling";
+      case ToolPurposeFlagBitsEXT::eTracing: return "Tracing";
+      case ToolPurposeFlagBitsEXT::eAdditionalFeatures: return "AdditionalFeatures";
+      case ToolPurposeFlagBitsEXT::eModifyingFeatures: return "ModifyingFeatures";
+      case ToolPurposeFlagBitsEXT::eDebugReporting: return "DebugReporting";
+      case ToolPurposeFlagBitsEXT::eDebugMarkers: return "DebugMarkers";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_validation_features ===
+
+  enum class ValidationFeatureEnableEXT
+  {
+    eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
+    eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
+    eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
+    eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
+    eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted";
+      case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot";
+      case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices";
+      case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf";
+      case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
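+  // [editor's illustrative sketch -- not part of the generated header] These
+  // values feed vk::ValidationFeaturesEXT when creating an instance, e.g. to
+  // turn on synchronization validation:
+  //
+  //   vk::ValidationFeatureEnableEXT enables[] = {
+  //     vk::ValidationFeatureEnableEXT::eSynchronizationValidation };
+  //   vk::ValidationFeaturesEXT features{};
+  //   features.enabledValidationFeatureCount = 1;
+  //   features.pEnabledValidationFeatures = enables;
+  //   // chain `features` into vk::InstanceCreateInfo::pNext before creating the instance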
+
+  enum class ValidationFeatureDisableEXT
+  {
+    eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
+    eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
+    eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
+    eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
+    eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
+    eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
+    eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT,
+    eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureDisableEXT::eAll: return "All";
+      case ValidationFeatureDisableEXT::eShaders: return "Shaders";
+      case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety";
+      case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters";
+      case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes";
+      case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks";
+      case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles";
+      case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_cooperative_matrix ===
+
+  enum class ScopeNV
+  {
+    eDevice = VK_SCOPE_DEVICE_NV,
+    eWorkgroup = VK_SCOPE_WORKGROUP_NV,
+    eSubgroup = VK_SCOPE_SUBGROUP_NV,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
+  {
+    switch ( value )
+    {
+      case ScopeNV::eDevice: return "Device";
+      case ScopeNV::eWorkgroup: return "Workgroup";
+      case ScopeNV::eSubgroup: return "Subgroup";
+      case ScopeNV::eQueueFamily: return "QueueFamily";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ComponentTypeNV
+  {
+    eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
+    eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV,
+    eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV,
+    eSint8 = VK_COMPONENT_TYPE_SINT8_NV,
+    eSint16 = VK_COMPONENT_TYPE_SINT16_NV,
+    eSint32 = VK_COMPONENT_TYPE_SINT32_NV,
+    eSint64 = VK_COMPONENT_TYPE_SINT64_NV,
+    eUint8 = VK_COMPONENT_TYPE_UINT8_NV,
+    eUint16 = VK_COMPONENT_TYPE_UINT16_NV,
+    eUint32 = VK_COMPONENT_TYPE_UINT32_NV,
+    eUint64 = VK_COMPONENT_TYPE_UINT64_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
+  {
+    switch ( value )
+    {
+      case ComponentTypeNV::eFloat16: return "Float16";
+      case ComponentTypeNV::eFloat32: return "Float32";
+      case ComponentTypeNV::eFloat64: return "Float64";
+      case ComponentTypeNV::eSint8: return "Sint8";
+      case ComponentTypeNV::eSint16: return "Sint16";
+      case ComponentTypeNV::eSint32: return "Sint32";
+      case ComponentTypeNV::eSint64: return "Sint64";
+      case ComponentTypeNV::eUint8: return "Uint8";
+      case ComponentTypeNV::eUint16: return "Uint16";
+      case ComponentTypeNV::eUint32: return "Uint32";
+      case ComponentTypeNV::eUint64: return "Uint64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+  enum class CoverageReductionModeNV
+  {
+    eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
+    eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageReductionModeNV::eMerge: return "Merge";
+      case CoverageReductionModeNV::eTruncate: return "Truncate";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_provoking_vertex ===
+
+  enum class ProvokingVertexModeEXT
+  {
+    eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT,
+    eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value )
+  {
+    switch ( value )
+    {
+      case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex";
+      case ProvokingVertexModeEXT::eLastVertex: return "LastVertex";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+  enum class FullScreenExclusiveEXT
+  {
+    eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
+    eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
+    eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
+    eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
+  {
+    switch ( value )
+    {
+      case FullScreenExclusiveEXT::eDefault: return "Default";
+      case FullScreenExclusiveEXT::eAllowed: return "Allowed";
+      case FullScreenExclusiveEXT::eDisallowed: return "Disallowed";
+      case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_line_rasterization ===
+
+  enum class LineRasterizationModeEXT
+  {
+    eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
+    eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
+    eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
+    eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case LineRasterizationModeEXT::eDefault: return "Default";
+      case LineRasterizationModeEXT::eRectangular: return "Rectangular";
+      case LineRasterizationModeEXT::eBresenham: return "Bresenham";
+      case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+  enum class PipelineExecutableStatisticFormatKHR
+  {
+    eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
+    eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
+    eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
+    eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
+  {
+    switch ( value )
+    {
+      case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32";
+      case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64";
+      case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64";
+      case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_device_generated_commands ===
+
+  enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
+  {
+    eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class IndirectCommandsTokenTypeNV
+  {
+    eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
+    eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
+    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
+    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
+    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
+    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
+    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
+    eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup";
+      case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags";
+      case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer";
+      case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer";
+      case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant";
+      case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed";
+      case IndirectCommandsTokenTypeNV::eDraw: return "Draw";
+      case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
+  {
+    eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
+    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess";
+      case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences";
+      case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_device_memory_report ===
+
+  enum class DeviceMemoryReportEventTypeEXT
+  {
+    eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT,
+    eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT,
+    eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT,
+    eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT,
+    eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate";
+      case DeviceMemoryReportEventTypeEXT::eFree: return "Free";
+      case DeviceMemoryReportEventTypeEXT::eImport: return "Import";
+      case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport";
+      case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_private_data ===
+
+  enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT
+  {
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+  enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_DEFAULT_KHR,
+    eReserved0 = VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeFlagBitsKHR::eDefault: return "Default";
+      case VideoEncodeFlagBitsKHR::eReserved0: return "Reserved0";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR,
+    eReset = VK_VIDEO_ENCODE_RATE_CONTROL_RESET_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeRateControlFlagBitsKHR::eDefault: return "Default";
+      case VideoEncodeRateControlFlagBitsKHR::eReset: return "Reset";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR
+  {
+    eNone = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR,
+    eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR,
+    eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeRateControlModeFlagBitsKHR::eNone: return "None";
+      case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr";
+      case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+
+  enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV
+  {
+    eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
+    eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
+    eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo";
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking";
+      case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
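+  // [editor's illustrative sketch -- not part of the generated header] The
+  // 64-bit synchronization2 stage/access bits defined next are normally paired
+  // inside a vk::ImageMemoryBarrier2KHR; assuming `barrier` is such a struct:
+  //
+  //   barrier.srcStageMask  = vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput;
+  //   barrier.srcAccessMask = vk::AccessFlagBits2KHR::eColorAttachmentWrite;
+  //   barrier.dstStageMask  = vk::PipelineStageFlagBits2KHR::eFragmentShader;
+  //   barrier.dstAccessMask = vk::AccessFlagBits2KHR::eShaderSampledRead;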
+
+  //=== VK_KHR_synchronization2 ===
+
+  enum class PipelineStageFlagBits2KHR : VkPipelineStageFlags2KHR
+  {
+    eNone = VK_PIPELINE_STAGE_2_NONE_KHR,
+    eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
+    eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR,
+    eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR,
+    eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR,
+    eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR,
+    eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR,
+    eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR,
+    eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
+    eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
+    eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR,
+    eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
+    eHost = VK_PIPELINE_STAGE_2_HOST_BIT_KHR,
+    eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR,
+    eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR,
+    eCopy = VK_PIPELINE_STAGE_2_COPY_BIT_KHR,
+    eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR,
+    eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT_KHR,
+    eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR,
+    eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR,
+    eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR,
+    ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoDecode = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
+    eVideoEncode = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
+    eFragmentShadingRateAttachment = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eAccelerationStructureBuild = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eRayTracingShader = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+    eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2KHR value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits2KHR::eNone: return "None";
+      case PipelineStageFlagBits2KHR::eTopOfPipe: return "TopOfPipe";
+      case PipelineStageFlagBits2KHR::eDrawIndirect: return "DrawIndirect";
+      case PipelineStageFlagBits2KHR::eVertexInput: return "VertexInput";
+      case PipelineStageFlagBits2KHR::eVertexShader: return "VertexShader";
+      case PipelineStageFlagBits2KHR::eTessellationControlShader: return "TessellationControlShader";
+      case PipelineStageFlagBits2KHR::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+      case PipelineStageFlagBits2KHR::eGeometryShader: return "GeometryShader";
+      case PipelineStageFlagBits2KHR::eFragmentShader: return "FragmentShader";
+      case PipelineStageFlagBits2KHR::eEarlyFragmentTests: return "EarlyFragmentTests";
+      case PipelineStageFlagBits2KHR::eLateFragmentTests: return "LateFragmentTests";
+      case PipelineStageFlagBits2KHR::eColorAttachmentOutput: return "ColorAttachmentOutput";
+      case PipelineStageFlagBits2KHR::eComputeShader: return "ComputeShader";
+      case PipelineStageFlagBits2KHR::eAllTransfer: return "AllTransfer";
+      case PipelineStageFlagBits2KHR::eBottomOfPipe: return "BottomOfPipe";
+      case PipelineStageFlagBits2KHR::eHost: return "Host";
+      case PipelineStageFlagBits2KHR::eAllGraphics: return "AllGraphics";
+      case PipelineStageFlagBits2KHR::eAllCommands: return "AllCommands";
+      case PipelineStageFlagBits2KHR::eCopy: return "Copy";
+      case PipelineStageFlagBits2KHR::eResolve: return "Resolve";
"Blit"; + case PipelineStageFlagBits2KHR::eClear: return "Clear"; + case PipelineStageFlagBits2KHR::eIndexInput: return "IndexInput"; + case PipelineStageFlagBits2KHR::eVertexAttributeInput: return "VertexAttributeInput"; + case PipelineStageFlagBits2KHR::ePreRasterizationShaders: return "PreRasterizationShaders"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineStageFlagBits2KHR::eVideoDecode: return "VideoDecode"; + case PipelineStageFlagBits2KHR::eVideoEncode: return "VideoEncode"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineStageFlagBits2KHR::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits2KHR::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits2KHR::eCommandPreprocessNV: return "CommandPreprocessNV"; + case PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment"; + case PipelineStageFlagBits2KHR::eAccelerationStructureBuild: return "AccelerationStructureBuild"; + case PipelineStageFlagBits2KHR::eRayTracingShader: return "RayTracingShader"; + case PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits2KHR::eTaskShaderNV: return "TaskShaderNV"; + case PipelineStageFlagBits2KHR::eMeshShaderNV: return "MeshShaderNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccessFlagBits2KHR : VkAccessFlags2KHR + { + eNone = VK_ACCESS_2_NONE_KHR, + eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, + eIndexRead = VK_ACCESS_2_INDEX_READ_BIT_KHR, + eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, + eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT_KHR, + eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, + eShaderRead = VK_ACCESS_2_SHADER_READ_BIT_KHR, + eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT_KHR, + eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, + eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, + eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, + eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, + eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT_KHR, + eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, + eHostRead = VK_ACCESS_2_HOST_READ_BIT_KHR, + eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT_KHR, + eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT_KHR, + eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT_KHR, + eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, + eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, + eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeRead = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, + eVideoDecodeWrite = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, + eVideoEncodeRead = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, + eVideoEncodeWrite = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, + 
+    eFragmentShadingRateAttachmentRead = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    eAccelerationStructureRead = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    eAccelerationStructureWrite = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2KHR value )
+  {
+    switch ( value )
+    {
+      case AccessFlagBits2KHR::eNone: return "None";
+      case AccessFlagBits2KHR::eIndirectCommandRead: return "IndirectCommandRead";
+      case AccessFlagBits2KHR::eIndexRead: return "IndexRead";
+      case AccessFlagBits2KHR::eVertexAttributeRead: return "VertexAttributeRead";
+      case AccessFlagBits2KHR::eUniformRead: return "UniformRead";
+      case AccessFlagBits2KHR::eInputAttachmentRead: return "InputAttachmentRead";
+      case AccessFlagBits2KHR::eShaderRead: return "ShaderRead";
+      case AccessFlagBits2KHR::eShaderWrite: return "ShaderWrite";
+      case AccessFlagBits2KHR::eColorAttachmentRead: return "ColorAttachmentRead";
+      case AccessFlagBits2KHR::eColorAttachmentWrite: return "ColorAttachmentWrite";
+      case AccessFlagBits2KHR::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
+      case AccessFlagBits2KHR::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
+      case AccessFlagBits2KHR::eTransferRead: return "TransferRead";
+      case AccessFlagBits2KHR::eTransferWrite: return "TransferWrite";
+      case AccessFlagBits2KHR::eHostRead: return "HostRead";
+      case AccessFlagBits2KHR::eHostWrite: return "HostWrite";
+      case AccessFlagBits2KHR::eMemoryRead: return "MemoryRead";
+      case AccessFlagBits2KHR::eMemoryWrite: return "MemoryWrite";
+      case AccessFlagBits2KHR::eShaderSampledRead: return "ShaderSampledRead";
+      case AccessFlagBits2KHR::eShaderStorageRead: return "ShaderStorageRead";
+      case AccessFlagBits2KHR::eShaderStorageWrite: return "ShaderStorageWrite";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case AccessFlagBits2KHR::eVideoDecodeRead: return "VideoDecodeRead";
+      case AccessFlagBits2KHR::eVideoDecodeWrite: return "VideoDecodeWrite";
+      case AccessFlagBits2KHR::eVideoEncodeRead: return "VideoEncodeRead";
+      case AccessFlagBits2KHR::eVideoEncodeWrite: return "VideoEncodeWrite";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case AccessFlagBits2KHR::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
+      case AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
+      case AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
+      case AccessFlagBits2KHR::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
+      case AccessFlagBits2KHR::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
+      case AccessFlagBits2KHR::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
+      case AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead: return "FragmentShadingRateAttachmentRead";
+      case AccessFlagBits2KHR::eAccelerationStructureRead: return "AccelerationStructureRead";
+      case AccessFlagBits2KHR::eAccelerationStructureWrite: return "AccelerationStructureWrite";
"FragmentDensityMapReadEXT"; + case AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SubmitFlagBitsKHR : VkSubmitFlagsKHR + { + eProtected = VK_SUBMIT_PROTECTED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SubmitFlagBitsKHR value ) + { + switch ( value ) + { + case SubmitFlagBitsKHR::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_fragment_shading_rate_enums === + + enum class FragmentShadingRateNV + { + e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV, + e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV, + e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV, + eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value ) + { + switch ( value ) + { + case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case FragmentShadingRateNV::eNoInvocations: return "NoInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FragmentShadingRateTypeNV + { + eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV, + eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value ) + { + switch ( value ) + { + case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize"; + case FragmentShadingRateTypeNV::eEnums: return "Enums"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_ray_tracing_pipeline === + + enum class RayTracingShaderGroupTypeKHR + { + eGeneral = 
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+  enum class RayTracingShaderGroupTypeKHR
+  {
+    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
+  };
+  using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
+  {
+    switch ( value )
+    {
+      case RayTracingShaderGroupTypeKHR::eGeneral: return "General";
+      case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup";
+      case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  enum class ShaderGroupShaderKHR
+  {
+    eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
+    eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
+    eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR,
+    eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value )
+  {
+    switch ( value )
+    {
+      case ShaderGroupShaderKHR::eGeneral: return "General";
+      case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit";
+      case ShaderGroupShaderKHR::eAnyHit: return "AnyHit";
+      case ShaderGroupShaderKHR::eIntersection: return "Intersection";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  template <ObjectType value>
+  struct cpp_type
+  {};
+
+  template <typename T> struct IndexTypeValue {};
@@ -9108,164 +12957,1199 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = uint8_t;
   };
+
+  //================
+  //=== BITMASKs ===
+  //================
-  using AccessFlags = Flags<AccessFlagBits>;
+  //=== VK_VERSION_1_0 ===
-  template <> struct FlagTraits<AccessFlagBits>
+  using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
+
+  template <>
+  struct FlagTraits<FormatFeatureFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eAccelerationStructureReadKHR) | VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) | VkFlags(AccessFlagBits::eCommandPreprocessReadNV) | VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
+      allFlags =
+        VkFlags( FormatFeatureFlagBits::eSampledImage ) | VkFlags( FormatFeatureFlagBits::eStorageImage ) |
+        VkFlags( FormatFeatureFlagBits::eStorageImageAtomic ) | VkFlags( FormatFeatureFlagBits::eUniformTexelBuffer ) |
+        VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) |
+        VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) | VkFlags( FormatFeatureFlagBits::eVertexBuffer ) |
+        VkFlags( FormatFeatureFlagBits::eColorAttachment ) | VkFlags( FormatFeatureFlagBits::eColorAttachmentBlend ) |
+        VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) | VkFlags( FormatFeatureFlagBits::eBlitSrc ) |
+        VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) |
+        VkFlags( FormatFeatureFlagBits::eTransferSrc ) | VkFlags( FormatFeatureFlagBits::eTransferDst ) |
+        VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) |
+        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) |
+        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) |
+        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) |
+        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) |
+        VkFlags( FormatFeatureFlagBits::eDisjoint ) | VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) |
+        VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax ) |
+        VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicIMG )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VkFlags( FormatFeatureFlagBits::eVideoDecodeOutputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoDecodeDpbKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) |
+        VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) |
+        VkFlags( FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VkFlags( FormatFeatureFlagBits::eVideoEncodeInputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoEncodeDpbKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    };
  };

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0,
+                                                                       FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
-    return AccessFlags( bit0 ) | bit1;
+    return FormatFeatureFlags( bit0 ) | bit1;
  }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&(FormatFeatureFlagBits bit0,
+                                                                      FormatFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
-    return AccessFlags( bit0 ) & bit1;
+    return FormatFeatureFlags( bit0 ) & bit1;
  }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0,
+                                                                       FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
-    return AccessFlags( bit0 ) ^ bit1;
+    return FormatFeatureFlags( bit0 ) ^ bit1;
  }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
-    return ~( AccessFlags( bits ) );
+    return ~( FormatFeatureFlags( bits ) );
  }

-  VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
  {
+    if ( !value )
+      return "{}";
-    if ( !value ) return "{}";
    std::string result;
-
-    if ( value & AccessFlagBits::eIndirectCommandRead ) result += "IndirectCommandRead | ";
-    if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | ";
"IndexRead | "; - if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | "; - if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | "; - if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | "; - if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | "; - if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | "; - if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | "; - if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | "; - if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | "; - if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | "; - if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | "; - if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | "; - if ( value & AccessFlagBits::eHostRead ) result += "HostRead | "; - if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | "; - if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | "; - if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | "; - if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | "; - if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | "; - if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | "; - if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | "; - if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | "; - if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | "; - if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | "; - if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | "; - if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | "; - if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | "; - if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & FormatFeatureFlagBits::eSampledImage ) + result += "SampledImage | "; + if ( value & FormatFeatureFlagBits::eStorageImage ) + result += "StorageImage | "; + if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) + result += "StorageImageAtomic | "; + if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) + result += "StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) + result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits::eBlitSrc ) + result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits::eBlitDst ) + 
result += "BlitDst | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) + result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) + result += "MidpointChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) + result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) + result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) + result += "SampledImageFilterMinmax | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) + result += "SampledImageFilterCubicIMG | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) + result += "VideoDecodeOutputKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) + result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR ) + result += "VideoEncodeInputKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + using ImageCreateFlags = Flags; - using AcquireProfilingLockFlagsKHR = Flags; - - VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) + template <> + struct FlagTraits { + enum : VkFlags + { + allFlags = VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) | + VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) | + VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) | + VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) | + VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) | + VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) | + VkFlags( ImageCreateFlagBits::eExtendedUsage ) | VkFlags( ImageCreateFlagBits::eProtected ) | + VkFlags( ImageCreateFlagBits::eDisjoint ) | VkFlags( ImageCreateFlagBits::eCornerSampledNV ) | + VkFlags( 
+  using ImageCreateFlags = Flags<ImageCreateFlagBits>;
-  using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
-
-  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
+  template <>
+  struct FlagTraits<ImageCreateFlagBits>
   {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) |
+                 VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) |
+                 VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) |
+                 VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) |
+                 VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) |
+                 VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) |
+                 VkFlags( ImageCreateFlagBits::eExtendedUsage ) | VkFlags( ImageCreateFlagBits::eProtected ) |
+                 VkFlags( ImageCreateFlagBits::eDisjoint ) | VkFlags( ImageCreateFlagBits::eCornerSampledNV ) |
+                 VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) |
+                 VkFlags( ImageCreateFlagBits::eSubsampledEXT )
+    };
+  };
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0,
+                                                                     ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&(ImageCreateFlagBits bit0,
+                                                                    ImageCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0,
+                                                                     ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageCreateFlagBits::eSparseBinding )
+      result += "SparseBinding | ";
+    if ( value & ImageCreateFlagBits::eSparseResidency )
+      result += "SparseResidency | ";
+    if ( value & ImageCreateFlagBits::eSparseAliased )
+      result += "SparseAliased | ";
+    if ( value & ImageCreateFlagBits::eMutableFormat )
+      result += "MutableFormat | ";
+    if ( value & ImageCreateFlagBits::eCubeCompatible )
+      result += "CubeCompatible | ";
+    if ( value & ImageCreateFlagBits::eAlias )
+      result += "Alias | ";
+    if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions )
+      result += "SplitInstanceBindRegions | ";
+    if ( value & ImageCreateFlagBits::e2DArrayCompatible )
+      result += "2DArrayCompatible | ";
+    if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible )
+      result += "BlockTexelViewCompatible | ";
+    if ( value & ImageCreateFlagBits::eExtendedUsage )
+      result += "ExtendedUsage | ";
+    if ( value & ImageCreateFlagBits::eProtected )
+      result += "Protected | ";
+    if ( value & ImageCreateFlagBits::eDisjoint )
+      result += "Disjoint | ";
+    if ( value & ImageCreateFlagBits::eCornerSampledNV )
+      result += "CornerSampledNV | ";
+    if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT )
+      result += "SampleLocationsCompatibleDepthEXT | ";
+    if ( value & ImageCreateFlagBits::eSubsampledEXT )
+      result += "SubsampledEXT | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using ImageUsageFlags = Flags<ImageUsageFlagBits>;
+
+  template <>
+  struct FlagTraits<ImageUsageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) |
+                 VkFlags( ImageUsageFlagBits::eSampled ) | VkFlags( ImageUsageFlagBits::eStorage ) |
+                 VkFlags( ImageUsageFlagBits::eColorAttachment ) |
+                 VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) |
+                 VkFlags( ImageUsageFlagBits::eTransientAttachment ) | VkFlags( ImageUsageFlagBits::eInputAttachment )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+                 | VkFlags( ImageUsageFlagBits::eVideoDecodeDstKHR ) |
+                 VkFlags( ImageUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoDecodeDpbKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+                 | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) |
+                 VkFlags( ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+                 | VkFlags( ImageUsageFlagBits::eVideoEncodeDstKHR ) |
+                 VkFlags( ImageUsageFlagBits::eVideoEncodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoEncodeDpbKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0,
+                                                                    ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&(ImageUsageFlagBits bit0,
+                                                                   ImageUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0,
+                                                                    ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & ImageUsageFlagBits::eTransferSrc )
+      result += "TransferSrc | ";
+    if ( value & ImageUsageFlagBits::eTransferDst )
+      result += "TransferDst | ";
+    if ( value & ImageUsageFlagBits::eSampled )
+      result += "Sampled | ";
+    if ( value & ImageUsageFlagBits::eStorage )
+      result += "Storage | ";
+    if ( value & ImageUsageFlagBits::eColorAttachment )
+      result += "ColorAttachment | ";
+    if ( value & ImageUsageFlagBits::eDepthStencilAttachment )
+      result += "DepthStencilAttachment | ";
+    if ( value & ImageUsageFlagBits::eTransientAttachment )
+      result += "TransientAttachment | ";
+    if ( value & ImageUsageFlagBits::eInputAttachment )
+      result += "InputAttachment | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR )
+      result += "VideoDecodeDstKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR )
+      result += "VideoDecodeSrcKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR )
+      result += "VideoDecodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT )
+      result += "FragmentDensityMapEXT | ";
+    if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
+      result += "FragmentShadingRateAttachmentKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR )
+      result += "VideoEncodeDstKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR )
+      result += "VideoEncodeSrcKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR )
+      result += "VideoEncodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
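The FlagTraits<...>::allFlags constant is what keeps operator~ meaningful: complementing a bit only flips bits that actually exist in the enum, so no undefined bits leak into a mask. A small sketch using the ImageUsageFlags operators defined above (illustration only, not part of the patch):

    #include <iostream>
    #include <vulkan/vulkan.hpp>

    int main()
    {
      // operator| on the flag bits yields a vk::ImageUsageFlags mask.
      vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eSampled;
      // operator~ masks with FlagTraits<ImageUsageFlagBits>::allFlags, so only known bits remain.
      usage &= ~vk::ImageUsageFlagBits::eSampled;
      std::cout << vk::to_string( usage ) << '\n';  // expected: "{ ColorAttachment }"
    }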
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags )
+  {
     return "{}";
   }
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags
-  {};
+  using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
 
-  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
+  template <>
+  struct FlagTraits<MemoryHeapFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( MemoryHeapFlagBits::eDeviceLocal ) | VkFlags( MemoryHeapFlagBits::eMultiInstance )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0,
+                                                                    MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&(MemoryHeapFlagBits bit0,
+                                                                   MemoryHeapFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0,
+                                                                    MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryHeapFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & MemoryHeapFlagBits::eDeviceLocal )
+      result += "DeviceLocal | ";
+    if ( value & MemoryHeapFlagBits::eMultiInstance )
+      result += "MultiInstance | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
+
+  template <>
+  struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( MemoryPropertyFlagBits::eDeviceLocal ) | VkFlags( MemoryPropertyFlagBits::eHostVisible ) |
+                 VkFlags( MemoryPropertyFlagBits::eHostCoherent ) | VkFlags( MemoryPropertyFlagBits::eHostCached ) |
+                 VkFlags( MemoryPropertyFlagBits::eLazilyAllocated ) | VkFlags( MemoryPropertyFlagBits::eProtected ) |
+                 VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) |
+                 VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags
+    operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&(MemoryPropertyFlagBits bit0,
+                                                                       MemoryPropertyFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags
+    operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryPropertyFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & MemoryPropertyFlagBits::eDeviceLocal )
+      result += "DeviceLocal | ";
+    if ( value & MemoryPropertyFlagBits::eHostVisible )
+      result += "HostVisible | ";
+    if ( value & MemoryPropertyFlagBits::eHostCoherent )
+      result += "HostCoherent | ";
+    if ( value & MemoryPropertyFlagBits::eHostCached )
+      result += "HostCached | ";
+    if ( value & MemoryPropertyFlagBits::eLazilyAllocated )
+      result += "LazilyAllocated | ";
+    if ( value & MemoryPropertyFlagBits::eProtected )
+      result += "Protected | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD )
+      result += "DeviceCoherentAMD | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD )
+      result += "DeviceUncachedAMD | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
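MemoryPropertyFlags is almost always used as a containment test: a memory type qualifies when its propertyFlags include every requested bit. A sketch of the classic lookup (hypothetical helper, not part of this patch):

    #include <cstdint>
    #include <stdexcept>
    #include <vulkan/vulkan.hpp>

    // Find a memory type allowed by typeBits whose propertyFlags contain all required bits.
    uint32_t findMemoryType( vk::PhysicalDevice gpu, uint32_t typeBits, vk::MemoryPropertyFlags required )
    {
      vk::PhysicalDeviceMemoryProperties const props = gpu.getMemoryProperties();
      for ( uint32_t i = 0; i < props.memoryTypeCount; ++i )
        if ( ( typeBits & ( 1u << i ) ) && ( props.memoryTypes[i].propertyFlags & required ) == required )
          return i;
      throw std::runtime_error( "no compatible memory type" );
    }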
+  using QueueFlags = Flags<QueueFlagBits>;
+
+  template <>
+  struct FlagTraits<QueueFlagBits>
   {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) |
+                 VkFlags( QueueFlagBits::eTransfer ) | VkFlags( QueueFlagBits::eSparseBinding ) |
+                 VkFlags( QueueFlagBits::eProtected )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+                 | VkFlags( QueueFlagBits::eVideoDecodeKHR ) | VkFlags( QueueFlagBits::eVideoEncodeKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0,
+                                                               QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&(QueueFlagBits bit0, QueueFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0,
+                                                               QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueueFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & QueueFlagBits::eGraphics )
+      result += "Graphics | ";
+    if ( value & QueueFlagBits::eCompute )
+      result += "Compute | ";
+    if ( value & QueueFlagBits::eTransfer )
+      result += "Transfer | ";
+    if ( value & QueueFlagBits::eSparseBinding )
+      result += "SparseBinding | ";
+    if ( value & QueueFlagBits::eProtected )
+      result += "Protected | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & QueueFlagBits::eVideoDecodeKHR )
+      result += "VideoDecodeKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & QueueFlagBits::eVideoEncodeKHR )
+      result += "VideoEncodeKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using SampleCountFlags = Flags<SampleCountFlagBits>;
+
+  template <>
+  struct FlagTraits<SampleCountFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) |
+                 VkFlags( SampleCountFlagBits::e4 ) | VkFlags( SampleCountFlagBits::e8 ) |
+                 VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) |
+                 VkFlags( SampleCountFlagBits::e64 )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0,
+                                                                     SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&(SampleCountFlagBits bit0,
+                                                                    SampleCountFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0,
+                                                                     SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SampleCountFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SampleCountFlagBits::e1 )
+      result += "1 | ";
+    if ( value & SampleCountFlagBits::e2 )
+      result += "2 | ";
+    if ( value & SampleCountFlagBits::e4 )
+      result += "4 | ";
+    if ( value & SampleCountFlagBits::e8 )
+      result += "8 | ";
+    if ( value & SampleCountFlagBits::e16 )
+      result += "16 | ";
+    if ( value & SampleCountFlagBits::e32 )
+      result += "32 | ";
+    if ( value & SampleCountFlagBits::e64 )
+      result += "64 | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
+  {
+    return "{}";
+  }
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
+
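QueueFlags selection follows the same containment idiom as the memory-property lookup. A sketch of finding a queue family that offers both graphics and compute (hypothetical helper; handles assumed valid):

    #include <cstdint>
    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Return the first queue family advertising every bit in `wanted`, or ~0u if none.
    uint32_t pickGraphicsComputeFamily( vk::PhysicalDevice gpu )
    {
      std::vector<vk::QueueFamilyProperties> const families = gpu.getQueueFamilyProperties();
      vk::QueueFlags const wanted = vk::QueueFlagBits::eGraphics | vk::QueueFlagBits::eCompute;
      for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
        if ( ( families[i].queueFlags & wanted ) == wanted )
          return i;
      return ~0u;
    }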
+  template <>
+  struct FlagTraits<DeviceQueueCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( DeviceQueueCreateFlagBits::eProtected )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
+    operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
+    operator&(DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
+    operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceQueueCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & DeviceQueueCreateFlagBits::eProtected )
+      result += "Protected | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using PipelineStageFlags = Flags<PipelineStageFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineStageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags =
+        VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) |
+        VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) |
+        VkFlags( PipelineStageFlagBits::eTessellationControlShader ) |
+        VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) |
+        VkFlags( PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) |
+        VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) |
+        VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) |
+        VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) |
+        VkFlags( PipelineStageFlagBits::eHost ) | VkFlags( PipelineStageFlagBits::eAllGraphics ) |
+        VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) |
+        VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) |
+        VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) |
+        VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) | VkFlags( PipelineStageFlagBits::eTaskShaderNV ) |
+        VkFlags( PipelineStageFlagBits::eMeshShaderNV ) | VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) |
+        VkFlags( PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) |
+        VkFlags( PipelineStageFlagBits::eCommandPreprocessNV ) | VkFlags( PipelineStageFlagBits::eNoneKHR )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0,
+                                                                       PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&(PipelineStageFlagBits bit0,
+                                                                      PipelineStageFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0,
+                                                                       PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineStageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineStageFlagBits::eTopOfPipe )
+      result += "TopOfPipe | ";
+    if ( value & PipelineStageFlagBits::eDrawIndirect )
+      result += "DrawIndirect | ";
+    if ( value & PipelineStageFlagBits::eVertexInput )
+      result += "VertexInput | ";
+    if ( value & PipelineStageFlagBits::eVertexShader )
+      result += "VertexShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationControlShader )
+      result += "TessellationControlShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationEvaluationShader )
+      result += "TessellationEvaluationShader | ";
+    if ( value & PipelineStageFlagBits::eGeometryShader )
+      result += "GeometryShader | ";
+    if ( value & PipelineStageFlagBits::eFragmentShader )
+      result += "FragmentShader | ";
+    if ( value & PipelineStageFlagBits::eEarlyFragmentTests )
+      result += "EarlyFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eLateFragmentTests )
+      result += "LateFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eColorAttachmentOutput )
+      result += "ColorAttachmentOutput | ";
+    if ( value & PipelineStageFlagBits::eComputeShader )
+      result += "ComputeShader | ";
+    if ( value & PipelineStageFlagBits::eTransfer )
+      result += "Transfer | ";
+    if ( value & PipelineStageFlagBits::eBottomOfPipe )
+      result += "BottomOfPipe | ";
+    if ( value & PipelineStageFlagBits::eHost )
+      result += "Host | ";
+    if ( value & PipelineStageFlagBits::eAllGraphics )
+      result += "AllGraphics | ";
+    if ( value & PipelineStageFlagBits::eAllCommands )
+      result += "AllCommands | ";
+    if ( value & PipelineStageFlagBits::eTransformFeedbackEXT )
+      result += "TransformFeedbackEXT | ";
+    if ( value & PipelineStageFlagBits::eConditionalRenderingEXT )
+      result += "ConditionalRenderingEXT | ";
+    if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR )
+      result += "AccelerationStructureBuildKHR | ";
+    if ( value & PipelineStageFlagBits::eRayTracingShaderKHR )
+      result += "RayTracingShaderKHR | ";
+    if ( value & PipelineStageFlagBits::eTaskShaderNV )
+      result += "TaskShaderNV | ";
+    if ( value & PipelineStageFlagBits::eMeshShaderNV )
+      result += "MeshShaderNV | ";
+    if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT )
+      result += "FragmentDensityProcessEXT | ";
+    if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR )
+      result += "FragmentShadingRateAttachmentKHR | ";
+    if ( value & PipelineStageFlagBits::eCommandPreprocessNV )
+      result += "CommandPreprocessNV | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
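PipelineStageFlags show up most often as the source/destination masks of a barrier. A sketch of the common transfer-write-then-sample ordering (illustration only; `cmd` is an assumed, already-recording command buffer):

    #include <vulkan/vulkan.hpp>

    // Make a transfer write to `image` visible to fragment-shader reads.
    void transferToFragmentBarrier( vk::CommandBuffer cmd, vk::Image image )
    {
      vk::ImageMemoryBarrier barrier{};
      barrier.srcAccessMask       = vk::AccessFlagBits::eTransferWrite;
      barrier.dstAccessMask       = vk::AccessFlagBits::eShaderRead;
      barrier.oldLayout           = vk::ImageLayout::eTransferDstOptimal;
      barrier.newLayout           = vk::ImageLayout::eShaderReadOnlyOptimal;
      barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      barrier.image               = image;
      barrier.subresourceRange    = { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };
      cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                           vk::PipelineStageFlagBits::eFragmentShader,
                           {}, nullptr, nullptr, barrier );
    }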
+
+  enum class MemoryMapFlagBits : VkFlags
+  {
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
   {
     return "(void)";
   }
-  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
+  using MemoryMapFlags = Flags<MemoryMapFlagBits>;
 
-  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR )
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
+  {
-
     return "{}";
   }
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  using ImageAspectFlags = Flags<ImageAspectFlagBits>;
-  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
-
-  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+  template <>
+  struct FlagTraits<ImageAspectFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
+      allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) |
+                 VkFlags( ImageAspectFlagBits::eStencil ) | VkFlags( ImageAspectFlagBits::eMetadata ) |
+                 VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( ImageAspectFlagBits::ePlane1 ) |
+                 VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) |
+                 VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) |
+                 VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT )
    };
  };

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0,
+                                                                     ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return AttachmentDescriptionFlags( bit0 ) | bit1;
+    return ImageAspectFlags( bit0 ) | bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&(ImageAspectFlagBits bit0,
+                                                                    ImageAspectFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return AttachmentDescriptionFlags( bit0 ) & bit1;
+    return ImageAspectFlags( bit0 ) & bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0,
+                                                                     ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return AttachmentDescriptionFlags( bit0 ) ^ bit1;
+    return ImageAspectFlags( bit0 ) ^ bit1;
   }

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
   {
-    return ~( AttachmentDescriptionFlags( bits ) );
+    return ~( ImageAspectFlags( bits ) );
   }

-  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
   {
+    if ( !value )
+      return "{}";
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & ImageAspectFlagBits::eColor )
+      result += "Color | ";
+    if ( value & ImageAspectFlagBits::eDepth )
+      result += "Depth | ";
+    if ( value & ImageAspectFlagBits::eStencil )
+      result += "Stencil | ";
+    if ( value & ImageAspectFlagBits::eMetadata )
+      result += "Metadata | ";
+    if ( value & ImageAspectFlagBits::ePlane0 )
+      result += "Plane0 | ";
+    if ( value & ImageAspectFlagBits::ePlane1 )
+      result += "Plane1 | ";
+    if ( value & ImageAspectFlagBits::ePlane2 )
+      result += "Plane2 | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane0EXT )
+      result += "MemoryPlane0EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane1EXT )
+      result += "MemoryPlane1EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane2EXT )
+      result += "MemoryPlane2EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane3EXT )
+      result += "MemoryPlane3EXT | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
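ImageAspectFlags is the one flag type that is routinely combined in everyday code: a depth-stencil subresource touches two aspects at once. A tiny sketch (illustration only):

    #include <vulkan/vulkan.hpp>

    // A depth-stencil view covers both the Depth and Stencil aspects.
    vk::ImageSubresourceRange const depthStencilRange{
      vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil,  // aspectMask
      0, 1,    // baseMipLevel, levelCount
      0, 1 };  // baseArrayLayer, layerCount
    // vk::to_string( depthStencilRange.aspectMask ) yields "{ Depth | Stencil }".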
+  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
+
+  template <>
+  struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) |
+                 VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) |
+                 VkFlags( SparseImageFormatFlagBits::eNonstandardBlockSize )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
+    operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
+    operator&(SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
+    operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SparseImageFormatFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SparseImageFormatFlagBits::eSingleMiptail )
+      result += "SingleMiptail | ";
+    if ( value & SparseImageFormatFlagBits::eAlignedMipSize )
+      result += "AlignedMipSize | ";
+    if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize )
+      result += "NonstandardBlockSize | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
+
+  template <>
+  struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( SparseMemoryBindFlagBits::eMetadata )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
+    operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
+    operator&(SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
+    operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SparseMemoryBindFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & SparseMemoryBindFlagBits::eMetadata )
+      result += "Metadata | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+  using FenceCreateFlags = Flags<FenceCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<FenceCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( FenceCreateFlagBits::eSignaled )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0,
+                                                                     FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&(FenceCreateFlagBits bit0,
+                                                                    FenceCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0,
+                                                                     FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FenceCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & FenceCreateFlagBits::eSignaled )
+      result += "Signaled | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  enum class SemaphoreCreateFlagBits : VkFlags
+  {
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
+  {
+    return "{}";
+  }
+
+  using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<EventCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( EventCreateFlagBits::eDeviceOnlyKHR )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0,
+                                                                     EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&(EventCreateFlagBits bit0,
+                                                                    EventCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0,
+                                                                     EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return EventCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( EventCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & EventCreateFlagBits::eDeviceOnlyKHR )
+      result += "DeviceOnlyKHR | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
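FenceCreateFlagBits::eSignaled is the one fence flag most code ever sets; it lets the first wait in a frames-in-flight loop succeed immediately. A minimal sketch (assumes a valid `device` and smart-handle support enabled, which is the vulkan.hpp default):

    #include <vulkan/vulkan.hpp>

    // Create a fence that starts in the signaled state.
    vk::UniqueFence makeSignaledFence( vk::Device device )
    {
      return device.createFenceUnique( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
    }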
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
+
+  template <>
+  struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) |
+                 VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
+    operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
+    operator&(QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
+    operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryPipelineStatisticFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices )
+      result += "InputAssemblyVertices | ";
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives )
+      result += "InputAssemblyPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations )
+      result += "VertexShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations )
+      result += "GeometryShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives )
+      result += "GeometryShaderPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations )
+      result += "ClippingInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives )
+      result += "ClippingPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations )
+      result += "FragmentShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches )
+      result += "TessellationControlShaderPatches | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations )
+      result += "TessellationEvaluationShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
+      result += "ComputeShaderInvocations | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
+  {
+    return "{}";
+  }
+
+  using QueryResultFlags = Flags<QueryResultFlagBits>;
+
+  template <>
+  struct FlagTraits<QueryResultFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) |
+                 VkFlags( QueryResultFlagBits::eWithAvailability ) | VkFlags( QueryResultFlagBits::ePartial )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+                 | VkFlags( QueryResultFlagBits::eWithStatusKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0,
+                                                                     QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&(QueryResultFlagBits bit0,
+                                                                    QueryResultFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0,
+                                                                     QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryResultFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & QueryResultFlagBits::e64 )
+      result += "64 | ";
+    if ( value & QueryResultFlagBits::eWait )
+      result += "Wait | ";
+    if ( value & QueryResultFlagBits::eWithAvailability )
+      result += "WithAvailability | ";
+    if ( value & QueryResultFlagBits::ePartial )
+      result += "Partial | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & QueryResultFlagBits::eWithStatusKHR )
+      result += "WithStatusKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
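QueryResultFlags combine naturally: 64-bit results plus a blocking wait is the usual pairing for timestamps. A sketch under those assumptions (`device`/`queryPool` are assumed valid handles holding two timestamp queries; the raw-pointer overload mirrors the C API):

    #include <array>
    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    std::array<uint64_t, 2> readTimestamps( vk::Device device, vk::QueryPool queryPool )
    {
      std::array<uint64_t, 2> timestamps{};
      // e64 selects 64-bit slots; eWait blocks until both results are available.
      (void)device.getQueryPoolResults( queryPool, 0, 2,
                                        sizeof( timestamps ), timestamps.data(), sizeof( uint64_t ),
                                        vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
      return timestamps;
    }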
   using BufferCreateFlags = Flags<BufferCreateFlagBits>;
 
-  template <> struct FlagTraits<BufferCreateFlagBits>
+  template <>
+  struct FlagTraits<BufferCreateFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplay)
+      allFlags = VkFlags( BufferCreateFlagBits::eSparseBinding ) | VkFlags( BufferCreateFlagBits::eSparseResidency ) |
+                 VkFlags( BufferCreateFlagBits::eSparseAliased ) | VkFlags( BufferCreateFlagBits::eProtected ) |
+                 VkFlags( BufferCreateFlagBits::eDeviceAddressCaptureReplay )
     };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0,
+                                                                      BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return BufferCreateFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&(BufferCreateFlagBits bit0,
+                                                                     BufferCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
     return BufferCreateFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0,
+                                                                      BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return BufferCreateFlags( bit0 ) ^ bit1;
   }
@@ -9275,42 +14159,67 @@ namespace VULKAN_HPP_NAMESPACE
     return ~( BufferCreateFlags( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
   {
+    if ( !value )
+      return "{}";
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
-    if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
-    if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
-    if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | ";
-    if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & BufferCreateFlagBits::eSparseBinding )
+      result += "SparseBinding | ";
+    if ( value & BufferCreateFlagBits::eSparseResidency )
+      result += "SparseResidency | ";
+    if ( value & BufferCreateFlagBits::eSparseAliased )
+      result += "SparseAliased | ";
+    if ( value & BufferCreateFlagBits::eProtected )
+      result += "Protected | ";
+    if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay )
+      result += "DeviceAddressCaptureReplay | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
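CreateInfo structs are where these Flags types end up in application code. A sketch of a staging-buffer helper (hypothetical; `device` assumed valid, usage bits come from the BufferUsageFlagBits handled in the next hunk):

    #include <vulkan/vulkan.hpp>

    vk::UniqueBuffer makeStagingBuffer( vk::Device device, vk::DeviceSize size )
    {
      vk::BufferCreateInfo info{};
      info.size        = size;                                      // bytes
      info.usage       = vk::BufferUsageFlagBits::eTransferSrc;     // source of copies
      info.sharingMode = vk::SharingMode::eExclusive;
      return device.createBufferUnique( info );
    }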
-
   using BufferUsageFlags = Flags<BufferUsageFlagBits>;
 
-  template <> struct FlagTraits<BufferUsageFlagBits>
+  template <>
+  struct FlagTraits<BufferUsageFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingKHR)
+      allFlags =
+        VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) |
+        VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) |
+        VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) |
+        VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) |
+        VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VkFlags( BufferUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( BufferUsageFlagBits::eVideoDecodeDstKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) |
+        VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) |
+        VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) |
+        VkFlags( BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) |
+        VkFlags( BufferUsageFlagBits::eAccelerationStructureStorageKHR ) |
+        VkFlags( BufferUsageFlagBits::eShaderBindingTableKHR )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
     };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0,
+                                                                     BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return BufferUsageFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&(BufferUsageFlagBits bit0,
+                                                                    BufferUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
     return BufferUsageFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0,
+                                                                     BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return BufferUsageFlags( bit0 ) ^ bit1;
   }
@@ -9320,31 +14229,66 @@ namespace VULKAN_HPP_NAMESPACE
     return ~( BufferUsageFlags( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
   {
+    if ( !value )
+      return "{}";
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
-    if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | ";
-    if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
-    if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
-    if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | ";
-    if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | ";
-    if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | ";
-    if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
-    if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | ";
-    if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) result += "ShaderDeviceAddress | ";
-    if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | ";
-    if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | ";
-    if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
-    if ( value & BufferUsageFlagBits::eRayTracingKHR ) result += "RayTracingKHR | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & BufferUsageFlagBits::eTransferSrc )
+      result += "TransferSrc | ";
+    if ( value & BufferUsageFlagBits::eTransferDst )
+      result += "TransferDst | ";
+    if ( value & BufferUsageFlagBits::eUniformTexelBuffer )
+      result += "UniformTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageTexelBuffer )
+      result += "StorageTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eUniformBuffer )
+      result += "UniformBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageBuffer )
+      result += "StorageBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndexBuffer )
+      result += "IndexBuffer | ";
+    if ( value & BufferUsageFlagBits::eVertexBuffer )
+      result += "VertexBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndirectBuffer )
+      result += "IndirectBuffer | ";
+    if ( value & BufferUsageFlagBits::eShaderDeviceAddress )
+      result += "ShaderDeviceAddress | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR )
+      result += "VideoDecodeSrcKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR )
+      result += "VideoDecodeDstKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT )
+      result += "TransformFeedbackBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT )
+      result += "TransformFeedbackCounterBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eConditionalRenderingEXT )
+      result += "ConditionalRenderingEXT | ";
+    if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR )
+      result += "AccelerationStructureBuildInputReadOnlyKHR | ";
+    if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR )
+      result += "AccelerationStructureStorageKHR | ";
+    if ( value & BufferUsageFlagBits::eShaderBindingTableKHR )
+      result += "ShaderBindingTableKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR )
+      result += "VideoEncodeDstKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR )
+      result += "VideoEncodeSrcKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
   enum class BufferViewCreateFlagBits : VkFlags
-  {};
+  {
+  };
 
   VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
   {
@@ -9353,273 +14297,1595 @@ namespace VULKAN_HPP_NAMESPACE
 
   using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
 
-  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
   {
-
     return "{}";
   }
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
-  using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
-
-  template <> struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
+  template <>
+  struct FlagTraits<ImageViewCreateFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsKHR::eLowMemory)
+      allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) |
+                 VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
     };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags
+    operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1;
+    return ImageViewCreateFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&(ImageViewCreateFlagBits bit0,
+                                                                        ImageViewCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1;
+    return ImageViewCreateFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags
+    operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1;
+    return ImageViewCreateFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
   {
-    return ~( BuildAccelerationStructureFlagsKHR( bits ) );
+    return ~( ImageViewCreateFlags( bits ) );
   }
 
-  using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
-
-  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value )
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
   {
+    if ( !value )
+      return "{}";
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | ";
-    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | ";
-    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | ";
-    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | ";
-    if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT )
+      result += "FragmentDensityMapDynamicEXT | ";
+    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
+      result += "FragmentDensityMapDeferredEXT | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
+  {
+    return "{}";
+  }
+
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineCacheCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags( PipelineCacheCreateFlagBits::eExternallySynchronizedEXT )
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
+    operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCacheCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
+    operator&(PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCacheCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
+    operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCacheCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineCacheCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT )
+      result += "ExternallySynchronizedEXT | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
   using ColorComponentFlags = Flags<ColorComponentFlagBits>;
 
-  template <> struct FlagTraits<ColorComponentFlagBits>
+  template <>
+  struct FlagTraits<ColorComponentFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
+      allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) |
+                 VkFlags( ColorComponentFlagBits::eB ) | VkFlags( ColorComponentFlagBits::eA )
     };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags
+    operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return ColorComponentFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&(ColorComponentFlagBits bit0,
+                                                                       ColorComponentFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
     return ColorComponentFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags
+    operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return ColorComponentFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
   {
     return ~( ColorComponentFlags( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
   {
+    if ( !value )
+      return "{}";
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & ColorComponentFlagBits::eR ) result += "R | ";
-    if ( value & ColorComponentFlagBits::eG ) result += "G | ";
-    if ( value & ColorComponentFlagBits::eB ) result += "B | ";
-    if ( value & ColorComponentFlagBits::eA ) result += "A | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & ColorComponentFlagBits::eR )
+      result += "R | ";
+    if ( value & ColorComponentFlagBits::eG )
+      result += "G | ";
+    if ( value & ColorComponentFlagBits::eB )
+      result += "B | ";
+    if ( value & ColorComponentFlagBits::eA )
+      result += "A | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
+  using CullModeFlags = Flags<CullModeFlagBits>;
-  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
-
-  template <> struct FlagTraits<CommandBufferResetFlagBits>
+  template <>
+  struct FlagTraits<CullModeFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
+      allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) |
+                 VkFlags( CullModeFlagBits::eBack ) | VkFlags( CullModeFlagBits::eFrontAndBack )
    };
  };

-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0,
+                                                                  CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return CommandBufferResetFlags( bit0 ) | bit1;
+    return CullModeFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&(CullModeFlagBits bit0,
+                                                                 CullModeFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return CommandBufferResetFlags( bit0 ) & bit1;
+    return CullModeFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0,
+                                                                  CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return CommandBufferResetFlags( bit0 ) ^ bit1;
+    return CullModeFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
   {
-    return ~( CommandBufferResetFlags( bits ) );
+    return ~( CullModeFlags( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
   {
+    if ( !value )
+      return "{}";
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & CullModeFlagBits::eFront )
+      result += "Front | ";
+    if ( value & CullModeFlagBits::eBack )
+      result += "Back | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse) + allFlags = + VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) | + VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) | + VkFlags( PipelineCreateFlagBits::eDispatchBase ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) | + VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) | + VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) | + VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) | VkFlags( PipelineCreateFlagBits::eLibraryKHR ) | + VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) | + VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { - return CommandBufferUsageFlags( bit0 ) | bit1; + return PipelineCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&(PipelineCreateFlagBits bit0, + PipelineCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT { - return CommandBufferUsageFlags( bit0 ) & bit1; + return PipelineCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { - return CommandBufferUsageFlags( bit0 ) ^ bit1; + return PipelineCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { - return ~( CommandBufferUsageFlags( bits ) ); + return ~( PipelineCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | "; - if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | "; - if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | "; - return "{ " + result.substr(0, 
result.size() - 3) + " }"; + if ( value & PipelineCreateFlagBits::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits::eDerivative ) + result += "Derivative | "; + if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) + result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) + result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) + result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) + result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) + result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) + result += "RayTracingSkipAabbsKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; + if ( value & PipelineCreateFlagBits::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) + result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) + result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) + result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) + result += "FailOnPipelineCompileRequiredEXT | "; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) + result += "EarlyReturnOnFailureEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + enum class PipelineDepthStencilStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineDepthStencilStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineDynamicStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineDynamicStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineInputAssemblyStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineLayoutCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) + { + return "(void)"; + } + + using PipelineLayoutCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) + { + return "{}"; + } + + enum class 
PipelineMultisampleStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineMultisampleStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineRasterizationStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineRasterizationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) + { + return "{}"; + } + + using PipelineShaderStageCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) | + VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator&(PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineShaderStageCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) + result += "AllowVaryingSubgroupSizeEXT | "; + if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) + result += "RequireFullSubgroupsEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class PipelineTessellationStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineTessellationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineVertexInputStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineVertexInputStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineViewportStateCreateFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) + { + return "(void)"; + } + + using PipelineViewportStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) + { + return "{}"; + } + + using ShaderStageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( 
ShaderStageFlagBits::eVertex ) | VkFlags( ShaderStageFlagBits::eTessellationControl ) | + VkFlags( ShaderStageFlagBits::eTessellationEvaluation ) | VkFlags( ShaderStageFlagBits::eGeometry ) | + VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) | + VkFlags( ShaderStageFlagBits::eAllGraphics ) | VkFlags( ShaderStageFlagBits::eAll ) | + VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) | + VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) | + VkFlags( ShaderStageFlagBits::eIntersectionKHR ) | VkFlags( ShaderStageFlagBits::eCallableKHR ) | + VkFlags( ShaderStageFlagBits::eTaskNV ) | VkFlags( ShaderStageFlagBits::eMeshNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&(ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ShaderStageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ShaderStageFlagBits::eVertex ) + result += "Vertex | "; + if ( value & ShaderStageFlagBits::eTessellationControl ) + result += "TessellationControl | "; + if ( value & ShaderStageFlagBits::eTessellationEvaluation ) + result += "TessellationEvaluation | "; + if ( value & ShaderStageFlagBits::eGeometry ) + result += "Geometry | "; + if ( value & ShaderStageFlagBits::eFragment ) + result += "Fragment | "; + if ( value & ShaderStageFlagBits::eCompute ) + result += "Compute | "; + if ( value & ShaderStageFlagBits::eRaygenKHR ) + result += "RaygenKHR | "; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) + result += "AnyHitKHR | "; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) + result += "ClosestHitKHR | "; + if ( value & ShaderStageFlagBits::eMissKHR ) + result += "MissKHR | "; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) + result += "IntersectionKHR | "; + if ( value & ShaderStageFlagBits::eCallableKHR ) + result += "CallableKHR | "; + if ( value & ShaderStageFlagBits::eTaskNV ) + result += "TaskNV | "; + if ( value & ShaderStageFlagBits::eMeshNV ) + result += "MeshNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SamplerCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) | + VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&(SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( 
SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SamplerCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SamplerCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + result += "SubsampledCoarseReconstructionEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DescriptorPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) | + VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) | + VkFlags( DescriptorPoolCreateFlagBits::eHostOnlyVALVE ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator&(DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorPoolCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) + result += "FreeDescriptorSet | "; + if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorPoolCreateFlagBits::eHostOnlyVALVE ) + result += "HostOnlyVALVE | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class DescriptorPoolResetFlagBits : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits ) + { + return "(void)"; + } + + using DescriptorPoolResetFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags ) + { + return "{}"; + } + + using DescriptorSetLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) | + VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) | + VkFlags( DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator&(DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) & bit1; + } 
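// Aside: a minimal usage sketch of the Flags/FlagTraits pattern generated in
// this hunk; it is illustrative only, not part of the upstream diff. It
// assumes the default VULKAN_HPP_NAMESPACE ("vk") and uses only names that
// appear above (DescriptorSetLayoutCreateFlagBits and its operators).
#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
    // The operator| defined above widens a single FlagBits value to its
    // Flags wrapper, so bits combine without an explicit cast.
    vk::DescriptorSetLayoutCreateFlags flags =
        vk::DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool |
        vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR;

    // to_string walks the set bits and yields
    // "{ UpdateAfterBindPool | PushDescriptorKHR }".
    std::cout << vk::to_string( flags ) << '\n';
}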
+ + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorSetLayoutCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) + result += "UpdateAfterBindPool | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) + result += "PushDescriptorKHR | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE ) + result += "HostOnlyPoolVALVE | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccessFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) | + VkFlags( AccessFlagBits::eVertexAttributeRead ) | VkFlags( AccessFlagBits::eUniformRead ) | + VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) | + VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) | + VkFlags( AccessFlagBits::eColorAttachmentWrite ) | + VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) | + VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) | VkFlags( AccessFlagBits::eTransferRead ) | + VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) | + VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) | + VkFlags( AccessFlagBits::eMemoryWrite ) | VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) | + VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) | + VkFlags( AccessFlagBits::eTransformFeedbackCounterWriteEXT ) | + VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) | + VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) | + VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) | + VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) | + VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) | + VkFlags( AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) | + VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) | + VkFlags( AccessFlagBits::eCommandPreprocessWriteNV ) | VkFlags( AccessFlagBits::eNoneKHR ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&(AccessFlagBits bit0, + AccessFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccessFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AccessFlagBits::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits::eIndexRead ) 
+ result += "IndexRead | "; + if ( value & AccessFlagBits::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) + result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) + result += "AccelerationStructureWriteKHR | "; + if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) + result += "FragmentShadingRateAttachmentReadKHR | "; + if ( value & AccessFlagBits::eCommandPreprocessReadNV ) + result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AttachmentDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( AttachmentDescriptionFlagBits::eMayAlias ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator&(AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( 
AttachmentDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( AttachmentDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AttachmentDescriptionFlagBits::eMayAlias ) + result += "MayAlias | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DependencyFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( DependencyFlagBits::eDeviceGroup ) | + VkFlags( DependencyFlagBits::eViewLocal ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&(DependencyFlagBits bit0, + DependencyFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DependencyFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DependencyFlagBits::eByRegion ) + result += "ByRegion | "; + if ( value & DependencyFlagBits::eDeviceGroup ) + result += "DeviceGroup | "; + if ( value & DependencyFlagBits::eViewLocal ) + result += "ViewLocal | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FramebufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( FramebufferCreateFlagBits::eImageless ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator&(FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( FramebufferCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FramebufferCreateFlagBits::eImageless ) + result += "Imageless | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using RenderPassCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( RenderPassCreateFlagBits::eTransformQCOM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator&(RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( RenderPassCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & RenderPassCreateFlagBits::eTransformQCOM ) + result += "TransformQCOM | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SubpassDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) | + VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) | + VkFlags( SubpassDescriptionFlagBits::eFragmentRegionQCOM ) | + VkFlags( SubpassDescriptionFlagBits::eShaderResolveQCOM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator&(SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SubpassDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) + result += "PerViewAttributesNVX | "; + if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) + result += "PerViewPositionXOnlyNVX | "; + if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) + result += "FragmentRegionQCOM | "; + if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) + result += "ShaderResolveQCOM | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } using CommandPoolCreateFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected) + allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) | + VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) | + VkFlags( CommandPoolCreateFlagBits::eProtected ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return 
CommandPoolCreateFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator&(CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT { return CommandPoolCreateFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolCreateFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT { return ~( CommandPoolCreateFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | "; - if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | "; - if ( value & CommandPoolCreateFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CommandPoolCreateFlagBits::eTransient ) + result += "Transient | "; + if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) + result += "ResetCommandBuffer | "; + if ( value & CommandPoolCreateFlagBits::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - using CommandPoolResetFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources) + allFlags = VkFlags( CommandPoolResetFlagBits::eReleaseResources ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolResetFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator&(CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1)VULKAN_HPP_NOEXCEPT { return CommandPoolResetFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return CommandPoolResetFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits 
bits ) + VULKAN_HPP_NOEXCEPT { return ~( CommandPoolResetFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; + if ( value & CommandPoolResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } - if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + using CommandBufferResetFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandBufferResetFlagBits::eReleaseResources ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator&(CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandBufferResetFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandBufferResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandBufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) | + VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) | + VkFlags( CommandBufferUsageFlagBits::eSimultaneousUse ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator&(CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandBufferUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) + result += "OneTimeSubmit | "; + if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) + result += "RenderPassContinue | "; + if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) + result += "SimultaneousUse | "; + 
return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using QueryControlFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueryControlFlagBits::ePrecise ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&(QueryControlFlagBits bit0, + QueryControlFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryControlFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueryControlFlagBits::ePrecise ) + result += "Precise | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using StencilFaceFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) | + VkFlags( StencilFaceFlagBits::eFrontAndBack ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&(StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( StencilFaceFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & StencilFaceFlagBits::eFront ) + result += "Front | "; + if ( value & StencilFaceFlagBits::eBack ) + result += "Back | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_1 === + + using SubgroupFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) | + VkFlags( SubgroupFeatureFlagBits::eArithmetic ) | VkFlags( SubgroupFeatureFlagBits::eBallot ) | + VkFlags( SubgroupFeatureFlagBits::eShuffle ) | VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) | + VkFlags( SubgroupFeatureFlagBits::eClustered ) | VkFlags( SubgroupFeatureFlagBits::eQuad ) | + VkFlags( SubgroupFeatureFlagBits::ePartitionedNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&(SubgroupFeatureFlagBits bit0, + SubgroupFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + 
{ + return SubgroupFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SubgroupFeatureFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubgroupFeatureFlagBits::eBasic ) + result += "Basic | "; + if ( value & SubgroupFeatureFlagBits::eVote ) + result += "Vote | "; + if ( value & SubgroupFeatureFlagBits::eArithmetic ) + result += "Arithmetic | "; + if ( value & SubgroupFeatureFlagBits::eBallot ) + result += "Ballot | "; + if ( value & SubgroupFeatureFlagBits::eShuffle ) + result += "Shuffle | "; + if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) + result += "ShuffleRelative | "; + if ( value & SubgroupFeatureFlagBits::eClustered ) + result += "Clustered | "; + if ( value & SubgroupFeatureFlagBits::eQuad ) + result += "Quad | "; + if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) + result += "PartitionedNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PeerMemoryFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PeerMemoryFeatureFlagBits::eCopySrc ) | VkFlags( PeerMemoryFeatureFlagBits::eCopyDst ) | + VkFlags( PeerMemoryFeatureFlagBits::eGenericSrc ) | VkFlags( PeerMemoryFeatureFlagBits::eGenericDst ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator&(PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PeerMemoryFeatureFlags( bits ) ); + } + + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) + result += "CopySrc | "; + if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) + result += "CopyDst | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) + result += "GenericSrc | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) + result += "GenericDst | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using MemoryAllocateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( MemoryAllocateFlagBits::eDeviceMask ) | VkFlags( MemoryAllocateFlagBits::eDeviceAddress ) | + VkFlags( MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&(MemoryAllocateFlagBits bit0, + MemoryAllocateFlagBits bit1)VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( MemoryAllocateFlags( bits ) ); + } + + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryAllocateFlagBits::eDeviceMask ) + result += "DeviceMask | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddress ) + result += "DeviceAddress | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } enum class CommandPoolTrimFlagBits : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits ) { @@ -9630,494 +15896,14 @@ namespace VULKAN_HPP_NAMESPACE using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; - VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) + VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) { - return "{}"; } - - using CompositeAlphaFlagsKHR = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return CompositeAlphaFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return CompositeAlphaFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return CompositeAlphaFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CompositeAlphaFlagsKHR( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | "; - if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | "; - if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | "; - if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using ConditionalRenderingFlagsEXT = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator|( 
ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ConditionalRenderingFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ConditionalRenderingFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ConditionalRenderingFlagsEXT( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using CullModeFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CullModeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CullModeFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CullModeFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CullModeFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & CullModeFlagBits::eFront ) result += "Front | "; - if ( value & CullModeFlagBits::eBack ) result += "Back | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using DebugReportFlagsEXT = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugReportFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugReportFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugReportFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DebugReportFlagsEXT( bits 
) ); - } - - VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | "; - if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | "; - if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | "; - if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | "; - if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using DebugUtilsMessageSeverityFlagsEXT = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using DebugUtilsMessageTypeFlagsEXT = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1; - } - 
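// Aside: a self-contained sketch of the to_string idiom used throughout this
// hunk, in both the old and the reformatted code: append "Name | " for each
// set bit, then trim the trailing " | " (three characters) and brace the
// result. A plain std::string stand-in; no Vulkan types are required.
#include <iostream>
#include <string>

int main()
{
    std::string result;
    result += "General | ";     // as if DebugUtilsMessageTypeFlagBitsEXT::eGeneral were set
    result += "Validation | ";  // as if eValidation were set
    std::cout << "{ " + result.substr( 0, result.size() - 3 ) + " }" << '\n';
    // prints: { General | Validation }
}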
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | "; - if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | "; - if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) - { - return "(void)"; - } - - using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) - { - - return "{}"; - } - - enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) - { - return "(void)"; - } - - using DebugUtilsMessengerCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) - { - - return "{}"; - } - - - using DependencyFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DependencyFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DependencyFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DependencyFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DependencyFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | "; - if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | "; - if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using DescriptorBindingFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(DescriptorBindingFlagBits::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBits::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBits::ePartiallyBound) | VkFlags(DescriptorBindingFlagBits::eVariableDescriptorCount) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorBindingFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorBindingFlags( bit0 ) & 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorBindingFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorBindingFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( DescriptorBindingFlags( bits ) );
-  }
-
-  using DescriptorBindingFlagsEXT = DescriptorBindingFlags;
-
-  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
-    if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
-    if ( value & DescriptorBindingFlagBits::ePartiallyBound ) result += "PartiallyBound | ";
-    if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
-
-  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBind)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorPoolCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorPoolCreateFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorPoolCreateFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( DescriptorPoolCreateFlags( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | ";
-    if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-  enum class DescriptorPoolResetFlagBits : VkFlags
-  {};
-
-  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
-  {
-    return "(void)";
-  }
-
-  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;
-
-  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
-  {
-
-    return "{}";
-  }
-
-
-  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
-
-  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool) | VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorSetLayoutCreateFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( DescriptorSetLayoutCreateFlags( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | ";
-    if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
   enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags
-  {};
+  {
+  };
 
   VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
   {
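(The hunks above and below churn through the same generated pattern for every flag type: a Flags<FlagBits> alias, a FlagTraits specialization whose allFlags lists the known bits, constexpr bitwise operators, and a to_string overload. A minimal usage sketch, illustrative only and not part of this patch, assuming the generated <vulkan/vulkan.hpp> from this tree is on the include path:

  #include <iostream>
  #include <vulkan/vulkan.hpp>

  int main()
  {
    // operator| on the FlagBits enum yields the corresponding Flags value.
    vk::DescriptorBindingFlags flags = vk::DescriptorBindingFlagBits::eUpdateAfterBind |
                                       vk::DescriptorBindingFlagBits::ePartiallyBound;
    // operator& produces a Flags value that is contextually convertible to bool.
    if ( flags & vk::DescriptorBindingFlagBits::ePartiallyBound )
      std::cout << vk::to_string( flags ) << '\n';  // prints something like "{ UpdateAfterBind | PartiallyBound }"
  }
)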
@@ -10128,670 +15914,287 @@ namespace VULKAN_HPP_NAMESPACE
 
   using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
 
-  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
   {
-
     return "{}";
   }
-
-  using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
-
-  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
-  {
-
-    return "{}";
-  }
-
-
-  using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
-
-  template <> struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking) | VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( DeviceDiagnosticsConfigFlagsNV( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | ";
-    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | ";
-    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
-
-  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | ";
-    if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | ";
-    if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | ";
-    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
-
-  template <> struct FlagTraits<DeviceQueueCreateFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceQueueCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceQueueCreateFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DeviceQueueCreateFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( DeviceQueueCreateFlags( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-  enum class DisplayModeCreateFlagBitsKHR : VkFlags
-  {};
-
-  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
-  {
-    return "(void)";
-  }
-
-  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
-
-  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
-  {
-
-    return "{}";
-  }
-
-
-  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
-
-  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
-    if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | ";
-    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | ";
-    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-  enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags
-  {};
-
-  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
-  {
-    return "(void)";
-  }
-
-  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
-
-  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
-  {
-
-    return "{}";
-  }
-
-  enum class EventCreateFlagBits : VkFlags
-  {};
-
-  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
-  {
-    return "(void)";
-  }
-
-  using EventCreateFlags = Flags<EventCreateFlagBits>;
-
-  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags )
-  {
-
-    return "{}";
-  }
-
-
-  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
-
-  template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalFenceFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalFenceFeatureFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( ExternalFenceFeatureFlags( bits ) );
-  }
-
-  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
-
-  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | ";
-    if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
-
-  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( ExternalFenceHandleTypeFlags( bits ) );
-  }
-
-  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
-
-  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
-    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
-    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
-    if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
-
-  template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalMemoryFeatureFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalMemoryFeatureFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( ExternalMemoryFeatureFlags( bits ) );
-  }
-
-  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
-
-  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | ";
-    if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | ";
-    if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
-
-  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | ";
-    if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | ";
-    if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
   using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
 
-  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
+  template <>
+  struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
+      allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+                 | VkFlags( ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID )
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+                 | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) |
+                 VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+                 | VkFlags( ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA )
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
     };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
+    operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
+    operator&(ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
     return ExternalMemoryHandleTypeFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
+    operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
+    operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
   {
     return ~( ExternalMemoryHandleTypeFlags( bits ) );
   }
 
   using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
 
-  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
   {
+    if ( !value )
+      return "{}";
-
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | ";
-    if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
+      result += "OpaqueFd | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 )
+      result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt )
+      result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture )
+      result += "D3D11Texture | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt )
+      result += "D3D11TextureKmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap )
+      result += "D3D12Heap | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource )
+      result += "D3D12Resource | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
+      result += "DmaBufEXT | ";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID )
+      result += "AndroidHardwareBufferANDROID | ";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT )
+      result += "HostAllocationEXT | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
+      result += "HostMappedForeignMemoryEXT | ";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA )
+      result += "ZirconVmoFUCHSIA | ";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
+  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
-  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
-
-  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  template <>
+  struct FlagTraits<ExternalMemoryFeatureFlagBits>
  {
     enum : VkFlags
     {
-      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+      allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) |
+                 VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) |
+                 VkFlags( ExternalMemoryFeatureFlagBits::eImportable )
    };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
+    operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+    return ExternalMemoryFeatureFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
+    operator&(ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1;
+    return ExternalMemoryFeatureFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
+    operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1;
+    return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
   {
-    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+    return ~( ExternalMemoryFeatureFlags( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
-  {
+  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
+  {
+    if ( !value )
+      return "{}";
-
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | ";
-    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
-    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | ";
-    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly )
+      result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eExportable )
+      result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eImportable )
+      result += "Importable | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
+  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
-  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
-
-  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
+  template <>
+  struct FlagTraits<ExternalFenceHandleTypeFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
+      allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) |
+                 VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) |
+                 VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) |
+                 VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd )
    };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
+    operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
+    return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
+    operator&(ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return ExternalSemaphoreFeatureFlags( bit0 ) & bit1;
+    return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
+    operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1;
+    return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
   {
-    return ~( ExternalSemaphoreFeatureFlags( bits ) );
+    return ~( ExternalFenceHandleTypeFlags( bits ) );
   }
 
-  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
 
-  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
   {
+    if ( !value )
+      return "{}";
-
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | ";
-    if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd )
+      result += "OpaqueFd | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 )
+      result += "OpaqueWin32 | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt )
+      result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd )
+      result += "SyncFd | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
+  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
-  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
-
-  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
+  template <>
+  struct FlagTraits<ExternalFenceFeatureFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
+      allFlags =
+        VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable )
    };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
+    operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
+    return ExternalFenceFeatureFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
+    operator&(ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
+    return ExternalFenceFeatureFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
+    operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
+    return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
   {
-    return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
+    return ~( ExternalFenceFeatureFlags( bits ) );
   }
 
-  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
 
-  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
   {
+    if ( !value )
+      return "{}";
-
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
-    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
-    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
-    if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | ";
-    if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & ExternalFenceFeatureFlagBits::eExportable )
+      result += "Exportable | ";
+    if ( value & ExternalFenceFeatureFlagBits::eImportable )
+      result += "Importable | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
-
-  using FenceCreateFlags = Flags<FenceCreateFlagBits>;
-
-  template <> struct FlagTraits<FenceCreateFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return FenceCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return FenceCreateFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return FenceCreateFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( FenceCreateFlags( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
   using FenceImportFlags = Flags<FenceImportFlagBits>;
 
-  template <> struct FlagTraits<FenceImportFlagBits>
+  template <>
+  struct FlagTraits<FenceImportFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(FenceImportFlagBits::eTemporary)
+      allFlags = VkFlags( FenceImportFlagBits::eTemporary )
    };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0,
+                                                                     FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return FenceImportFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&(FenceImportFlagBits bit0,
+                                                                    FenceImportFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
     return FenceImportFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0,
+                                                                     FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
     return FenceImportFlags( bit0 ) ^ bit1;
   }
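(The FenceImportFlags hunk above also shows why each FlagTraits specialization matters: in this header, operator~ complements a Flags value against FlagTraits<...>::allFlags rather than against every bit of the underlying VkFlags, so bits the header does not know about can never be set. A compile-time sketch of that behavior, illustrative only and under the same include-path assumption as the earlier example:

  #include <vulkan/vulkan.hpp>

  // eTemporary is the only FenceImportFlagBits value, so complementing it
  // against allFlags leaves an empty mask.
  constexpr vk::FenceImportFlags none = ~vk::FenceImportFlags( vk::FenceImportFlagBits::eTemporary );
  static_assert( !none, "operator~ masks the complement to FlagTraits::allFlags" );
)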
@@ -10803,1639 +16206,269 @@ namespace VULKAN_HPP_NAMESPACE
 
   using FenceImportFlagsKHR = FenceImportFlags;
 
-  VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
   {
+    if ( !value )
+      return "{}";
-
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & FenceImportFlagBits::eTemporary )
+      result += "Temporary | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
+  using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
-  using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
-
-  template <> struct FlagTraits<FormatFeatureFlagBits>
+  template <>
+  struct FlagTraits<SemaphoreImportFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT)
+      allFlags = VkFlags( SemaphoreImportFlagBits::eTemporary )
    };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
+    operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return FormatFeatureFlags( bit0 ) | bit1;
+    return SemaphoreImportFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&(SemaphoreImportFlagBits bit0,
+                                                                        SemaphoreImportFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return FormatFeatureFlags( bit0 ) & bit1;
+    return SemaphoreImportFlags( bit0 ) & bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
+    operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return FormatFeatureFlags( bit0 ) ^ bit1;
+    return SemaphoreImportFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits )
+    VULKAN_HPP_NOEXCEPT
   {
-    return ~( FormatFeatureFlags( bits ) );
+    return ~( SemaphoreImportFlags( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
-  {
+  using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
+  {
+    if ( !value )
+      return "{}";
-
-    if ( !value ) return "{}";
     std::string result;
-
-    if ( value & FormatFeatureFlagBits::eSampledImage ) result += "SampledImage | ";
-    if ( value & FormatFeatureFlagBits::eStorageImage ) result += "StorageImage | ";
-    if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) result += "StorageImageAtomic | ";
-    if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
-    if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
-    if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | ";
-    if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
-    if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | ";
-    if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | ";
-    if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
-    if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | ";
-    if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | ";
-    if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | ";
-    if ( value & FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | ";
-    if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | ";
-    if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | ";
-    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | ";
-    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
-    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
-    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
-    if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | ";
-    if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | ";
-    if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | ";
-    if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | ";
-    if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | ";
-    if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
+    if ( value & SemaphoreImportFlagBits::eTemporary )
+      result += "Temporary | ";
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
+  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
-  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
-
-  template <> struct FlagTraits<FramebufferCreateFlagBits>
+  template <>
+  struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
   {
     enum : VkFlags
     {
-      allFlags = VkFlags(FramebufferCreateFlagBits::eImageless)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return FramebufferCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return FramebufferCreateFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return FramebufferCreateFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( FramebufferCreateFlags( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & FramebufferCreateFlagBits::eImageless ) result += "Imageless | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
-
-  template <> struct FlagTraits<GeometryFlagBitsKHR>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(GeometryFlagBitsKHR::eOpaque) | VkFlags(GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return GeometryFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return GeometryFlagsKHR( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return GeometryFlagsKHR( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( GeometryFlagsKHR( bits ) );
-  }
-
-  using GeometryFlagsNV = GeometryFlagsKHR;
-
-  VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | ";
-    if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
-
-  template <> struct FlagTraits<GeometryInstanceFlagBitsKHR>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable) | VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsKHR::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsKHR::eForceNoOpaque)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return GeometryInstanceFlagsKHR( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return GeometryInstanceFlagsKHR( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return GeometryInstanceFlagsKHR( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( GeometryInstanceFlagsKHR( bits ) );
-  }
-
-  using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
-
-  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | ";
-    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | ";
-    if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | ";
-    if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-  enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags
-  {};
-
-  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
-  {
-    return "(void)";
-  }
-
-  using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
-
-  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
-  {
-
-    return "{}";
-  }
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-  enum class IOSSurfaceCreateFlagBitsMVK : VkFlags
-  {};
-
-  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
-  {
-    return "(void)";
-  }
-
-  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
-
-  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
-  {
-
-    return "{}";
-  }
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-
-  using ImageAspectFlags = Flags<ImageAspectFlagBits>;
-
-  template <> struct FlagTraits<ImageAspectFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ImageAspectFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ImageAspectFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ImageAspectFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( ImageAspectFlags( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & ImageAspectFlagBits::eColor ) result += "Color | ";
-    if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | ";
-    if ( value & ImageAspectFlagBits::eStencil ) result += "Stencil | ";
-    if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | ";
-    if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | ";
-    if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | ";
-    if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | ";
-    if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | ";
-    if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | ";
-    if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | ";
-    if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-
-  using ImageCreateFlags = Flags<ImageCreateFlagBits>;
-
-  template <> struct FlagTraits<ImageCreateFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) | VkFlags(ImageCreateFlagBits::eSubsampledEXT)
-    };
-  };
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ImageCreateFlags( bit0 ) | bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ImageCreateFlags( bit0 ) & bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
-  {
-    return ImageCreateFlags( bit0 ) ^ bit1;
-  }
-
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
-  {
-    return ~( ImageCreateFlags( bits ) );
-  }
-
-  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
-  {
-
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
-    if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
-    if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
-    if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | ";
-    if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | ";
-    if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | ";
-    if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
-    if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | ";
-    if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | ";
-    if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | ";
-    if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | ";
-    if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | ";
-    if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | ";
-    if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | ";
-    if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags
-  {};
-
-  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
-  {
-    return "(void)";
-  }
-
-  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
-
-  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA )
-  {
-
-    return "{}";
-  }
+      allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) |
+                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) |
+                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) |
+                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) |
+                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+                 | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA )
 #endif /*VK_USE_PLATFORM_FUCHSIA*/
-
-
-  using ImageUsageFlags = Flags<ImageUsageFlagBits>;
-
-  template <> struct FlagTraits<ImageUsageFlagBits>
-  {
-    enum : VkFlags
-    {
-      allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT)
    };
   };
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
+    operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ImageUsageFlags( bit0 ) | bit1;
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
+    operator&(ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1)VULKAN_HPP_NOEXCEPT
   {
-    return ImageUsageFlags( bit0 ) & bit1;
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
  }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
+    operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
   {
-    return ImageUsageFlags( bit0 ) ^ bit1;
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
   }
 
-  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
+    operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
   {
-    return ~( ImageUsageFlags( bits ) );
+    return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
   }
 
-  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
-  {
+  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+
ExternalSemaphoreHandleTypeFlags value ) + { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | "; - if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | "; - if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | "; - if ( value & ImageUsageFlagBits::eStorage ) result += "Storage | "; - if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | "; - if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | "; - if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | "; - if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | "; - if ( value & ImageUsageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) + result += "D3D12Fence | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) + result += "ZirconEventFUCHSIA | "; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + using ExternalSemaphoreFeatureFlags = Flags; - using ImageViewCreateFlags = Flags; - - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) + allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) | + VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { - return ImageViewCreateFlags( bit0 ) | bit1; + return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator&(ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1)VULKAN_HPP_NOEXCEPT { - return ImageViewCreateFlags( bit0 ) & bit1; + return ExternalSemaphoreFeatureFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { - return ImageViewCreateFlags( bit0 ) ^ bit1; + return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( 
ImageViewCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT { - return ~( ImageViewCreateFlags( bits ) ); + return ~( ExternalSemaphoreFeatureFlags( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) - { + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) + { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + //=== VK_VERSION_1_2 === - using IndirectCommandsLayoutUsageFlagsNV = Flags; + using DescriptorBindingFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences) + allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) | + VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) | + VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) | + VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { - return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1; + return DescriptorBindingFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator&(DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1)VULKAN_HPP_NOEXCEPT { - return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1; + return DescriptorBindingFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { - return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1; + return DescriptorBindingFlags( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) + VULKAN_HPP_NOEXCEPT { - return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) ); + return ~( DescriptorBindingFlags( bits ) ); } - VULKAN_HPP_INLINE 
std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) - { + using DescriptorBindingFlagsEXT = DescriptorBindingFlags; + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) + { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) + result += "UpdateUnusedWhilePending | "; + if ( value & DescriptorBindingFlagBits::ePartiallyBound ) + result += "PartiallyBound | "; + if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) + result += "VariableDescriptorCount | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - - using IndirectStateFlagsNV = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(IndirectStateFlagBitsNV::eFlagFrontface) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectStateFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectStateFlagsNV( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectStateFlagsNV( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT - { - return ~( IndirectStateFlagsNV( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using InstanceCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) - { - - return "{}"; - } - -#ifdef VK_USE_PLATFORM_MACOS_MVK - enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) - { - return "(void)"; - } - - using MacOSSurfaceCreateFlagsMVK = Flags; - - VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) - { - - return "{}"; - } -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - - using MemoryAllocateFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) | VkFlags(MemoryAllocateFlagBits::eDeviceAddress) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplay) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryAllocateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT - { - return MemoryAllocateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryAllocateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( MemoryAllocateFlags( bits ) ); - } - - using MemoryAllocateFlagsKHR = MemoryAllocateFlags; - - VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | "; - if ( value & MemoryAllocateFlagBits::eDeviceAddress ) result += "DeviceAddress | "; - if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using MemoryHeapFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryHeapFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryHeapFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryHeapFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( MemoryHeapFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | "; - if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class MemoryMapFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) - { - return "(void)"; - } - - using MemoryMapFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) - { - - return "{}"; - } - - - using MemoryPropertyFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryPropertyFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryPropertyFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator^( 
MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryPropertyFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( MemoryPropertyFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | "; - if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | "; - if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | "; - if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | "; - if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | "; - if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | "; - if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += "DeviceCoherentAMD | "; - if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - -#ifdef VK_USE_PLATFORM_METAL_EXT - enum class MetalSurfaceCreateFlagBitsEXT : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) - { - return "(void)"; - } - - using MetalSurfaceCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) - { - - return "{}"; - } -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - - using PeerMemoryFeatureFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PeerMemoryFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PeerMemoryFeatureFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PeerMemoryFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PeerMemoryFeatureFlags( bits ) ); - } - - using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; - - VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | "; - if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | "; - if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | "; - if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using PerformanceCounterDescriptionFlagsKHR = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) | 
VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PerformanceCounterDescriptionFlagsKHR( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) result += "PerformanceImpacting | "; - if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) result += "ConcurrentlyImpacted | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using PipelineCacheCreateFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(PipelineCacheCreateFlagBits::eExternallySynchronizedEXT) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCacheCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCacheCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCacheCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineCacheCreateFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) result += "ExternallySynchronizedEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineColorBlendStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineColorBlendStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags ) - { - - return "{}"; - } - - - using PipelineCompilerControlFlagsAMD = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) - { - - return "{}"; - } - - enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags - {}; - - 
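// Editorial usage sketch -- illustrative only, not part of the generated
// header or of this patch, and assuming the default VULKAN_HPP_NAMESPACE of
// "vk". Flag types with no defined bits (such as
// PipelineCompilerControlFlagsAMD above) always stringify to "{}", while
// FlagTraits-backed types list each set bit.
#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
  vk::PipelineCompilerControlFlagsAMD empty;    // default-constructed: no bits set
  std::cout << vk::to_string( empty ) << '\n';  // prints "{}"

  vk::PipelineCacheCreateFlags cache = vk::PipelineCacheCreateFlagBits::eExternallySynchronizedEXT;
  std::cout << vk::to_string( cache ) << '\n';  // prints "{ ExternallySynchronizedEXT }"
}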
VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) - { - return "(void)"; - } - - using PipelineCoverageModulationStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) - { - - return "{}"; - } - - enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) - { - return "(void)"; - } - - using PipelineCoverageReductionStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) - { - - return "{}"; - } - - enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) - { - return "(void)"; - } - - using PipelineCoverageToColorStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) - { - - return "{}"; - } - - - using PipelineCreateFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR) | VkFlags(PipelineCreateFlagBits::eRayTracingSkipAabbsKHR) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) | VkFlags(PipelineCreateFlagBits::eIndirectBindableNV) | VkFlags(PipelineCreateFlagBits::eLibraryKHR) | VkFlags(PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT) | VkFlags(PipelineCreateFlagBits::eEarlyReturnOnFailureEXT) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineCreateFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | "; - if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | "; - if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | "; - if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; - 
if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | "; - if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | "; - if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; - if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; - if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | "; - if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | "; - if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | "; - if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) result += "FailOnPipelineCompileRequiredEXT | "; - if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) result += "EarlyReturnOnFailureEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using PipelineCreationFeedbackFlagsEXT = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineCreationFeedbackFlagsEXT( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) result += "Valid | "; - if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | "; - if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineDepthStencilStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE 
std::string to_string( PipelineDepthStencilStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineDepthStencilStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) - { - return "(void)"; - } - - using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) - { - - return "{}"; - } - - enum class PipelineDynamicStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineDynamicStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineInputAssemblyStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineLayoutCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) - { - return "(void)"; - } - - using PipelineLayoutCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) - { - - return "{}"; - } - - enum class PipelineMultisampleStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineMultisampleStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) - { - return "(void)"; - } - - using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) - { - - return "{}"; - } - - enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) - { - return "(void)"; - } - - using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) - { - - return "{}"; - } - - enum class PipelineRasterizationStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineRasterizationStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) - { - return "(void)"; - } - - using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) - { - - return "{}"; - } - - - using 
PipelineShaderStageCreateFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineShaderStageCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineShaderStageCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineShaderStageCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineShaderStageCreateFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) result += "AllowVaryingSubgroupSizeEXT | "; - if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) result += "RequireFullSubgroupsEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using PipelineStageFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eRayTracingShaderKHR) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildKHR) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) | VkFlags(PipelineStageFlagBits::eCommandPreprocessNV) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags( bit0 ) & bit1; - } - - 
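// Editorial usage sketch -- illustrative only, not part of the generated
// header or of this patch, and assuming the default VULKAN_HPP_NAMESPACE of
// "vk". The constexpr operators defined here combine single
// PipelineStageFlagBits values into a PipelineStageFlags mask at compile
// time; operator& then tests membership, and to_string() prints the set
// bits in enum-declaration order.
#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
  constexpr vk::PipelineStageFlags stages =
    vk::PipelineStageFlagBits::eVertexShader | vk::PipelineStageFlagBits::eFragmentShader;
  static_assert( static_cast<bool>( stages & vk::PipelineStageFlagBits::eVertexShader ),
                 "VertexShader bit should be set" );
  std::cout << vk::to_string( stages ) << '\n';  // prints "{ VertexShader | FragmentShader }"
}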
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineStageFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | "; - if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | "; - if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | "; - if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | "; - if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | "; - if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | "; - if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | "; - if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | "; - if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | "; - if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | "; - if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | "; - if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | "; - if ( value & PipelineStageFlagBits::eTransfer ) result += "Transfer | "; - if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | "; - if ( value & PipelineStageFlagBits::eHost ) result += "Host | "; - if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | "; - if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | "; - if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | "; - if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | "; - if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | "; - if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | "; - if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | "; - if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | "; - if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineTessellationStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineTessellationStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineVertexInputStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineVertexInputStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string 
to_string( PipelineVertexInputStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineViewportStateCreateFlagBits : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineViewportStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) - { - - return "{}"; - } - - enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) - { - return "(void)"; - } - - using PipelineViewportSwizzleStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) - { - - return "{}"; - } - - - using PrivateDataSlotCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT ) - { - - return "{}"; - } - - - using QueryControlFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(QueryControlFlagBits::ePrecise) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryControlFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryControlFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryControlFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( QueryControlFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using QueryPipelineStatisticFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryPipelineStatisticFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryPipelineStatisticFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
QueryPipelineStatisticFlags operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryPipelineStatisticFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( QueryPipelineStatisticFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | "; - if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) result += "VertexShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | "; - if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using QueryPoolCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) - { - - return "{}"; - } - - - using QueryResultFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryResultFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryResultFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryResultFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( QueryResultFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & QueryResultFlagBits::e64 ) result += "64 | "; - if ( value & QueryResultFlagBits::eWait ) result += "Wait | "; - if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | "; - if ( value & QueryResultFlagBits::ePartial ) result += "Partial | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using 
QueueFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueueFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueueFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueueFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( QueueFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & QueueFlagBits::eGraphics ) result += "Graphics | "; - if ( value & QueueFlagBits::eCompute ) result += "Compute | "; - if ( value & QueueFlagBits::eTransfer ) result += "Transfer | "; - if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | "; - if ( value & QueueFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using RenderPassCreateFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(RenderPassCreateFlagBits::eTransformQCOM) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderPassCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderPassCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderPassCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( RenderPassCreateFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & RenderPassCreateFlagBits::eTransformQCOM ) result += "TransformQCOM | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - using ResolveModeFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ResolveModeFlagBits::eNone) | VkFlags(ResolveModeFlagBits::eSampleZero) | VkFlags(ResolveModeFlagBits::eAverage) | VkFlags(ResolveModeFlagBits::eMin) | VkFlags(ResolveModeFlagBits::eMax) + allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) | + VkFlags( ResolveModeFlagBits::eAverage ) | VkFlags( ResolveModeFlagBits::eMin ) | + VkFlags( ResolveModeFlagBits::eMax ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, + 
ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ResolveModeFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&(ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1)VULKAN_HPP_NOEXCEPT { return ResolveModeFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return ResolveModeFlags( bit0 ) ^ bit1; } @@ -12447,182 +16480,48 @@ namespace VULKAN_HPP_NAMESPACE using ResolveModeFlagsKHR = ResolveModeFlags; - VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & ResolveModeFlagBits::eSampleZero ) result += "SampleZero | "; - if ( value & ResolveModeFlagBits::eAverage ) result += "Average | "; - if ( value & ResolveModeFlagBits::eMin ) result += "Min | "; - if ( value & ResolveModeFlagBits::eMax ) result += "Max | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & ResolveModeFlagBits::eSampleZero ) + result += "SampleZero | "; + if ( value & ResolveModeFlagBits::eAverage ) + result += "Average | "; + if ( value & ResolveModeFlagBits::eMin ) + result += "Min | "; + if ( value & ResolveModeFlagBits::eMax ) + result += "Max | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - - using SampleCountFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SampleCountFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SampleCountFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SampleCountFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SampleCountFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SampleCountFlagBits::e1 ) result += "1 | "; - if ( value & SampleCountFlagBits::e2 ) result += "2 | "; - if ( value & SampleCountFlagBits::e4 ) result += "4 | "; - if ( value & SampleCountFlagBits::e8 ) result += "8 | "; - if ( value & SampleCountFlagBits::e16 ) result += "16 | "; - if ( value & SampleCountFlagBits::e32 ) result += "32 | "; - if ( value & SampleCountFlagBits::e64 ) result += "64 | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using SamplerCreateFlags = Flags; - - template <> struct FlagTraits - { 
- enum : VkFlags - { - allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SamplerCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SamplerCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SamplerCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SamplerCreateFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; - if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using SemaphoreCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) - { - - return "{}"; - } - - - using SemaphoreImportFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreImportFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreImportFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreImportFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SemaphoreImportFlags( bits ) ); - } - - using SemaphoreImportFlagsKHR = SemaphoreImportFlags; - - VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - using SemaphoreWaitFlags = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SemaphoreWaitFlagBits::eAny) + allFlags = VkFlags( SemaphoreWaitFlagBits::eAny ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SemaphoreWaitFlags( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
SemaphoreWaitFlags operator&(SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1)VULKAN_HPP_NOEXCEPT { return SemaphoreWaitFlags( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT { return SemaphoreWaitFlags( bit0 ) ^ bit1; } @@ -12634,596 +16533,344 @@ namespace VULKAN_HPP_NAMESPACE using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; - VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) + VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & SemaphoreWaitFlagBits::eAny ) result += "Any | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SemaphoreWaitFlagBits::eAny ) + result += "Any | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + //=== VK_KHR_surface === - using ShaderCorePropertiesFlagsAMD = Flags; + using CompositeAlphaFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) - { - - return "{}"; - } - - - using ShaderModuleCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) - { - - return "{}"; - } - - - using ShaderStageFlags = Flags; - - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenKHR) | VkFlags(ShaderStageFlagBits::eAnyHitKHR) | VkFlags(ShaderStageFlagBits::eClosestHitKHR) | VkFlags(ShaderStageFlagBits::eMissKHR) | VkFlags(ShaderStageFlagBits::eIntersectionKHR) | VkFlags(ShaderStageFlagBits::eCallableKHR) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) + allFlags = VkFlags( CompositeAlphaFlagBitsKHR::eOpaque ) | VkFlags( CompositeAlphaFlagBitsKHR::ePreMultiplied ) | + VkFlags( CompositeAlphaFlagBitsKHR::ePostMultiplied ) | VkFlags( CompositeAlphaFlagBitsKHR::eInherit ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return ShaderStageFlags( bit0 ) | bit1; + return CompositeAlphaFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator&(CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT { - return ShaderStageFlags( bit0 ) & bit1; + return CompositeAlphaFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator^( CompositeAlphaFlagBitsKHR bit0, 
CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return ShaderStageFlags( bit0 ) ^ bit1; + return CompositeAlphaFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT { - return ~( ShaderStageFlags( bits ) ); + return ~( CompositeAlphaFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | "; - if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | "; - if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | "; - if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | "; - if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | "; - if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | "; - if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | "; - if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | "; - if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | "; - if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | "; - if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | "; - if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | "; - if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | "; - if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using SparseImageFormatFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseImageFormatFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseImageFormatFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseImageFormatFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SparseImageFormatFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | "; - if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | "; - if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using SparseMemoryBindFlags = Flags; - - template <> struct FlagTraits - { 
- enum : VkFlags - { - allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseMemoryBindFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseMemoryBindFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseMemoryBindFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SparseMemoryBindFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using StencilFaceFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return StencilFaceFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return StencilFaceFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return StencilFaceFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( StencilFaceFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & StencilFaceFlagBits::eFront ) result += "Front | "; - if ( value & StencilFaceFlagBits::eBack ) result += "Back | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - -#ifdef VK_USE_PLATFORM_GGP - enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) - { - return "(void)"; - } - - using StreamDescriptorSurfaceCreateFlagsGGP = Flags; - - VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) - { - - return "{}"; - } -#endif /*VK_USE_PLATFORM_GGP*/ - - - using SubgroupFeatureFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits 
bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubgroupFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubgroupFeatureFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubgroupFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SubgroupFeatureFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | "; - if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | "; - if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | "; - if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | "; - if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | "; - if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | "; - if ( value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | "; - if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | "; - if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using SubpassDescriptionFlags = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) | VkFlags(SubpassDescriptionFlagBits::eFragmentRegionQCOM) | VkFlags(SubpassDescriptionFlagBits::eShaderResolveQCOM) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubpassDescriptionFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubpassDescriptionFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubpassDescriptionFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SubpassDescriptionFlags( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | "; - if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | "; - if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) result += "FragmentRegionQCOM | "; - if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) result += "ShaderResolveQCOM | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using SurfaceCounterFlagsEXT = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = 
VkFlags(SurfaceCounterFlagBitsEXT::eVblank) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceCounterFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceCounterFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceCounterFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SurfaceCounterFlagsEXT( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - - using SurfaceTransformFlagsKHR = Flags; - - template <> struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceTransformFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceTransformFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceTransformFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SurfaceTransformFlagsKHR( bits ) ); - } - - VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) - { - - if ( !value ) return "{}"; - std::string result; - - if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += 
"HorizontalMirrorRotate270 | "; - if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) + result += "PreMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) + result += "PostMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::eInherit ) + result += "Inherit | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + //=== VK_KHR_swapchain === using SwapchainCreateFlagsKHR = Flags; - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat) + allFlags = VkFlags( SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) | + VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) | + VkFlags( SwapchainCreateFlagBitsKHR::eMutableFormat ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return SwapchainCreateFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator&(SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT { return SwapchainCreateFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { return SwapchainCreateFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT { return ~( SwapchainCreateFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | "; - if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | "; - if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & SwapchainCreateFlagBitsKHR::eProtected ) + result += "Protected | "; + if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) + result += "MutableFormat | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + using DeviceGroupPresentModeFlagsKHR = Flags; - using 
ToolPurposeFlagsEXT = Flags; - - template <> struct FlagTraits + template <> + struct FlagTraits { enum : VkFlags { - allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) | VkFlags(ToolPurposeFlagBitsEXT::eProfiling) | VkFlags(ToolPurposeFlagBitsEXT::eTracing) | VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) | VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers) + allFlags = VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocal ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) }; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return ToolPurposeFlagsEXT( bit0 ) | bit1; + return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator&(DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT { - return ToolPurposeFlagsEXT( bit0 ) & bit1; + return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - return ToolPurposeFlagsEXT( bit0 ) ^ bit1; + return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1; } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT { - return ~( ToolPurposeFlagsEXT( bits ) ); + return ~( DeviceGroupPresentModeFlagsKHR( bits ) ); } - VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) { + if ( !value ) + return "{}"; - if ( !value ) return "{}"; std::string result; - - if ( value & ToolPurposeFlagBitsEXT::eValidation ) result += "Validation | "; - if ( value & ToolPurposeFlagBitsEXT::eProfiling ) result += "Profiling | "; - if ( value & ToolPurposeFlagBitsEXT::eTracing ) result += "Tracing | "; - if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) result += "AdditionalFeatures | "; - if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) result += "ModifyingFeatures | "; - if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) result += "DebugReporting | "; - if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) result += "DebugMarkers | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) + result += "Local | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) + result += "Remote | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) + result += "Sum | 
"; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + result += "LocalMultiDevice | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - enum class ValidationCacheCreateFlagBitsEXT : VkFlags - {}; + //=== VK_KHR_display === - VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) + enum class DisplayModeCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR ) { return "(void)"; } - using ValidationCacheCreateFlagsEXT = Flags; + using DisplayModeCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) { - return "{}"; } -#ifdef VK_USE_PLATFORM_VI_NN - enum class ViSurfaceCreateFlagBitsNN : VkFlags - {}; + using DisplayPlaneAlphaFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DisplayPlaneAlphaFlagBitsKHR::eOpaque ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::eGlobal ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator&(DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) + result += "Global | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) + result += "PerPixel | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + result += "PerPixelPremultiplied | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR ) { return "(void)"; } - using ViSurfaceCreateFlagsNN = Flags; + using DisplaySurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) { - return "{}"; } -#endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags - {}; + using SurfaceTransformFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) + template <> + struct FlagTraits { - return "(void)"; + enum : VkFlags + { + allFlags = VkFlags( SurfaceTransformFlagBitsKHR::eIdentity ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate90 
) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eInherit ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceTransformFlagsKHR( bit0 ) | bit1; } - using WaylandSurfaceCreateFlagsKHR = Flags; - - VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator&(SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT { - - return "{}"; - } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - enum class Win32SurfaceCreateFlagBitsKHR : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) - { - return "(void)"; + return SurfaceTransformFlagsKHR( bit0 ) & bit1; } - using Win32SurfaceCreateFlagsKHR = Flags; - - VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT { - - return "{}"; - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - enum class XcbSurfaceCreateFlagBitsKHR : VkFlags - {}; - - VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) - { - return "(void)"; + return SurfaceTransformFlagsKHR( bit0 ) ^ bit1; } - using XcbSurfaceCreateFlagsKHR = Flags; - - VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT { - - return "{}"; + return ~( SurfaceTransformFlagsKHR( bits ) ); } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) + result += "Identity | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) + result += "Rotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) + result += "Rotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) + result += "Rotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) + result += "HorizontalMirror | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) + result += "HorizontalMirrorRotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) + result += "HorizontalMirrorRotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) + result += "HorizontalMirrorRotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eInherit ) + result += "Inherit | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + enum class XlibSurfaceCreateFlagBitsKHR : VkFlags - {}; + { + }; VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR ) { @@ -13232,13 +16879,2680 
@@ namespace VULKAN_HPP_NAMESPACE using XlibSurfaceCreateFlagsKHR = Flags; - VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) { - return "{}"; } #endif /*VK_USE_PLATFORM_XLIB_KHR*/ -} // namespace VULKAN_HPP_NAMESPACE + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + enum class XcbSurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using XcbSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using WaylandSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using AndroidSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + enum class Win32SurfaceCreateFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using Win32SurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + using DebugReportFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugReportFlagBitsEXT::eInformation ) | VkFlags( DebugReportFlagBitsEXT::eWarning ) | + VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) | + VkFlags( DebugReportFlagBitsEXT::eDebug ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&(DebugReportFlagBitsEXT bit0, + DebugReportFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DebugReportFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugReportFlagBitsEXT::eInformation ) + result += "Information | "; + if ( value & DebugReportFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & 
DebugReportFlagBitsEXT::ePerformanceWarning ) + result += "PerformanceWarning | "; + if ( value & DebugReportFlagBitsEXT::eError ) + result += "Error | "; + if ( value & DebugReportFlagBitsEXT::eDebug ) + result += "Debug | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + using VideoCodecOperationFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodecOperationFlagBitsKHR::eInvalid ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) | + VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) | + VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator|( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator&(VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator^( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator~( VideoCodecOperationFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodecOperationFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) + result += "EncodeH264EXT | "; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) + result += "DecodeH264EXT | "; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) + result += "DecodeH265EXT | "; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoChromaSubsamplingFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoChromaSubsamplingFlagBitsKHR::eInvalid ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::e420 ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::e422 ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::e444 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator|( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator&(VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator^( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR 
bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator~( VideoChromaSubsamplingFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoChromaSubsamplingFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) + result += "Monochrome | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 ) + result += "420 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 ) + result += "422 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 ) + result += "444 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoComponentBitDepthFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoComponentBitDepthFlagBitsKHR::eInvalid ) | + VkFlags( VideoComponentBitDepthFlagBitsKHR::e8 ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e10 ) | + VkFlags( VideoComponentBitDepthFlagBitsKHR::e12 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator|( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator&(VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator^( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator~( VideoComponentBitDepthFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoComponentBitDepthFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoComponentBitDepthFlagBitsKHR::e8 ) + result += "8 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e10 ) + result += "10 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e12 ) + result += "12 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoCapabilitiesFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCapabilitiesFlagBitsKHR::eProtectedContent ) | + VkFlags( VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR + operator|( VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCapabilitiesFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR + operator&(VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCapabilitiesFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR + operator^( VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCapabilitiesFlagsKHR( bit0 ) ^ bit1; + } + + 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR operator~( VideoCapabilitiesFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCapabilitiesFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCapabilitiesFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoCapabilitiesFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + if ( value & VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages ) + result += "SeparateReferenceImages | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoSessionCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoSessionCreateFlagBitsKHR::eDefault ) | VkFlags( VideoSessionCreateFlagBitsKHR::eProtectedContent ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator|( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator&(VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator^( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator~( VideoSessionCreateFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoSessionCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class VideoBeginCodingFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR ) + { + return "(void)"; + } + + using VideoBeginCodingFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR ) + { + return "{}"; + } + + enum class VideoEndCodingFlagBitsKHR : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR ) + { + return "(void)"; + } + + using VideoEndCodingFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR ) + { + return "{}"; + } + + using VideoCodingControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodingControlFlagBitsKHR::eDefault ) | VkFlags( VideoCodingControlFlagBitsKHR::eReset ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator|( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator&(VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator^( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return 
VideoCodingControlFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator~( VideoCodingControlFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodingControlFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoCodingControlFlagBitsKHR::eReset ) + result += "Reset | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoCodingQualityPresetFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodingQualityPresetFlagBitsKHR::eDefault ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::eNormal ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::ePower ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::eQuality ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator|( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator&(VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator^( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator~( VideoCodingQualityPresetFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodingQualityPresetFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoCodingQualityPresetFlagBitsKHR::eNormal ) + result += "Normal | "; + if ( value & VideoCodingQualityPresetFlagBitsKHR::ePower ) + result += "Power | "; + if ( value & VideoCodingQualityPresetFlagBitsKHR::eQuality ) + result += "Quality | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + using VideoDecodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoDecodeFlagBitsKHR::eDefault ) | VkFlags( VideoDecodeFlagBitsKHR::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR + operator|( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator&(VideoDecodeFlagBitsKHR bit0, + VideoDecodeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR + operator^( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator~( VideoDecodeFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoDecodeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( 
VideoDecodeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoDecodeFlagBitsKHR::eReserved0 ) + result += "Reserved0 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h264 === + + using VideoEncodeH264CapabilitiesFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8 ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame ) | + VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator|( + VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator&( + VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator^( + VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT + operator~( VideoEncodeH264CapabilitiesFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264CapabilitiesFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilitiesFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac ) + result += "VkVideoEncodeH264CapabilityCabac | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc ) + result += "VkVideoEncodeH264CapabilityCavlc | "; + if ( value & 
VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit ) + result += "VkVideoEncodeH264CapabilityWeightedBiPredImplicit | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8 ) + result += "VkVideoEncodeH264CapabilityTransform8X8 | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset ) + result += "VkVideoEncodeH264CapabilityChromaQpOffset | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset ) + result += "VkVideoEncodeH264CapabilitySecondChromaQpOffset | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled ) + result += "VkVideoEncodeH264CapabilityDeblockingFilterDisabled | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled ) + result += "VkVideoEncodeH264CapabilityDeblockingFilterEnabled | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial ) + result += "VkVideoEncodeH264CapabilityDeblockingFilterPartial | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame ) + result += "VkVideoEncodeH264CapabilityMultipleSlicePerFrame | "; + if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize ) + result += "VkVideoEncodeH264CapabilityEvenlyDistributedSliceSize | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264InputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eFrame ) | + VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eSlice ) | + VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator|( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator&(VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator^( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator~( VideoEncodeH264InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264InputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame ) + result += "Frame | "; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice ) + result += "Slice | "; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl ) + result += "NonVcl | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264OutputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( 
VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) | + VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) | + VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator|( + VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT + operator&(VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator^( + VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT + operator~( VideoEncodeH264OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264OutputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) + result += "Frame | "; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) + result += "Slice | "; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl ) + result += "NonVcl | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264CreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoEncodeH264CreateFlagBitsEXT::eDefault ) | VkFlags( VideoEncodeH264CreateFlagBitsEXT::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator|( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator&(VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator^( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator~( VideoEncodeH264CreateFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264CreateFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264CreateFlagBitsEXT::eReserved0 ) + result += "Reserved0 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h264 === + + using VideoDecodeH264FieldLayoutFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly ) | + VkFlags( 
VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane ) | + VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT operator|( + VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT + operator&(VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT operator^( + VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT + operator~( VideoDecodeH264FieldLayoutFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoDecodeH264FieldLayoutFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264FieldLayoutFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane ) + result += "LineInterlacedPlane | "; + if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane ) + result += "SeparateInterlacedPlane | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + enum class VideoDecodeH264CreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagBitsEXT ) + { + return "(void)"; + } + + using VideoDecodeH264CreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) + { + return "(void)"; + } + + using StreamDescriptorSurfaceCreateFlagsGGP = Flags; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + using ExternalMemoryHandleTypeFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator&(ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, 
ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) + result += "D3D11Image | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + result += "D3D11ImageKmt | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalMemoryFeatureFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eExportable ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator&(ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryFeatureFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) + result += "Importable | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + enum class ViSurfaceCreateFlagBitsNN : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) + { + return "(void)"; + } + + using ViSurfaceCreateFlagsNN = Flags; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + + using ConditionalRenderingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ConditionalRenderingFlagBitsEXT::eInverted ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) | bit1; + } + + 
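  // Editor's note, not part of the patch: a minimal sketch of what the generated
  // operator families above buy a caller. Combining two strongly typed bits yields a
  // Flags value rather than a raw VkFlags integer, so mixing unrelated flag types
  // fails to compile. The `vk` namespace alias for VULKAN_HPP_NAMESPACE is assumed.
  //
  //   #include <vulkan/vulkan.hpp>
  //
  //   bool wantsKmtHandle( vk::ExternalMemoryHandleTypeFlagsNV handleTypes )
  //   {
  //     // operator| on the bit type returns ExternalMemoryHandleTypeFlagsNV
  //     const vk::ExternalMemoryHandleTypeFlagsNV win32Types =
  //       vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 |
  //       vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt;
  //     // operator& tests membership; Flags converts explicitly to bool
  //     return static_cast<bool>( handleTypes & win32Types &
  //                               vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt );
  //   }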
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator&(ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ConditionalRenderingFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) + result += "Inverted | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_display_surface_counter === + + using SurfaceCounterFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SurfaceCounterFlagBitsEXT::eVblank ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator&(SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SurfaceCounterFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SurfaceCounterFlagBitsEXT::eVblank ) + result += "Vblank | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_viewport_swizzle === + + enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_discard_rectangles === + + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_conservative_rasterization === + + enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationConservativeStateCreateFlagsEXT = + Flags; + + VULKAN_HPP_INLINE std::string to_string( 
PipelineRasterizationConservativeStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_depth_clip_enable === + + enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_performance_query === + + using PerformanceCounterDescriptionFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) | + VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR + operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PerformanceCounterDescriptionFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) + result += "PerformanceImpacting | "; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + result += "ConcurrentlyImpacted | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AcquireProfilingLockFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + enum class IOSSurfaceCreateFlagBitsMVK : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } + + using IOSSurfaceCreateFlagsMVK = Flags; + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } + + using MacOSSurfaceCreateFlagsMVK = Flags; + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + using DebugUtilsMessageSeverityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( 
DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eError ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT + operator&(DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT + operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) + result += "Verbose | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) + result += "Info | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) + result += "Error | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DebugUtilsMessageTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator&(DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) + result += "General | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + result += "Performance | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + 
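  // Editor's note, not part of the patch: in the to_string overloads above, the final
  // substr( 0, result.size() - 3 ) trims the trailing " | " separator, so
  // to_string( eWarning | eError ) renders as "{ Warning | Error }". A hedged sketch
  // of where these two flag types are typically consumed, assuming the `vk` namespace
  // alias and a user-supplied callback function:
  //
  //   #include <vulkan/vulkan.hpp>
  //
  //   vk::DebugUtilsMessengerCreateInfoEXT
  //     makeMessengerInfo( PFN_vkDebugUtilsMessengerCallbackEXT callback )
  //   {
  //     vk::DebugUtilsMessengerCreateInfoEXT info;
  //     info.messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
  //                            vk::DebugUtilsMessageSeverityFlagBitsEXT::eError;
  //     info.messageType     = vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
  //                            vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
  //                            vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance;
  //     info.pfnUserCallback = callback;
  //     return info;
  //   }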
enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) + { + return "(void)"; + } + + using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) + { + return "{}"; + } + + enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) + { + return "(void)"; + } + + using DebugUtilsMessengerCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_fragment_coverage_to_color === + + enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_KHR_acceleration_structure === + + using GeometryFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( GeometryFlagBitsKHR::eOpaque ) | VkFlags( GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&(GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( GeometryFlagsKHR( bits ) ); + } + + using GeometryFlagsNV = GeometryFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & GeometryFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + result += "NoDuplicateAnyHitInvocation | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using GeometryInstanceFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) | + VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator&(GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) 
VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( GeometryInstanceFlagsKHR( bits ) ); + } + + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) + result += "TriangleFacingCullDisable | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) + result += "TriangleFrontCounterclockwise | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) + result += "ForceOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + result += "ForceNoOpaque | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BuildAccelerationStructureFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR + operator&(BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR + operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BuildAccelerationStructureFlagsKHR( bits ) ); + } + + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) + result += "AllowUpdate | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) + result += "AllowCompaction | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) + result += "PreferFastTrace | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) + result += "PreferFastBuild | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + result += "LowMemory | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccelerationStructureCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
AccelerationStructureCreateFlagsKHR operator|( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator&( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator^( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR + operator~( AccelerationStructureCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccelerationStructureCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_framebuffer_mixed_samples === + + enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageModulationStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_validation_cache === + + enum class ValidationCacheCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) + { + return "(void)"; + } + + using ValidationCacheCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_AMD_pipeline_compiler_control === + + using PipelineCompilerControlFlagsAMD = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h265 === + + enum class VideoDecodeH265CreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagBitsEXT ) + { + return "(void)"; + } + + using VideoDecodeH265CreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_pipeline_creation_feedback === + + using PipelineCreationFeedbackFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineCreationFeedbackFlagBitsEXT::eValid ) | + VkFlags( PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) | + VkFlags( PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator&(PipelineCreationFeedbackFlagBitsEXT 
bit0, PipelineCreationFeedbackFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT + operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCreationFeedbackFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) + result += "Valid | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) + result += "ApplicationPipelineCacheHit | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) + result += "BasePipelineAcceleration | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) + { + return "(void)"; + } + + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + enum class MetalSurfaceCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using MetalSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_AMD_shader_core_properties2 === + + using ShaderCorePropertiesFlagsAMD = Flags; + + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) + { + return "{}"; + } + + //=== VK_EXT_tooling_info === + + using ToolPurposeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ToolPurposeFlagBitsEXT::eValidation ) | VkFlags( ToolPurposeFlagBitsEXT::eProfiling ) | + VkFlags( ToolPurposeFlagBitsEXT::eTracing ) | VkFlags( ToolPurposeFlagBitsEXT::eAdditionalFeatures ) | + VkFlags( ToolPurposeFlagBitsEXT::eModifyingFeatures ) | + VkFlags( ToolPurposeFlagBitsEXT::eDebugReporting ) | VkFlags( ToolPurposeFlagBitsEXT::eDebugMarkers ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT + operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&(ToolPurposeFlagBitsEXT bit0, + ToolPurposeFlagBitsEXT bit1)VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT + operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ToolPurposeFlagsEXT( bits ) ); + 
} + + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ToolPurposeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & ToolPurposeFlagBitsEXT::eProfiling ) + result += "Profiling | "; + if ( value & ToolPurposeFlagBitsEXT::eTracing ) + result += "Tracing | "; + if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) + result += "AdditionalFeatures | "; + if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) + result += "ModifyingFeatures | "; + if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) + result += "DebugReporting | "; + if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) + result += "DebugMarkers | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_coverage_reduction_mode === + + enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageReductionStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_headless_surface === + + enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using HeadlessSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_device_generated_commands === + + using IndirectStateFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( IndirectStateFlagBitsNV::eFlagFrontface ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&(IndirectStateFlagBitsNV bit0, + IndirectStateFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( IndirectStateFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) + result += "FlagFrontface | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using IndirectCommandsLayoutUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1; + } + + 
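  // Editor's note, not part of the patch: the FlagTraits<BitType>::allFlags member is
  // what keeps operator~ well defined; the complement is taken against the set of
  // known bits, so bits outside the enumeration never appear. A small sketch under
  // that assumption, using IndirectStateFlagsNV, whose only bit is eFlagFrontface:
  //
  //   #include <vulkan/vulkan.hpp>
  //
  //   bool complementOfAllBitsIsEmpty()
  //   {
  //     vk::IndirectStateFlagsNV all( vk::IndirectStateFlagBitsNV::eFlagFrontface );
  //     return !( ~all );  // ~all masks against allFlags, leaving no bits set
  //   }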
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV + operator&(IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV + operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) + result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) + result += "IndexedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + result += "UnorderedSequences | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_device_memory_report === + + enum class DeviceMemoryReportFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT ) + { + return "(void)"; + } + + using DeviceMemoryReportFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_private_data === + + using PrivateDataSlotCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + using VideoEncodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeFlagBitsKHR::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR + operator|( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator&(VideoEncodeFlagBitsKHR bit0, + VideoEncodeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR + operator^( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator~( VideoEncodeFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeFlagBitsKHR::eReserved0 ) + result += "Reserved0 | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeRateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoEncodeRateControlFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeRateControlFlagBitsKHR::eReset ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator|( 
VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator&(VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator^( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator~( VideoEncodeRateControlFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeRateControlFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeRateControlFlagBitsKHR::eReset ) + result += "Reset | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeRateControlModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eNone ) | + VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eCbr ) | + VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eVbr ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator|( + VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR + operator&(VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator^( + VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR + operator~( VideoEncodeRateControlModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeRateControlModeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_device_diagnostics_config === + + using DeviceDiagnosticsConfigFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) | + VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) | + VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator&(DeviceDiagnosticsConfigFlagBitsNV bit0, 
DeviceDiagnosticsConfigFlagBitsNV bit1)VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DeviceDiagnosticsConfigFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) + result += "EnableShaderDebugInfo | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) + result += "EnableResourceTracking | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + result += "EnableAutomaticCheckpoints | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_synchronization2 === + + using PipelineStageFlags2KHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags64 + { + allFlags = + VkFlags64( PipelineStageFlagBits2KHR::eNone ) | VkFlags64( PipelineStageFlagBits2KHR::eTopOfPipe ) | + VkFlags64( PipelineStageFlagBits2KHR::eDrawIndirect ) | VkFlags64( PipelineStageFlagBits2KHR::eVertexInput ) | + VkFlags64( PipelineStageFlagBits2KHR::eVertexShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eTessellationControlShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eTessellationEvaluationShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eGeometryShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eFragmentShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eEarlyFragmentTests ) | + VkFlags64( PipelineStageFlagBits2KHR::eLateFragmentTests ) | + VkFlags64( PipelineStageFlagBits2KHR::eColorAttachmentOutput ) | + VkFlags64( PipelineStageFlagBits2KHR::eComputeShader ) | VkFlags64( PipelineStageFlagBits2KHR::eAllTransfer ) | + VkFlags64( PipelineStageFlagBits2KHR::eBottomOfPipe ) | VkFlags64( PipelineStageFlagBits2KHR::eHost ) | + VkFlags64( PipelineStageFlagBits2KHR::eAllGraphics ) | VkFlags64( PipelineStageFlagBits2KHR::eAllCommands ) | + VkFlags64( PipelineStageFlagBits2KHR::eCopy ) | VkFlags64( PipelineStageFlagBits2KHR::eResolve ) | + VkFlags64( PipelineStageFlagBits2KHR::eBlit ) | VkFlags64( PipelineStageFlagBits2KHR::eClear ) | + VkFlags64( PipelineStageFlagBits2KHR::eIndexInput ) | + VkFlags64( PipelineStageFlagBits2KHR::eVertexAttributeInput ) | + VkFlags64( PipelineStageFlagBits2KHR::ePreRasterizationShaders ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( PipelineStageFlagBits2KHR::eVideoDecode ) | VkFlags64( PipelineStageFlagBits2KHR::eVideoEncode ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( PipelineStageFlagBits2KHR::eTransformFeedbackEXT ) | + VkFlags64( PipelineStageFlagBits2KHR::eConditionalRenderingEXT ) | + VkFlags64( PipelineStageFlagBits2KHR::eCommandPreprocessNV ) | + VkFlags64( PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment ) | + VkFlags64( PipelineStageFlagBits2KHR::eAccelerationStructureBuild ) | + VkFlags64( PipelineStageFlagBits2KHR::eRayTracingShader ) | + VkFlags64( PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT ) | + VkFlags64( PipelineStageFlagBits2KHR::eTaskShaderNV ) | VkFlags64( PipelineStageFlagBits2KHR::eMeshShaderNV ) + }; + 
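      // Editor's note, not part of the patch: PipelineStageFlagBits2KHR is backed by
      // VkFlags64 rather than VkFlags, since VK_KHR_synchronization2 outgrew the 32
      // bits of the original pipeline-stage mask. A hedged usage sketch, assuming a
      // recording command buffer `cmd` and the synchronization2 feature enabled:
      //
      //   vk::MemoryBarrier2KHR barrier;
      //   barrier.srcStageMask  = vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput;
      //   barrier.srcAccessMask = vk::AccessFlagBits2KHR::eColorAttachmentWrite;
      //   barrier.dstStageMask  = vk::PipelineStageFlagBits2KHR::eFragmentShader;
      //   barrier.dstAccessMask = vk::AccessFlagBits2KHR::eShaderSampledRead;
      //
      //   vk::DependencyInfoKHR depInfo;
      //   depInfo.memoryBarrierCount = 1;
      //   depInfo.pMemoryBarriers    = &barrier;
      //   cmd.pipelineBarrier2KHR( depInfo );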
}; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR + operator|( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2KHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR + operator&(PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1)VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2KHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR + operator^( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2KHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator~( PipelineStageFlagBits2KHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PipelineStageFlags2KHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2KHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineStageFlagBits2KHR::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits2KHR::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits2KHR::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits2KHR::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits2KHR::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits2KHR::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits2KHR::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits2KHR::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits2KHR::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits2KHR::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits2KHR::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits2KHR::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits2KHR::eAllTransfer ) + result += "AllTransfer | "; + if ( value & PipelineStageFlagBits2KHR::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits2KHR::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits2KHR::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits2KHR::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits2KHR::eCopy ) + result += "Copy | "; + if ( value & PipelineStageFlagBits2KHR::eResolve ) + result += "Resolve | "; + if ( value & PipelineStageFlagBits2KHR::eBlit ) + result += "Blit | "; + if ( value & PipelineStageFlagBits2KHR::eClear ) + result += "Clear | "; + if ( value & PipelineStageFlagBits2KHR::eIndexInput ) + result += "IndexInput | "; + if ( value & PipelineStageFlagBits2KHR::eVertexAttributeInput ) + result += "VertexAttributeInput | "; + if ( value & PipelineStageFlagBits2KHR::ePreRasterizationShaders ) + result += "PreRasterizationShaders | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineStageFlagBits2KHR::eVideoDecode ) + result += "VideoDecode | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineStageFlagBits2KHR::eVideoEncode ) + result += "VideoEncode | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & 
PipelineStageFlagBits2KHR::eTransformFeedbackEXT ) + result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits2KHR::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits2KHR::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; + if ( value & PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment ) + result += "FragmentShadingRateAttachment | "; + if ( value & PipelineStageFlagBits2KHR::eAccelerationStructureBuild ) + result += "AccelerationStructureBuild | "; + if ( value & PipelineStageFlagBits2KHR::eRayTracingShader ) + result += "RayTracingShader | "; + if ( value & PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits2KHR::eTaskShaderNV ) + result += "TaskShaderNV | "; + if ( value & PipelineStageFlagBits2KHR::eMeshShaderNV ) + result += "MeshShaderNV | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccessFlags2KHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags64 + { + allFlags = + VkFlags64( AccessFlagBits2KHR::eNone ) | VkFlags64( AccessFlagBits2KHR::eIndirectCommandRead ) | + VkFlags64( AccessFlagBits2KHR::eIndexRead ) | VkFlags64( AccessFlagBits2KHR::eVertexAttributeRead ) | + VkFlags64( AccessFlagBits2KHR::eUniformRead ) | VkFlags64( AccessFlagBits2KHR::eInputAttachmentRead ) | + VkFlags64( AccessFlagBits2KHR::eShaderRead ) | VkFlags64( AccessFlagBits2KHR::eShaderWrite ) | + VkFlags64( AccessFlagBits2KHR::eColorAttachmentRead ) | VkFlags64( AccessFlagBits2KHR::eColorAttachmentWrite ) | + VkFlags64( AccessFlagBits2KHR::eDepthStencilAttachmentRead ) | + VkFlags64( AccessFlagBits2KHR::eDepthStencilAttachmentWrite ) | VkFlags64( AccessFlagBits2KHR::eTransferRead ) | + VkFlags64( AccessFlagBits2KHR::eTransferWrite ) | VkFlags64( AccessFlagBits2KHR::eHostRead ) | + VkFlags64( AccessFlagBits2KHR::eHostWrite ) | VkFlags64( AccessFlagBits2KHR::eMemoryRead ) | + VkFlags64( AccessFlagBits2KHR::eMemoryWrite ) | VkFlags64( AccessFlagBits2KHR::eShaderSampledRead ) | + VkFlags64( AccessFlagBits2KHR::eShaderStorageRead ) | VkFlags64( AccessFlagBits2KHR::eShaderStorageWrite ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( AccessFlagBits2KHR::eVideoDecodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoDecodeWrite ) | + VkFlags64( AccessFlagBits2KHR::eVideoEncodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoEncodeWrite ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( AccessFlagBits2KHR::eTransformFeedbackWriteEXT ) | + VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT ) | + VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT ) | + VkFlags64( AccessFlagBits2KHR::eConditionalRenderingReadEXT ) | + VkFlags64( AccessFlagBits2KHR::eCommandPreprocessReadNV ) | + VkFlags64( AccessFlagBits2KHR::eCommandPreprocessWriteNV ) | + VkFlags64( AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead ) | + VkFlags64( AccessFlagBits2KHR::eAccelerationStructureRead ) | + VkFlags64( AccessFlagBits2KHR::eAccelerationStructureWrite ) | + VkFlags64( AccessFlagBits2KHR::eFragmentDensityMapReadEXT ) | + VkFlags64( AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator|( AccessFlagBits2KHR bit0, + AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags2KHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator&(AccessFlagBits2KHR bit0, + 
AccessFlagBits2KHR bit1)VULKAN_HPP_NOEXCEPT + { + return AccessFlags2KHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator^( AccessFlagBits2KHR bit0, + AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags2KHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator~( AccessFlagBits2KHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccessFlags2KHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags2KHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AccessFlagBits2KHR::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits2KHR::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits2KHR::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits2KHR::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits2KHR::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits2KHR::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits2KHR::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits2KHR::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits2KHR::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits2KHR::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits2KHR::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits2KHR::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits2KHR::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits2KHR::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits2KHR::eShaderSampledRead ) + result += "ShaderSampledRead | "; + if ( value & AccessFlagBits2KHR::eShaderStorageRead ) + result += "ShaderStorageRead | "; + if ( value & AccessFlagBits2KHR::eShaderStorageWrite ) + result += "ShaderStorageWrite | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoDecodeRead ) + result += "VideoDecodeRead | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoDecodeWrite ) + result += "VideoDecodeWrite | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoEncodeRead ) + result += "VideoEncodeRead | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2KHR::eVideoEncodeWrite ) + result += "VideoEncodeWrite | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & AccessFlagBits2KHR::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits2KHR::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits2KHR::eCommandPreprocessReadNV ) + result 
+= "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits2KHR::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + if ( value & AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead ) + result += "FragmentShadingRateAttachmentRead | "; + if ( value & AccessFlagBits2KHR::eAccelerationStructureRead ) + result += "AccelerationStructureRead | "; + if ( value & AccessFlagBits2KHR::eAccelerationStructureWrite ) + result += "AccelerationStructureWrite | "; + if ( value & AccessFlagBits2KHR::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SubmitFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubmitFlagBitsKHR::eProtected ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator|( SubmitFlagBitsKHR bit0, + SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubmitFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator&(SubmitFlagBitsKHR bit0, + SubmitFlagBitsKHR bit1)VULKAN_HPP_NOEXCEPT + { + return SubmitFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator^( SubmitFlagBitsKHR bit0, + SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubmitFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator~( SubmitFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SubmitFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubmitFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubmitFlagBitsKHR::eProtected ) + result += "Protected | "; + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using DirectFBSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + enum class ScreenSurfaceCreateFlagBitsQNX : VkFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX ) + { + return "(void)"; + } + + using ScreenSurfaceCreateFlagsQNX = Flags; + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +} // namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_NO_EXCEPTIONS namespace std @@ -13246,7 +19560,7 @@ namespace std template <> struct is_error_code_enum : public true_type {}; -} +} // namespace std #endif namespace VULKAN_HPP_NAMESPACE @@ -13254,310 +19568,356 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_NO_EXCEPTIONS class ErrorCategoryImpl : public std::error_category { - public: - virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; } - virtual std::string message(int ev) const override { return to_string(static_cast(ev)); } + public: + virtual const char * name() const VULKAN_HPP_NOEXCEPT override + { + return VULKAN_HPP_NAMESPACE_STRING "::Result"; + } + 
virtual std::string message( int ev ) const override + { + return to_string( static_cast( ev ) ); + } }; class Error { - public: - Error() VULKAN_HPP_NOEXCEPT = default; - Error(const Error&) VULKAN_HPP_NOEXCEPT = default; - virtual ~Error() VULKAN_HPP_NOEXCEPT = default; + public: + Error() VULKAN_HPP_NOEXCEPT = default; + Error( const Error & ) VULKAN_HPP_NOEXCEPT = default; + virtual ~Error() VULKAN_HPP_NOEXCEPT = default; - virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0; + virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0; }; - class LogicError : public Error, public std::logic_error + class LogicError + : public Error + , public std::logic_error { - public: - explicit LogicError( const std::string& what ) - : Error(), std::logic_error(what) {} - explicit LogicError( char const * what ) - : Error(), std::logic_error(what) {} + public: + explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {} + explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {} - virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); } + virtual const char * what() const VULKAN_HPP_NOEXCEPT + { + return std::logic_error::what(); + } }; - class SystemError : public Error, public std::system_error + class SystemError + : public Error + , public std::system_error { - public: - SystemError( std::error_code ec ) - : Error(), std::system_error(ec) {} - SystemError( std::error_code ec, std::string const& what ) - : Error(), std::system_error(ec, what) {} - SystemError( std::error_code ec, char const * what ) - : Error(), std::system_error(ec, what) {} - SystemError( int ev, std::error_category const& ecat ) - : Error(), std::system_error(ev, ecat) {} - SystemError( int ev, std::error_category const& ecat, std::string const& what) - : Error(), std::system_error(ev, ecat, what) {} - SystemError( int ev, std::error_category const& ecat, char const * what) - : Error(), std::system_error(ev, ecat, what) {} + public: + SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {} + SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {} + SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {} + SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {} + SystemError( int ev, std::error_category const & ecat, std::string const & what ) + : Error(), std::system_error( ev, ecat, what ) + {} + SystemError( int ev, std::error_category const & ecat, char const * what ) + : Error(), std::system_error( ev, ecat, what ) + {} - virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); } + virtual const char * what() const VULKAN_HPP_NOEXCEPT + { + return std::system_error::what(); + } }; - VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT { static ErrorCategoryImpl instance; return instance; } - VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT { - return std::error_code(static_cast(e), errorCategory()); + return std::error_code( static_cast( e ), errorCategory() ); } - VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) 
VULKAN_HPP_NOEXCEPT { - return std::error_condition(static_cast(e), errorCategory()); + return std::error_condition( static_cast( e ), errorCategory() ); } class OutOfHostMemoryError : public SystemError { public: - OutOfHostMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + OutOfHostMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} OutOfHostMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} }; class OutOfDeviceMemoryError : public SystemError { public: - OutOfDeviceMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + OutOfDeviceMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} OutOfDeviceMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} }; class InitializationFailedError : public SystemError { public: - InitializationFailedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + InitializationFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} InitializationFailedError( char const * message ) - : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} }; class DeviceLostError : public SystemError { public: - DeviceLostError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} - DeviceLostError( char const * message ) - : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) + {} + DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} }; class MemoryMapFailedError : public SystemError { public: - MemoryMapFailedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + MemoryMapFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} MemoryMapFailedError( char const * message ) - : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} }; class LayerNotPresentError : public SystemError { public: - LayerNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + LayerNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} LayerNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} }; class ExtensionNotPresentError : public SystemError { public: - ExtensionNotPresentError( std::string const& message ) - : 
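The is_error_code_enum specialization together with the ADL-visible make_error_code above is what lets a bare vk::Result flow through the <system_error> machinery. A small sketch (default "vk" namespace assumed):

#include <vulkan/vulkan.hpp>
#include <iostream>
#include <system_error>

int main()
{
  // implicit conversion enabled by std::is_error_code_enum<vk::Result>
  std::error_code ec = vk::Result::eErrorDeviceLost;

  // name() and message() are served by ErrorCategoryImpl
  std::cout << ec.category().name() << ": " << ec.message() << '\n'; // "vk::Result: ErrorDeviceLost"
}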
SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + ExtensionNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} ExtensionNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} }; class FeatureNotPresentError : public SystemError { public: - FeatureNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + FeatureNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} FeatureNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} }; class IncompatibleDriverError : public SystemError { public: - IncompatibleDriverError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + IncompatibleDriverError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) + {} IncompatibleDriverError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) + {} }; class TooManyObjectsError : public SystemError { public: - TooManyObjectsError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + TooManyObjectsError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} TooManyObjectsError( char const * message ) - : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} }; class FormatNotSupportedError : public SystemError { public: - FormatNotSupportedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + FormatNotSupportedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} FormatNotSupportedError( char const * message ) - : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} }; class FragmentedPoolError : public SystemError { public: - FragmentedPoolError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + FragmentedPoolError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} FragmentedPoolError( char const * message ) - : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} }; class UnknownError : public SystemError { public: - UnknownError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} - UnknownError( char const * message ) - : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + UnknownError( std::string const & message ) : SystemError( make_error_code( 
Result::eErrorUnknown ), message ) {} + UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} }; class OutOfPoolMemoryError : public SystemError { public: - OutOfPoolMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + OutOfPoolMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} OutOfPoolMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} }; class InvalidExternalHandleError : public SystemError { public: - InvalidExternalHandleError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + InvalidExternalHandleError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) + {} InvalidExternalHandleError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) + {} }; class FragmentationError : public SystemError { public: - FragmentationError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} - FragmentationError( char const * message ) - : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} + FragmentationError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} + FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} }; class InvalidOpaqueCaptureAddressError : public SystemError { public: - InvalidOpaqueCaptureAddressError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + InvalidOpaqueCaptureAddressError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} InvalidOpaqueCaptureAddressError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} }; class SurfaceLostKHRError : public SystemError { public: - SurfaceLostKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + SurfaceLostKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} SurfaceLostKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} }; class NativeWindowInUseKHRError : public SystemError { public: - NativeWindowInUseKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + NativeWindowInUseKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) + {} NativeWindowInUseKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + : SystemError( make_error_code( 
Result::eErrorNativeWindowInUseKHR ), message ) + {} }; class OutOfDateKHRError : public SystemError { public: - OutOfDateKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} - OutOfDateKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + OutOfDateKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) + {} + OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} }; class IncompatibleDisplayKHRError : public SystemError { public: - IncompatibleDisplayKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + IncompatibleDisplayKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} IncompatibleDisplayKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} }; class ValidationFailedEXTError : public SystemError { public: - ValidationFailedEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + ValidationFailedEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} ValidationFailedEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} }; class InvalidShaderNVError : public SystemError { public: - InvalidShaderNVError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + InvalidShaderNVError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} InvalidShaderNVError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} - }; - - class IncompatibleVersionKHRError : public SystemError - { - public: - IncompatibleVersionKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} - IncompatibleVersionKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleVersionKHR ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} }; class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError { public: - InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} }; class NotPermittedEXTError : public SystemError { public: - NotPermittedEXTError( std::string const& message ) - : SystemError( make_error_code( 
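One SystemError subclass is generated per error Result, so recoverable conditions can be caught by type instead of by switching on codes; the classic case is an out-of-date swapchain during acquire. A sketch in which device, swapchain, and acquireSemaphore stand in for objects created elsewhere:

#include <vulkan/vulkan.hpp>
#include <cstdint>

// Returns the acquired image index, or UINT32_MAX when the swapchain
// must be recreated before rendering can continue.
uint32_t acquireImage( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore acquireSemaphore )
{
  try
  {
    return device.acquireNextImageKHR( swapchain, UINT64_MAX, acquireSemaphore, nullptr ).value;
  }
  catch ( vk::OutOfDateKHRError const & )
  {
    return UINT32_MAX; // caller rebuilds the swapchain and retries
  }
}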
Result::eErrorNotPermittedEXT ), message ) {} + NotPermittedEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) + {} NotPermittedEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) + {} }; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) class FullScreenExclusiveModeLostEXTError : public SystemError { public: - FullScreenExclusiveModeLostEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + FullScreenExclusiveModeLostEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} FullScreenExclusiveModeLostEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} }; - +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ [[noreturn]] static void throwResultException( Result result, char const * message ) { @@ -13586,93 +19946,145 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); - case Result::eErrorIncompatibleVersionKHR: throw IncompatibleVersionKHRError( message ); - case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: + throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ default: throw SystemError( make_error_code( result ) ); } } #endif - template void ignore(T const&) VULKAN_HPP_NOEXCEPT {} + template + void ignore( T const & ) VULKAN_HPP_NOEXCEPT + {} template struct ResultValue { #ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v))) + ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) ) #else ResultValue( Result r, T & v ) #endif - : result( r ) - , value( v ) + : result( r ), value( v ) {} #ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v)))) + ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) ) #else ResultValue( Result r, T && v ) #endif - : result( r ) - , value( std::move( v ) ) + : result( r ), value( std::move( v ) ) {} - Result result; - T value; + Result result; + T value; - operator std::tuple() VULKAN_HPP_NOEXCEPT { return std::tuple(result, value); } + operator std::tuple() VULKAN_HPP_NOEXCEPT + { + return std::tuple( result, value ); + } -#if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST) - operator T const& () const & VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." 
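With the implicit-cast operators on ResultValue now deprecated, callers are expected to name the members explicitly; this matters for entry points with several success codes, such as pipeline creation once ePipelineCompileRequiredEXT exists. A sketch, with pipelineCache and createInfo assumed to be prepared elsewhere:

#include <vulkan/vulkan.hpp>

vk::Pipeline createPipeline( vk::Device device, vk::PipelineCache pipelineCache,
                             vk::GraphicsPipelineCreateInfo const & createInfo )
{
  vk::ResultValue<vk::Pipeline> rv = device.createGraphicsPipeline( pipelineCache, createInfo );
  if ( rv.result == vk::Result::ePipelineCompileRequiredEXT )
    return nullptr; // success code: creation was skipped because a compile would be required
  return rv.value;  // explicit member access instead of the deprecated cast
}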
) + operator T const &() const & VULKAN_HPP_NOEXCEPT { return value; } - operator T& () & VULKAN_HPP_NOEXCEPT + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator T &() & VULKAN_HPP_NOEXCEPT { return value; } - operator T const&& () const && VULKAN_HPP_NOEXCEPT + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator T const &&() const && VULKAN_HPP_NOEXCEPT { return std::move( value ); } - operator T&& () && VULKAN_HPP_NOEXCEPT + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator T &&() && VULKAN_HPP_NOEXCEPT { return std::move( value ); } #endif }; -#if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST) +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) template - struct ResultValue> + struct ResultValue> { -#ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue(Result r, UniqueHandle && v) VULKAN_HPP_NOEXCEPT -#else - ResultValue(Result r, UniqueHandle && v) -#endif - : result(r) - , value(std::move(v)) +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, UniqueHandle && v ) VULKAN_HPP_NOEXCEPT +# else + ResultValue( Result r, UniqueHandle && v ) +# endif + : result( r ) + , value( std::move( v ) ) {} - Result result; - UniqueHandle value; + std::tuple> asTuple() + { + return std::make_tuple( result, std::move( value ) ); + } - operator std::tuple&>() VULKAN_HPP_NOEXCEPT { return std::tuple&>(result, value); } - - operator UniqueHandle& () & VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator UniqueHandle &() & VULKAN_HPP_NOEXCEPT { return value; } + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) operator UniqueHandle() VULKAN_HPP_NOEXCEPT { - return std::move(value); + return std::move( value ); } +# endif + + Result result; + UniqueHandle value; + }; + + template + struct ResultValue>> + { +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, std::vector> && v ) VULKAN_HPP_NOEXCEPT +# else + ResultValue( Result r, std::vector> && v ) +# endif + : result( r ) + , value( std::move( v ) ) + {} + + std::tuple>> asTuple() + { + return std::make_tuple( result, std::move( value ) ); + } + + Result result; + std::vector> value; + +# if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." 
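asTuple() is the non-deprecated way to peel a unique handle off the Result in one step, and it reads naturally with structured bindings; the vector specialization offers the same member. A sketch under the same assumptions as the previous one:

#include <vulkan/vulkan.hpp>
#include <utility>

vk::UniquePipeline createPipelineUnique( vk::Device device, vk::PipelineCache pipelineCache,
                                         vk::GraphicsPipelineCreateInfo const & createInfo )
{
  // asTuple() moves the unique handle out alongside the Result
  auto [ result, pipeline ] = device.createGraphicsPipelineUnique( pipelineCache, createInfo ).asTuple();
  if ( result != vk::Result::eSuccess )
    return {}; // e.g. ePipelineCompileRequiredEXT: no pipeline was produced
  return std::move( pipeline );
}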
) + operator std::tuple> &>() VULKAN_HPP_NOEXCEPT + { + return std::tuple> &>( result, value ); + } +# endif }; #endif @@ -13680,9 +20092,9 @@ namespace VULKAN_HPP_NAMESPACE struct ResultValueType { #ifdef VULKAN_HPP_NO_EXCEPTIONS - typedef ResultValue type; + typedef ResultValue type; #else - typedef T type; + typedef T type; #endif }; @@ -13692,15 +20104,15 @@ namespace VULKAN_HPP_NAMESPACE #ifdef VULKAN_HPP_NO_EXCEPTIONS typedef Result type; #else - typedef void type; + typedef void type; #endif }; VULKAN_HPP_INLINE ResultValueType::type createResultValue( Result result, char const * message ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); return result; #else if ( result != Result::eSuccess ) @@ -13714,8 +20126,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE typename ResultValueType::type createResultValue( Result result, T & data, char const * message ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); return ResultValue( result, std::move( data ) ); #else if ( result != Result::eSuccess ) @@ -13726,11 +20138,14 @@ namespace VULKAN_HPP_NAMESPACE #endif } - VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list successCodes ) + VULKAN_HPP_INLINE Result createResultValue( Result result, + char const * message, + std::initializer_list successCodes ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); #else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) { @@ -13741,7756 +20156,102 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_INLINE ResultValue createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes ) + VULKAN_HPP_INLINE ResultValue + createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes ) { #ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); #else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) { throwResultException( result, message ); } #endif - return ResultValue( result, data ); + return ResultValue( result, std::move( data ) ); } #ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE typename ResultValueType>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits::deleter const& deleter ) + VULKAN_HPP_INLINE typename ResultValueType>::type createResultValue( + Result result, T & data, char const * message, typename UniqueHandleTraits::deleter const & deleter ) { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); - return 
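ResultValueType is the pivot between the two error-handling modes: with exceptions enabled, createResultValue unwraps to plain T (or void) and throws on failure, while under VULKAN_HPP_NO_EXCEPTIONS it returns ResultValue<T> and only checks via VULKAN_HPP_ASSERT_ON_RESULT. A sketch of code meant to build both ways (the assert may be a no-op in release builds, so the result is tested by hand):

#include <vulkan/vulkan.hpp>
#include <utility>

vk::UniqueBuffer makeBuffer( vk::Device device, vk::BufferCreateInfo const & info )
{
#if defined( VULKAN_HPP_NO_EXCEPTIONS )
  // ResultValueType<UniqueHandle<Buffer>>::type is ResultValue<UniqueHandle<Buffer>>
  auto rv = device.createBufferUnique( info );
  if ( rv.result != vk::Result::eSuccess )
    return {};
  return std::move( rv.value );
#else
  // ResultValueType<UniqueHandle<Buffer>>::type is UniqueHandle<Buffer>; failures throw
  return device.createBufferUnique( info );
#endif
}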
ResultValue>( result, UniqueHandle(data, deleter) ); -#else +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); + return ResultValue>( result, UniqueHandle( data, deleter ) ); +# else if ( result != Result::eSuccess ) { throwResultException( result, message ); } - return UniqueHandle(data, deleter); -#endif + return UniqueHandle( data, deleter ); +# endif } template - VULKAN_HPP_INLINE ResultValue> createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes, typename UniqueHandleTraits::deleter const& deleter ) + VULKAN_HPP_INLINE ResultValue> + createResultValue( Result result, + T & data, + char const * message, + std::initializer_list successCodes, + typename UniqueHandleTraits::deleter const & deleter ) { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); - return ResultValue>( result, UniqueHandle(data, deleter) ); -#else +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +# else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) { throwResultException( result, message ); } - return ResultValue>( result, UniqueHandle(data, deleter) ); -#endif +# endif + return ResultValue>( result, UniqueHandle( data, deleter ) ); + } + + template + VULKAN_HPP_INLINE typename ResultValueType>>::type + createResultValue( Result result, std::vector> && data, char const * message ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); + return ResultValue>>( result, std::move( data ) ); +# else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } + return std::move( data ); +# endif + } + + template + VULKAN_HPP_INLINE ResultValue>> + createResultValue( Result result, + std::vector> && data, + char const * message, + std::initializer_list successCodes ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +# else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +# endif + return ResultValue>>( result, std::move( data ) ); } #endif - struct AabbPositionsKHR; - using AabbPositionsNV = AabbPositionsKHR; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureBuildGeometryInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureBuildOffsetInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureCreateGeometryTypeInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureCreateInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct AccelerationStructureCreateInfoNV; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureDeviceAddressInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureGeometryAabbsDataKHR; -#endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - union AccelerationStructureGeometryDataKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureGeometryInstancesDataKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureGeometryKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureGeometryTrianglesDataKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct AccelerationStructureInfoNV; - struct AccelerationStructureInstanceKHR; - using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureMemoryRequirementsInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct AccelerationStructureMemoryRequirementsInfoNV; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureVersionKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct AcquireNextImageInfoKHR; - struct AcquireProfilingLockInfoKHR; - struct AllocationCallbacks; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferFormatPropertiesANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferPropertiesANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferUsageANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ApplicationInfo; - struct AttachmentDescription; - struct AttachmentDescription2; - using AttachmentDescription2KHR = AttachmentDescription2; - struct AttachmentDescriptionStencilLayout; - using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; - struct AttachmentReference; - struct AttachmentReference2; - using AttachmentReference2KHR = AttachmentReference2; - struct AttachmentReferenceStencilLayout; - using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; - struct AttachmentSampleLocationsEXT; - struct BaseInStructure; - struct BaseOutStructure; - struct BindAccelerationStructureMemoryInfoKHR; - using BindAccelerationStructureMemoryInfoNV = BindAccelerationStructureMemoryInfoKHR; - struct BindBufferMemoryDeviceGroupInfo; - using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; - struct BindBufferMemoryInfo; - using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; - struct BindImageMemoryDeviceGroupInfo; - using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; - struct BindImageMemoryInfo; - using BindImageMemoryInfoKHR = BindImageMemoryInfo; - struct BindImageMemorySwapchainInfoKHR; - struct BindImagePlaneMemoryInfo; - using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; - struct BindIndexBufferIndirectCommandNV; - struct BindShaderGroupIndirectCommandNV; - struct BindSparseInfo; - struct BindVertexBufferIndirectCommandNV; - struct BufferCopy; - struct BufferCreateInfo; - struct BufferDeviceAddressCreateInfoEXT; - struct BufferDeviceAddressInfo; - using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; - using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; - struct BufferImageCopy; - struct BufferMemoryBarrier; - struct BufferMemoryRequirementsInfo2; - using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; - struct BufferOpaqueCaptureAddressCreateInfo; - using BufferOpaqueCaptureAddressCreateInfoKHR = 
BufferOpaqueCaptureAddressCreateInfo; - struct BufferViewCreateInfo; - struct CalibratedTimestampInfoEXT; - struct CheckpointDataNV; - struct ClearAttachment; - union ClearColorValue; - struct ClearDepthStencilValue; - struct ClearRect; - union ClearValue; - struct CoarseSampleLocationNV; - struct CoarseSampleOrderCustomNV; - struct CommandBufferAllocateInfo; - struct CommandBufferBeginInfo; - struct CommandBufferInheritanceConditionalRenderingInfoEXT; - struct CommandBufferInheritanceInfo; - struct CommandBufferInheritanceRenderPassTransformInfoQCOM; - struct CommandPoolCreateInfo; - struct ComponentMapping; - struct ComputePipelineCreateInfo; - struct ConditionalRenderingBeginInfoEXT; - struct ConformanceVersion; - using ConformanceVersionKHR = ConformanceVersion; - struct CooperativeMatrixPropertiesNV; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct CopyAccelerationStructureInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct CopyAccelerationStructureToMemoryInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct CopyDescriptorSet; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct CopyMemoryToAccelerationStructureInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct D3D12FenceSubmitInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct DebugMarkerMarkerInfoEXT; - struct DebugMarkerObjectNameInfoEXT; - struct DebugMarkerObjectTagInfoEXT; - struct DebugReportCallbackCreateInfoEXT; - struct DebugUtilsLabelEXT; - struct DebugUtilsMessengerCallbackDataEXT; - struct DebugUtilsMessengerCreateInfoEXT; - struct DebugUtilsObjectNameInfoEXT; - struct DebugUtilsObjectTagInfoEXT; - struct DedicatedAllocationBufferCreateInfoNV; - struct DedicatedAllocationImageCreateInfoNV; - struct DedicatedAllocationMemoryAllocateInfoNV; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct DeferredOperationInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct DescriptorBufferInfo; - struct DescriptorImageInfo; - struct DescriptorPoolCreateInfo; - struct DescriptorPoolInlineUniformBlockCreateInfoEXT; - struct DescriptorPoolSize; - struct DescriptorSetAllocateInfo; - struct DescriptorSetLayoutBinding; - struct DescriptorSetLayoutBindingFlagsCreateInfo; - using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; - struct DescriptorSetLayoutCreateInfo; - struct DescriptorSetLayoutSupport; - using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; - struct DescriptorSetVariableDescriptorCountAllocateInfo; - using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; - struct DescriptorSetVariableDescriptorCountLayoutSupport; - using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; - struct DescriptorUpdateTemplateCreateInfo; - using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; - struct DescriptorUpdateTemplateEntry; - using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; - struct DeviceCreateInfo; - struct DeviceDiagnosticsConfigCreateInfoNV; - struct DeviceEventInfoEXT; - struct DeviceGroupBindSparseInfo; - using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; - struct DeviceGroupCommandBufferBeginInfo; - using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; - struct DeviceGroupDeviceCreateInfo; - using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; - struct DeviceGroupPresentCapabilitiesKHR; - struct 
DeviceGroupPresentInfoKHR; - struct DeviceGroupRenderPassBeginInfo; - using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; - struct DeviceGroupSubmitInfo; - using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; - struct DeviceGroupSwapchainCreateInfoKHR; - struct DeviceMemoryOpaqueCaptureAddressInfo; - using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; - struct DeviceMemoryOverallocationCreateInfoAMD; -#ifdef VK_ENABLE_BETA_EXTENSIONS - union DeviceOrHostAddressConstKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - union DeviceOrHostAddressKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct DevicePrivateDataCreateInfoEXT; - struct DeviceQueueCreateInfo; - struct DeviceQueueGlobalPriorityCreateInfoEXT; - struct DeviceQueueInfo2; - struct DispatchIndirectCommand; - struct DisplayEventInfoEXT; - struct DisplayModeCreateInfoKHR; - struct DisplayModeParametersKHR; - struct DisplayModeProperties2KHR; - struct DisplayModePropertiesKHR; - struct DisplayNativeHdrSurfaceCapabilitiesAMD; - struct DisplayPlaneCapabilities2KHR; - struct DisplayPlaneCapabilitiesKHR; - struct DisplayPlaneInfo2KHR; - struct DisplayPlaneProperties2KHR; - struct DisplayPlanePropertiesKHR; - struct DisplayPowerInfoEXT; - struct DisplayPresentInfoKHR; - struct DisplayProperties2KHR; - struct DisplayPropertiesKHR; - struct DisplaySurfaceCreateInfoKHR; - struct DrawIndexedIndirectCommand; - struct DrawIndirectCommand; - struct DrawMeshTasksIndirectCommandNV; - struct DrmFormatModifierPropertiesEXT; - struct DrmFormatModifierPropertiesListEXT; - struct EventCreateInfo; - struct ExportFenceCreateInfo; - using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportFenceWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExportMemoryAllocateInfo; - using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; - struct ExportMemoryAllocateInfoNV; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExportSemaphoreCreateInfo; - using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportSemaphoreWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExtensionProperties; - struct Extent2D; - struct Extent3D; - struct ExternalBufferProperties; - using ExternalBufferPropertiesKHR = ExternalBufferProperties; - struct ExternalFenceProperties; - using ExternalFencePropertiesKHR = ExternalFenceProperties; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ExternalFormatANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ExternalImageFormatProperties; - using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; - struct ExternalImageFormatPropertiesNV; - struct ExternalMemoryBufferCreateInfo; - using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; - struct ExternalMemoryImageCreateInfo; - using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; - struct ExternalMemoryImageCreateInfoNV; - struct ExternalMemoryProperties; - using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; - struct ExternalSemaphoreProperties; - using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; - struct FenceCreateInfo; - struct FenceGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct 
FenceGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct FilterCubicImageViewImageFormatPropertiesEXT; - struct FormatProperties; - struct FormatProperties2; - using FormatProperties2KHR = FormatProperties2; - struct FramebufferAttachmentImageInfo; - using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; - struct FramebufferAttachmentsCreateInfo; - using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; - struct FramebufferCreateInfo; - struct FramebufferMixedSamplesCombinationNV; - struct GeneratedCommandsInfoNV; - struct GeneratedCommandsMemoryRequirementsInfoNV; - struct GeometryAABBNV; - struct GeometryDataNV; - struct GeometryNV; - struct GeometryTrianglesNV; - struct GraphicsPipelineCreateInfo; - struct GraphicsPipelineShaderGroupsCreateInfoNV; - struct GraphicsShaderGroupCreateInfoNV; - struct HdrMetadataEXT; - struct HeadlessSurfaceCreateInfoEXT; -#ifdef VK_USE_PLATFORM_IOS_MVK - struct IOSSurfaceCreateInfoMVK; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - struct ImageBlit; - struct ImageCopy; - struct ImageCreateInfo; - struct ImageDrmFormatModifierExplicitCreateInfoEXT; - struct ImageDrmFormatModifierListCreateInfoEXT; - struct ImageDrmFormatModifierPropertiesEXT; - struct ImageFormatListCreateInfo; - using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; - struct ImageFormatProperties; - struct ImageFormatProperties2; - using ImageFormatProperties2KHR = ImageFormatProperties2; - struct ImageMemoryBarrier; - struct ImageMemoryRequirementsInfo2; - using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; -#ifdef VK_USE_PLATFORM_FUCHSIA - struct ImagePipeSurfaceCreateInfoFUCHSIA; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct ImagePlaneMemoryRequirementsInfo; - using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; - struct ImageResolve; - struct ImageSparseMemoryRequirementsInfo2; - using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; - struct ImageStencilUsageCreateInfo; - using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; - struct ImageSubresource; - struct ImageSubresourceLayers; - struct ImageSubresourceRange; - struct ImageSwapchainCreateInfoKHR; - struct ImageViewASTCDecodeModeEXT; - struct ImageViewAddressPropertiesNVX; - struct ImageViewCreateInfo; - struct ImageViewHandleInfoNVX; - struct ImageViewUsageCreateInfo; - using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ImportAndroidHardwareBufferInfoANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ImportFenceFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportFenceWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImportMemoryFdInfoKHR; - struct ImportMemoryHostPointerInfoEXT; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImportSemaphoreFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportSemaphoreWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct IndirectCommandsLayoutCreateInfoNV; - struct IndirectCommandsLayoutTokenNV; - struct IndirectCommandsStreamNV; - struct InitializePerformanceApiInfoINTEL; - struct InputAttachmentAspectReference; - using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; - struct InstanceCreateInfo; - struct LayerProperties; -#ifdef 
VK_USE_PLATFORM_MACOS_MVK - struct MacOSSurfaceCreateInfoMVK; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - struct MappedMemoryRange; - struct MemoryAllocateFlagsInfo; - using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; - struct MemoryAllocateInfo; - struct MemoryBarrier; - struct MemoryDedicatedAllocateInfo; - using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; - struct MemoryDedicatedRequirements; - using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; - struct MemoryFdPropertiesKHR; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct MemoryGetAndroidHardwareBufferInfoANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct MemoryGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct MemoryGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct MemoryHeap; - struct MemoryHostPointerPropertiesEXT; - struct MemoryOpaqueCaptureAddressAllocateInfo; - using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; - struct MemoryPriorityAllocateInfoEXT; - struct MemoryRequirements; - struct MemoryRequirements2; - using MemoryRequirements2KHR = MemoryRequirements2; - struct MemoryType; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct MemoryWin32HandlePropertiesKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - struct MetalSurfaceCreateInfoEXT; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct MultisamplePropertiesEXT; - struct Offset2D; - struct Offset3D; - struct PastPresentationTimingGOOGLE; - struct PerformanceConfigurationAcquireInfoINTEL; - struct PerformanceCounterDescriptionKHR; - struct PerformanceCounterKHR; - union PerformanceCounterResultKHR; - struct PerformanceMarkerInfoINTEL; - struct PerformanceOverrideInfoINTEL; - struct PerformanceQuerySubmitInfoKHR; - struct PerformanceStreamMarkerInfoINTEL; - union PerformanceValueDataINTEL; - struct PerformanceValueINTEL; - struct PhysicalDevice16BitStorageFeatures; - using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; - struct PhysicalDevice8BitStorageFeatures; - using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; - struct PhysicalDeviceASTCDecodeFeaturesEXT; - struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT; - struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT; - struct PhysicalDeviceBufferDeviceAddressFeatures; - using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; - struct PhysicalDeviceBufferDeviceAddressFeaturesEXT; - using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; - struct PhysicalDeviceCoherentMemoryFeaturesAMD; - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; - struct PhysicalDeviceConditionalRenderingFeaturesEXT; - struct PhysicalDeviceConservativeRasterizationPropertiesEXT; - struct PhysicalDeviceCooperativeMatrixFeaturesNV; - struct PhysicalDeviceCooperativeMatrixPropertiesNV; - struct PhysicalDeviceCornerSampledImageFeaturesNV; - struct PhysicalDeviceCoverageReductionModeFeaturesNV; - struct PhysicalDeviceCustomBorderColorFeaturesEXT; - struct PhysicalDeviceCustomBorderColorPropertiesEXT; - struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; - struct PhysicalDeviceDepthClipEnableFeaturesEXT; - struct PhysicalDeviceDepthStencilResolveProperties; - using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; - struct PhysicalDeviceDescriptorIndexingFeatures; - using PhysicalDeviceDescriptorIndexingFeaturesEXT = 
PhysicalDeviceDescriptorIndexingFeatures; - struct PhysicalDeviceDescriptorIndexingProperties; - using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; - struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; - struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; - struct PhysicalDeviceDiagnosticsConfigFeaturesNV; - struct PhysicalDeviceDiscardRectanglePropertiesEXT; - struct PhysicalDeviceDriverProperties; - using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; - struct PhysicalDeviceExclusiveScissorFeaturesNV; - struct PhysicalDeviceExtendedDynamicStateFeaturesEXT; - struct PhysicalDeviceExternalBufferInfo; - using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; - struct PhysicalDeviceExternalFenceInfo; - using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; - struct PhysicalDeviceExternalImageFormatInfo; - using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; - struct PhysicalDeviceExternalMemoryHostPropertiesEXT; - struct PhysicalDeviceExternalSemaphoreInfo; - using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; - struct PhysicalDeviceFeatures; - struct PhysicalDeviceFeatures2; - using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; - struct PhysicalDeviceFloatControlsProperties; - using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; - struct PhysicalDeviceFragmentDensityMapFeaturesEXT; - struct PhysicalDeviceFragmentDensityMapPropertiesEXT; - struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV; - struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT; - struct PhysicalDeviceGroupProperties; - using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; - struct PhysicalDeviceHostQueryResetFeatures; - using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; - struct PhysicalDeviceIDProperties; - using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; - struct PhysicalDeviceImageDrmFormatModifierInfoEXT; - struct PhysicalDeviceImageFormatInfo2; - using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; - struct PhysicalDeviceImageViewImageFormatInfoEXT; - struct PhysicalDeviceImagelessFramebufferFeatures; - using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; - struct PhysicalDeviceIndexTypeUint8FeaturesEXT; - struct PhysicalDeviceInlineUniformBlockFeaturesEXT; - struct PhysicalDeviceInlineUniformBlockPropertiesEXT; - struct PhysicalDeviceLimits; - struct PhysicalDeviceLineRasterizationFeaturesEXT; - struct PhysicalDeviceLineRasterizationPropertiesEXT; - struct PhysicalDeviceMaintenance3Properties; - using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; - struct PhysicalDeviceMemoryBudgetPropertiesEXT; - struct PhysicalDeviceMemoryPriorityFeaturesEXT; - struct PhysicalDeviceMemoryProperties; - struct PhysicalDeviceMemoryProperties2; - using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; - struct PhysicalDeviceMeshShaderFeaturesNV; - struct PhysicalDeviceMeshShaderPropertiesNV; - struct PhysicalDeviceMultiviewFeatures; - using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; - struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - struct PhysicalDeviceMultiviewProperties; - using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; - struct 
PhysicalDevicePCIBusInfoPropertiesEXT; - struct PhysicalDevicePerformanceQueryFeaturesKHR; - struct PhysicalDevicePerformanceQueryPropertiesKHR; - struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT; - struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; - struct PhysicalDevicePointClippingProperties; - using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; - struct PhysicalDevicePrivateDataFeaturesEXT; - struct PhysicalDeviceProperties; - struct PhysicalDeviceProperties2; - using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; - struct PhysicalDeviceProtectedMemoryFeatures; - struct PhysicalDeviceProtectedMemoryProperties; - struct PhysicalDevicePushDescriptorPropertiesKHR; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct PhysicalDeviceRayTracingFeaturesKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct PhysicalDeviceRayTracingPropertiesKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PhysicalDeviceRayTracingPropertiesNV; - struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; - struct PhysicalDeviceRobustness2FeaturesEXT; - struct PhysicalDeviceRobustness2PropertiesEXT; - struct PhysicalDeviceSampleLocationsPropertiesEXT; - struct PhysicalDeviceSamplerFilterMinmaxProperties; - using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; - struct PhysicalDeviceSamplerYcbcrConversionFeatures; - using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; - struct PhysicalDeviceScalarBlockLayoutFeatures; - using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; - struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures; - using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; - struct PhysicalDeviceShaderAtomicInt64Features; - using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; - struct PhysicalDeviceShaderClockFeaturesKHR; - struct PhysicalDeviceShaderCoreProperties2AMD; - struct PhysicalDeviceShaderCorePropertiesAMD; - struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; - struct PhysicalDeviceShaderDrawParametersFeatures; - using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; - struct PhysicalDeviceShaderFloat16Int8Features; - using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; - using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; - struct PhysicalDeviceShaderImageFootprintFeaturesNV; - struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; - struct PhysicalDeviceShaderSMBuiltinsFeaturesNV; - struct PhysicalDeviceShaderSMBuiltinsPropertiesNV; - struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures; - using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; - struct PhysicalDeviceShadingRateImageFeaturesNV; - struct PhysicalDeviceShadingRateImagePropertiesNV; - struct PhysicalDeviceSparseImageFormatInfo2; - using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; - struct PhysicalDeviceSparseProperties; - struct PhysicalDeviceSubgroupProperties; - struct PhysicalDeviceSubgroupSizeControlFeaturesEXT; - struct PhysicalDeviceSubgroupSizeControlPropertiesEXT; - struct PhysicalDeviceSurfaceInfo2KHR; - struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT; - struct 
PhysicalDeviceTexelBufferAlignmentPropertiesEXT; - struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; - struct PhysicalDeviceTimelineSemaphoreFeatures; - using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; - struct PhysicalDeviceTimelineSemaphoreProperties; - using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; - struct PhysicalDeviceToolPropertiesEXT; - struct PhysicalDeviceTransformFeedbackFeaturesEXT; - struct PhysicalDeviceTransformFeedbackPropertiesEXT; - struct PhysicalDeviceUniformBufferStandardLayoutFeatures; - using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; - struct PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT; - struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; - struct PhysicalDeviceVulkan11Features; - struct PhysicalDeviceVulkan11Properties; - struct PhysicalDeviceVulkan12Features; - struct PhysicalDeviceVulkan12Properties; - struct PhysicalDeviceVulkanMemoryModelFeatures; - using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; - struct PhysicalDeviceYcbcrImageArraysFeaturesEXT; - struct PipelineCacheCreateInfo; - struct PipelineColorBlendAdvancedStateCreateInfoEXT; - struct PipelineColorBlendAttachmentState; - struct PipelineColorBlendStateCreateInfo; - struct PipelineCompilerControlCreateInfoAMD; - struct PipelineCoverageModulationStateCreateInfoNV; - struct PipelineCoverageReductionStateCreateInfoNV; - struct PipelineCoverageToColorStateCreateInfoNV; - struct PipelineCreationFeedbackCreateInfoEXT; - struct PipelineCreationFeedbackEXT; - struct PipelineDepthStencilStateCreateInfo; - struct PipelineDiscardRectangleStateCreateInfoEXT; - struct PipelineDynamicStateCreateInfo; - struct PipelineExecutableInfoKHR; - struct PipelineExecutableInternalRepresentationKHR; - struct PipelineExecutablePropertiesKHR; - struct PipelineExecutableStatisticKHR; - union PipelineExecutableStatisticValueKHR; - struct PipelineInfoKHR; - struct PipelineInputAssemblyStateCreateInfo; - struct PipelineLayoutCreateInfo; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct PipelineLibraryCreateInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PipelineMultisampleStateCreateInfo; - struct PipelineRasterizationConservativeStateCreateInfoEXT; - struct PipelineRasterizationDepthClipStateCreateInfoEXT; - struct PipelineRasterizationLineStateCreateInfoEXT; - struct PipelineRasterizationStateCreateInfo; - struct PipelineRasterizationStateRasterizationOrderAMD; - struct PipelineRasterizationStateStreamCreateInfoEXT; - struct PipelineRepresentativeFragmentTestStateCreateInfoNV; - struct PipelineSampleLocationsStateCreateInfoEXT; - struct PipelineShaderStageCreateInfo; - struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; - struct PipelineTessellationDomainOriginStateCreateInfo; - using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; - struct PipelineTessellationStateCreateInfo; - struct PipelineVertexInputDivisorStateCreateInfoEXT; - struct PipelineVertexInputStateCreateInfo; - struct PipelineViewportCoarseSampleOrderStateCreateInfoNV; - 
struct PipelineViewportExclusiveScissorStateCreateInfoNV; - struct PipelineViewportShadingRateImageStateCreateInfoNV; - struct PipelineViewportStateCreateInfo; - struct PipelineViewportSwizzleStateCreateInfoNV; - struct PipelineViewportWScalingStateCreateInfoNV; -#ifdef VK_USE_PLATFORM_GGP - struct PresentFrameTokenGGP; -#endif /*VK_USE_PLATFORM_GGP*/ - struct PresentInfoKHR; - struct PresentRegionKHR; - struct PresentRegionsKHR; - struct PresentTimeGOOGLE; - struct PresentTimesInfoGOOGLE; - struct PrivateDataSlotCreateInfoEXT; - struct ProtectedSubmitInfo; - struct PushConstantRange; - struct QueryPoolCreateInfo; - struct QueryPoolPerformanceCreateInfoKHR; - struct QueryPoolPerformanceQueryCreateInfoINTEL; - using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; - struct QueueFamilyCheckpointPropertiesNV; - struct QueueFamilyProperties; - struct QueueFamilyProperties2; - using QueueFamilyProperties2KHR = QueueFamilyProperties2; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct RayTracingPipelineCreateInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct RayTracingPipelineCreateInfoNV; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct RayTracingPipelineInterfaceCreateInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct RayTracingShaderGroupCreateInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct RayTracingShaderGroupCreateInfoNV; - struct Rect2D; - struct RectLayerKHR; - struct RefreshCycleDurationGOOGLE; - struct RenderPassAttachmentBeginInfo; - using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; - struct RenderPassBeginInfo; - struct RenderPassCreateInfo; - struct RenderPassCreateInfo2; - using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; - struct RenderPassFragmentDensityMapCreateInfoEXT; - struct RenderPassInputAttachmentAspectCreateInfo; - using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; - struct RenderPassMultiviewCreateInfo; - using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; - struct RenderPassSampleLocationsBeginInfoEXT; - struct RenderPassTransformBeginInfoQCOM; - struct SampleLocationEXT; - struct SampleLocationsInfoEXT; - struct SamplerCreateInfo; - struct SamplerCustomBorderColorCreateInfoEXT; - struct SamplerReductionModeCreateInfo; - using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; - struct SamplerYcbcrConversionCreateInfo; - using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; - struct SamplerYcbcrConversionImageFormatProperties; - using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; - struct SamplerYcbcrConversionInfo; - using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; - struct SemaphoreCreateInfo; - struct SemaphoreGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SemaphoreGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SemaphoreSignalInfo; - using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; - struct SemaphoreTypeCreateInfo; - using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; - struct SemaphoreWaitInfo; - using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; - struct SetStateFlagsIndirectCommandNV; - struct ShaderModuleCreateInfo; - struct ShaderModuleValidationCacheCreateInfoEXT; - struct ShaderResourceUsageAMD; - struct ShaderStatisticsInfoAMD; - struct ShadingRatePaletteNV; - struct SharedPresentSurfaceCapabilitiesKHR; - struct SparseBufferMemoryBindInfo; - struct 
SparseImageFormatProperties; - struct SparseImageFormatProperties2; - using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; - struct SparseImageMemoryBind; - struct SparseImageMemoryBindInfo; - struct SparseImageMemoryRequirements; - struct SparseImageMemoryRequirements2; - using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; - struct SparseImageOpaqueMemoryBindInfo; - struct SparseMemoryBind; - struct SpecializationInfo; - struct SpecializationMapEntry; - struct StencilOpState; -#ifdef VK_USE_PLATFORM_GGP - struct StreamDescriptorSurfaceCreateInfoGGP; -#endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct StridedBufferRegionKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct SubmitInfo; - struct SubpassBeginInfo; - using SubpassBeginInfoKHR = SubpassBeginInfo; - struct SubpassDependency; - struct SubpassDependency2; - using SubpassDependency2KHR = SubpassDependency2; - struct SubpassDescription; - struct SubpassDescription2; - using SubpassDescription2KHR = SubpassDescription2; - struct SubpassDescriptionDepthStencilResolve; - using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; - struct SubpassEndInfo; - using SubpassEndInfoKHR = SubpassEndInfo; - struct SubpassSampleLocationsEXT; - struct SubresourceLayout; - struct SurfaceCapabilities2EXT; - struct SurfaceCapabilities2KHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceCapabilitiesFullScreenExclusiveEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SurfaceCapabilitiesKHR; - struct SurfaceFormat2KHR; - struct SurfaceFormatKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveInfoEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveWin32InfoEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SurfaceProtectedCapabilitiesKHR; - struct SwapchainCounterCreateInfoEXT; - struct SwapchainCreateInfoKHR; - struct SwapchainDisplayNativeHdrCreateInfoAMD; - struct TextureLODGatherFormatPropertiesAMD; - struct TimelineSemaphoreSubmitInfo; - using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct TraceRaysIndirectCommandKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct TransformMatrixKHR; - using TransformMatrixNV = TransformMatrixKHR; - struct ValidationCacheCreateInfoEXT; - struct ValidationFeaturesEXT; - struct ValidationFlagsEXT; - struct VertexInputAttributeDescription; - struct VertexInputBindingDescription; - struct VertexInputBindingDivisorDescriptionEXT; -#ifdef VK_USE_PLATFORM_VI_NN - struct ViSurfaceCreateInfoNN; -#endif /*VK_USE_PLATFORM_VI_NN*/ - struct Viewport; - struct ViewportSwizzleNV; - struct ViewportWScalingNV; -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - struct WaylandSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32SurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct WriteDescriptorSet; - struct WriteDescriptorSetAccelerationStructureKHR; - using WriteDescriptorSetAccelerationStructureNV = WriteDescriptorSetAccelerationStructureKHR; - struct WriteDescriptorSetInlineUniformBlockEXT; - struct XYColorEXT; -#ifdef VK_USE_PLATFORM_XCB_KHR - struct XcbSurfaceCreateInfoKHR; -#endif 
/*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - struct XlibSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - - class SurfaceKHR - { - public: - using CType = VkSurfaceKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSurfaceKHR; - - public: - VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT - : m_surfaceKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_surfaceKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT - : m_surfaceKHR( surfaceKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT - { - m_surfaceKHR = surfaceKHR; - return *this; - } -#endif - - SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_surfaceKHR = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceKHR const& ) const = default; -#else - bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR == rhs.m_surfaceKHR; - } - - bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR != rhs.m_surfaceKHR; - } - - bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR < rhs.m_surfaceKHR; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR == VK_NULL_HANDLE; - } - - private: - VkSurfaceKHR m_surfaceKHR; - }; - static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
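Every handle class removed in this hunk follows the zero-cost wrapper pattern shown above for SurfaceKHR: the wrapper stores nothing but the raw C handle, so it is layout-compatible with it (which is what the static_assert enforces), converts back explicitly, and compares and null-checks like a pointer. The following standalone sketch is illustrative only (the function name inspect is hypothetical, and only <vulkan/vulkan.hpp> is assumed); it exercises the guarantees the class above encodes:

    #include <vulkan/vulkan.hpp>

    // Minimal sketch: round-trips a C handle through the C++ wrapper.
    void inspect( VkSurfaceKHR raw )
    {
        // Same size as the C handle: the wrapper adds no state.
        static_assert( sizeof( vk::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "zero-cost wrapper" );
        vk::SurfaceKHR surface{ raw };    // wrap the C handle (explicit on 64-bit targets)
        if ( surface )                    // operator bool(): non-null check
        {
            VkSurfaceKHR back = static_cast<VkSurfaceKHR>( surface ); // unwrap losslessly
            (void)back;
        }
        surface = nullptr;                // reset to VK_NULL_HANDLE
    }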
Use vk::CppType instead.") cpp_type - { - using type = SurfaceKHR; - }; - - template <> - struct CppType - { - using Type = SurfaceKHR; - }; - - class DebugReportCallbackEXT - { - public: - using CType = VkDebugReportCallbackEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugReportCallbackEXT; - - public: - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT( debugReportCallbackEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT - { - m_debugReportCallbackEXT = debugReportCallbackEXT; - return *this; - } -#endif - - DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_debugReportCallbackEXT = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugReportCallbackEXT const& ) const = default; -#else - bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; - } - - bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; - } - - bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT == VK_NULL_HANDLE; - } - - private: - VkDebugReportCallbackEXT m_debugReportCallbackEXT; - }; - static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DebugReportCallbackEXT; - }; - - template <> - struct CppType - { - using Type = DebugReportCallbackEXT; - }; - - class DebugUtilsMessengerEXT - { - public: - using CType = VkDebugUtilsMessengerEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugUtilsMessengerEXT; - - public: - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT - { - m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; - return *this; - } -#endif - - DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_debugUtilsMessengerEXT = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsMessengerEXT const& ) const = default; -#else - bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; - } - - bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; - } - - bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; - } - - private: - VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT; - }; - static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DebugUtilsMessengerEXT; - }; - - template <> - struct CppType - { - using Type = DebugUtilsMessengerEXT; - }; - - class DisplayKHR - { - public: - using CType = VkDisplayKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayKHR; - - public: - VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT - : m_displayKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_displayKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT - : m_displayKHR( displayKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT - { - m_displayKHR = displayKHR; - return *this; - } -#endif - - DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_displayKHR = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayKHR const& ) const = default; -#else - bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR == rhs.m_displayKHR; - } - - bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR != rhs.m_displayKHR; - } - - bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR < rhs.m_displayKHR; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR == VK_NULL_HANDLE; - } - - private: - VkDisplayKHR m_displayKHR; - }; - static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DisplayKHR; - }; - - template <> - struct CppType - { - using Type = DisplayKHR; - }; - - class SwapchainKHR - { - public: - using CType = VkSwapchainKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSwapchainKHR; - - public: - VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT - : m_swapchainKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_swapchainKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT - : m_swapchainKHR( swapchainKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT - { - m_swapchainKHR = swapchainKHR; - return *this; - } -#endif - - SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_swapchainKHR = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainKHR const& ) const = default; -#else - bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR == rhs.m_swapchainKHR; - } - - bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR != rhs.m_swapchainKHR; - } - - bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR < rhs.m_swapchainKHR; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR == VK_NULL_HANDLE; - } - - private: - VkSwapchainKHR m_swapchainKHR; - }; - static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = SwapchainKHR; - }; - - template <> - struct CppType - { - using Type = SwapchainKHR; - }; - - class Semaphore - { - public: - using CType = VkSemaphore; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSemaphore; - - public: - VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT - : m_semaphore(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_semaphore(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT - : m_semaphore( semaphore ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT - { - m_semaphore = semaphore; - return *this; - } -#endif - - Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_semaphore = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Semaphore const& ) const = default; -#else - bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore == rhs.m_semaphore; - } - - bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore != rhs.m_semaphore; - } - - bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore < rhs.m_semaphore; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore == VK_NULL_HANDLE; - } - - private: - VkSemaphore m_semaphore; - }; - static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Semaphore; - }; - - template <> - struct CppType - { - using Type = Semaphore; - }; - - class Fence - { - public: - using CType = VkFence; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFence; - - public: - VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT - : m_fence(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_fence(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT - : m_fence( fence ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT - { - m_fence = fence; - return *this; - } -#endif - - Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_fence = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Fence const& ) const = default; -#else - bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence == rhs.m_fence; - } - - bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence != rhs.m_fence; - } - - bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence < rhs.m_fence; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT - { - return m_fence; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_fence != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_fence == VK_NULL_HANDLE; - } - - private: - VkFence m_fence; - }; - static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Fence; - }; - - template <> - struct CppType - { - using Type = Fence; - }; - - class PerformanceConfigurationINTEL - { - public: - using CType = VkPerformanceConfigurationINTEL; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePerformanceConfigurationINTEL; - - public: - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT - { - m_performanceConfigurationINTEL = performanceConfigurationINTEL; - return *this; - } -#endif - - PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_performanceConfigurationINTEL = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceConfigurationINTEL const& ) const = default; -#else - bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; - } - - bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; - } - - bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL == VK_NULL_HANDLE; - } - - private: - VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL; - }; - static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = PerformanceConfigurationINTEL; - }; - - template <> - struct CppType - { - using Type = PerformanceConfigurationINTEL; - }; - - class QueryPool - { - public: - using CType = VkQueryPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueryPool; - - public: - VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT - : m_queryPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_queryPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT - : m_queryPool( queryPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT - { - m_queryPool = queryPool; - return *this; - } -#endif - - QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_queryPool = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPool const& ) const = default; -#else - bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool == rhs.m_queryPool; - } - - bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool != rhs.m_queryPool; - } - - bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool < rhs.m_queryPool; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT - { - return m_queryPool; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_queryPool != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_queryPool == VK_NULL_HANDLE; - } - - private: - VkQueryPool m_queryPool; - }; - static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = QueryPool; - }; - - template <> - struct CppType - { - using Type = QueryPool; - }; - - class Buffer - { - public: - using CType = VkBuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBuffer; - - public: - VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT - : m_buffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_buffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT - : m_buffer( buffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT - { - m_buffer = buffer; - return *this; - } -#endif - - Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_buffer = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Buffer const& ) const = default; -#else - bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_buffer == rhs.m_buffer; - } - - bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_buffer != rhs.m_buffer; - } - - bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_buffer < rhs.m_buffer; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT - { - return m_buffer; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_buffer != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_buffer == VK_NULL_HANDLE; - } - - private: - VkBuffer m_buffer; - }; - static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Buffer; - }; - - template <> - struct CppType - { - using Type = Buffer; - }; - - class PipelineLayout - { - public: - using CType = VkPipelineLayout; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineLayout; - - public: - VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT - : m_pipelineLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipelineLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT - : m_pipelineLayout( pipelineLayout ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT - { - m_pipelineLayout = pipelineLayout; - return *this; - } -#endif - - PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_pipelineLayout = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineLayout const& ) const = default; -#else - bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout == rhs.m_pipelineLayout; - } - - bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout != rhs.m_pipelineLayout; - } - - bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout < rhs.m_pipelineLayout; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout == VK_NULL_HANDLE; - } - - private: - VkPipelineLayout m_pipelineLayout; - }; - static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = PipelineLayout; - }; - - template <> - struct CppType - { - using Type = PipelineLayout; - }; - - class DescriptorSet - { - public: - using CType = VkDescriptorSet; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSet; - - public: - VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT - : m_descriptorSet(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorSet(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT - : m_descriptorSet( descriptorSet ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT - { - m_descriptorSet = descriptorSet; - return *this; - } -#endif - - DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorSet = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSet const& ) const = default; -#else - bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet == rhs.m_descriptorSet; - } - - bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet != rhs.m_descriptorSet; - } - - bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet < rhs.m_descriptorSet; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet == VK_NULL_HANDLE; - } - - private: - VkDescriptorSet m_descriptorSet; - }; - static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DescriptorSet; - }; - - template <> - struct CppType - { - using Type = DescriptorSet; - }; - - class Pipeline - { - public: - using CType = VkPipeline; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipeline; - - public: - VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT - : m_pipeline(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipeline(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT - : m_pipeline( pipeline ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT - { - m_pipeline = pipeline; - return *this; - } -#endif - - Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_pipeline = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Pipeline const& ) const = default; -#else - bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline == rhs.m_pipeline; - } - - bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline != rhs.m_pipeline; - } - - bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline < rhs.m_pipeline; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT - { - return m_pipeline; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_pipeline != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_pipeline == VK_NULL_HANDLE; - } - - private: - VkPipeline m_pipeline; - }; - static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Pipeline; - }; - - template <> - struct CppType - { - using Type = Pipeline; - }; - - class ImageView - { - public: - using CType = VkImageView; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImageView; - - public: - VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT - : m_imageView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_imageView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT - : m_imageView( imageView ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT - { - m_imageView = imageView; - return *this; - } -#endif - - ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_imageView = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageView const& ) const = default; -#else - bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView == rhs.m_imageView; - } - - bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView != rhs.m_imageView; - } - - bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView < rhs.m_imageView; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT - { - return m_imageView; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_imageView != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_imageView == VK_NULL_HANDLE; - } - - private: - VkImageView m_imageView; - }; - static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = ImageView; - }; - - template <> - struct CppType - { - using Type = ImageView; - }; - - class Image - { - public: - using CType = VkImage; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImage; - - public: - VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT - : m_image(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_image(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT - : m_image( image ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT - { - m_image = image; - return *this; - } -#endif - - Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_image = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Image const& ) const = default; -#else - bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image == rhs.m_image; - } - - bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image != rhs.m_image; - } - - bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image < rhs.m_image; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT - { - return m_image; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_image != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_image == VK_NULL_HANDLE; - } - - private: - VkImage m_image; - }; - static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Image; - }; - - template <> - struct CppType - { - using Type = Image; - }; - - class AccelerationStructureKHR - { - public: - using CType = VkAccelerationStructureKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureKHR; - - public: - VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT - : m_accelerationStructureKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureKHR( accelerationStructureKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT - { - m_accelerationStructureKHR = accelerationStructureKHR; - return *this; - } -#endif - - AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_accelerationStructureKHR = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureKHR const& ) const = default; -#else - bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; - } - - bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; - } - - bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR == VK_NULL_HANDLE; - } - - private: - VkAccelerationStructureKHR m_accelerationStructureKHR; - }; - static_assert( sizeof( AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = AccelerationStructureKHR; - }; - - template <> - struct CppType - { - using Type = AccelerationStructureKHR; - }; - using AccelerationStructureNV = AccelerationStructureKHR; - - class DescriptorUpdateTemplate - { - public: - using CType = VkDescriptorUpdateTemplate; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorUpdateTemplate; - - public: - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT - { - m_descriptorUpdateTemplate = descriptorUpdateTemplate; - return *this; - } -#endif - - DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorUpdateTemplate = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorUpdateTemplate const& ) const = default; -#else - bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; - } - - bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; - } - - bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate == VK_NULL_HANDLE; - } - - private: - VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; - }; - static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DescriptorUpdateTemplate; - }; - - template <> - struct CppType - { - using Type = DescriptorUpdateTemplate; - }; - using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; - - class Event - { - public: - using CType = VkEvent; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eEvent; - - public: - VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT - : m_event(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_event(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT - : m_event( event ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT - { - m_event = event; - return *this; - } -#endif - - Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_event = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Event const& ) const = default; -#else - bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event == rhs.m_event; - } - - bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event != rhs.m_event; - } - - bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event < rhs.m_event; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT - { - return m_event; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_event != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_event == VK_NULL_HANDLE; - } - - private: - VkEvent m_event; - }; - static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Event; - }; - - template <> - struct CppType - { - using Type = Event; - }; - - class CommandBuffer - { - public: - using CType = VkCommandBuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandBuffer; - - public: - VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT - : m_commandBuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_commandBuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT - : m_commandBuffer( commandBuffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT - { - m_commandBuffer = commandBuffer; - return *this; - } -#endif - - CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_commandBuffer = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBuffer const& ) const = default; -#else - bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer == rhs.m_commandBuffer; - } - - bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer != rhs.m_commandBuffer; - } - - bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer < rhs.m_commandBuffer; - } -#endif - - template - Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template - void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, 
Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> strides, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/
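The paired declarations above and below show the header's two calling conventions: a C-style overload mirroring the raw entry point (explicit count plus pointer), and an enhanced-mode overload, compiled out under VULKAN_HPP_DISABLE_ENHANCED_MODE, where ArrayProxy deduces the count from any contiguous container. A short usage sketch for bindVertexBuffers, illustrative only (the function name bindGeometry is hypothetical; a valid recording command buffer and two valid buffers are assumed):

    #include <array>
    #include <vulkan/vulkan.hpp>

    void bindGeometry( vk::CommandBuffer cmd, vk::Buffer vertexBuffer, vk::Buffer instanceBuffer )
    {
        std::array<vk::Buffer, 2>     buffers{ vertexBuffer, instanceBuffer };
        std::array<vk::DeviceSize, 2> offsets{ 0, 0 };
        // Enhanced-mode overload: bindingCount is inferred from the containers,
        // replacing the explicit count/pointer pair of the C-style overload.
        cmd.bindVertexBuffers( 0, buffers, offsets );
    }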
 - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void buildAccelerationStructureKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const> pOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - 
template - void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const 
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerEndEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
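A minimal sketch combining the debug-marker and dispatch members above; it assumes VK_EXT_debug_marker was enabled at device creation and a compute pipeline is already bound:

#include <vulkan/vulkan.hpp>

// Brackets a compute dispatch with a named region for capture tools.
void recordCompute( vk::CommandBuffer cmd, uint32_t groupsX )
{
  vk::DebugMarkerMarkerInfoEXT marker{};
  marker.pMarkerName = "compute pass";
  cmd.debugMarkerBeginEXT( marker );
  cmd.dispatch( groupsX, 1, 1 );
  cmd.debugMarkerEndEXT();
}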
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
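A minimal GPU-driven-rendering sketch of drawIndexedIndirectCount above. It assumes `indirect` holds an array of vk::DrawIndexedIndirectCommand records and `count` holds a uint32_t draw count written on the GPU:

#include <vulkan/vulkan.hpp>

// Issues up to maxDraws draws; the actual count is read from `count` at offset 0.
void gpuDrivenDraws( vk::CommandBuffer cmd, vk::Buffer indirect, vk::Buffer count, uint32_t maxDraws )
{
  cmd.drawIndexedIndirectCount( indirect, 0, count, 0, maxDraws,
                                sizeof( vk::DrawIndexedIndirectCommand ) );
}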
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endConditionalRenderingEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
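A minimal sketch of the executeCommands overload above; the ArrayProxy constructs directly from a std::vector, so the count/pointer pair is implicit. The secondary command buffers are assumed to be fully recorded:

#include <vector>
#include <vulkan/vulkan.hpp>

// Replays pre-recorded secondary command buffers inside a primary one.
void runSecondaries( vk::CommandBuffer primary, const std::vector<vk::CommandBuffer>& secondaries )
{
  primary.executeCommands( secondaries );
}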
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
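A minimal sketch of the enhanced pipelineBarrier overload above, recording the common undefined-to-transfer-destination image layout transition; the image and its single-mip, single-layer color subresource range are assumptions:

#include <vulkan/vulkan.hpp>

// Transitions `image` so a subsequent transfer write is valid.
void toTransferDst( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier{};
  barrier.srcAccessMask       = {};
  barrier.dstAccessMask       = vk::AccessFlagBits::eTransferWrite;
  barrier.oldLayout           = vk::ImageLayout::eUndefined;
  barrier.newLayout           = vk::ImageLayout::eTransferDstOptimal;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.image               = image;
  barrier.subresourceRange    = { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
                       {}, nullptr, nullptr, barrier ); // no memory/buffer barriers
}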
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setBlendConstants( const float blendConstants[4], Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
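A minimal sketch of the templated pushConstants overload above; the byte size is deduced from ArrayProxy<const T>, so the raw size/pValues pair is not needed. The PushData struct and the vertex-stage range in the pipeline layout are assumptions:

#include <vulkan/vulkan.hpp>

struct PushData { float scale; float bias; }; // must match the layout's push-constant range

void setPush( vk::CommandBuffer cmd, vk::PipelineLayout layout, const PushData& pc )
{
  cmd.pushConstants<PushData>( layout, vk::ShaderStageFlagBits::eVertex, 0, pc );
}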
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMask( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineWidth( float lineWidth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
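A minimal sketch of the VK_EXT_extended_dynamic_state setters above; it assumes the extension was enabled and the bound pipeline declares these states as dynamic:

#include <vulkan/vulkan.hpp>

// Sets rasterization and depth state at record time instead of bake time.
void setRasterState( vk::CommandBuffer cmd )
{
  cmd.setCullModeEXT( vk::CullModeFlagBits::eBack );
  cmd.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
  cmd.setDepthTestEnableEXT( VK_TRUE );
  cmd.setDepthWriteEnableEXT( VK_TRUE );
  cmd.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );
}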
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
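A minimal sketch of the dynamic stencil setters above; masks and the reference value are illustrative, and setStencilOpEXT additionally assumes VK_EXT_extended_dynamic_state:

#include <vulkan/vulkan.hpp>

// Configures both stencil faces for a simple write-on-pass setup.
void setStencilDynamicState( vk::CommandBuffer cmd )
{
  const auto faces = vk::StencilFaceFlagBits::eFrontAndBack;
  cmd.setStencilCompareMask( faces, 0xFF );
  cmd.setStencilWriteMask( faces, 0xFF );
  cmd.setStencilReference( faces, 1 );
  cmd.setStencilOpEXT( faces, vk::StencilOp::eKeep, vk::StencilOp::eReplace,
                       vk::StencilOp::eKeep, vk::CompareOp::eAlways );
}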
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
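A minimal sketch of the with-count variants above, which set the viewport/scissor count dynamically (VK_EXT_extended_dynamic_state is assumed enabled and the pipeline built with the corresponding dynamic states):

#include <vulkan/vulkan.hpp>

// Covers the whole swapchain extent with one viewport and scissor.
void setFullscreenViewport( vk::CommandBuffer cmd, vk::Extent2D extent )
{
  const vk::Viewport vp( 0.0f, 0.0f, static_cast<float>( extent.width ),
                         static_cast<float>( extent.height ), 0.0f, 1.0f );
  cmd.setViewportWithCountEXT( vp );                   // count inferred from ArrayProxy
  cmd.setScissorWithCountEXT( vk::Rect2D( { 0, 0 }, extent ) );
}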
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
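A minimal sketch of the templated updateBuffer overload above; note the spec restricts this command to small inline updates (at most 65536 bytes) recorded outside a render pass:

#include <vulkan/vulkan.hpp>

// Writes a single counter value into `buf` at offset 0; the byte size is
// deduced from ArrayProxy<const uint32_t>.
void writeCounter( vk::CommandBuffer cmd, vk::Buffer buf, uint32_t value )
{
  cmd.updateBuffer<uint32_t>( buf, 0, value );
}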
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer;
- }
-
- explicit operator bool() const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer != VK_NULL_HANDLE;
- }
-
- bool operator!() const VULKAN_HPP_NOEXCEPT
- {
- return m_commandBuffer == VK_NULL_HANDLE;
- }
-
- private:
- VkCommandBuffer m_commandBuffer;
- };
- static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
-
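A minimal sketch of the handle-wrapper semantics above: explicit operator bool tests against VK_NULL_HANDLE, and the static_assert guarantees the wrapper is layout-compatible with the raw handle, so its address can feed pointer fields directly. Exceptions are assumed enabled, so end() and submit() throw vk::SystemError instead of returning a Result:

#include <vulkan/vulkan.hpp>

// Finishes recording and submits one command buffer if it is non-null.
void submitIfValid( vk::Queue queue, vk::CommandBuffer cmd )
{
  if ( !cmd )
    return;                        // operator! compares against VK_NULL_HANDLE
  cmd.end();                       // enhanced mode: void return, throws on failure
  vk::SubmitInfo info{};
  info.commandBufferCount = 1;
  info.pCommandBuffers    = &cmd;  // valid because wrapper and VkCommandBuffer share size
  queue.submit( info, nullptr );   // no fence
}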
Use vk::CppType instead.") cpp_type - { - using type = CommandBuffer; - }; - - template <> - struct CppType - { - using Type = CommandBuffer; - }; - - class DeviceMemory - { - public: - using CType = VkDeviceMemory; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeviceMemory; - - public: - VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT - : m_deviceMemory(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_deviceMemory(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT - : m_deviceMemory( deviceMemory ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT - { - m_deviceMemory = deviceMemory; - return *this; - } -#endif - - DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_deviceMemory = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceMemory const& ) const = default; -#else - bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory == rhs.m_deviceMemory; - } - - bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory != rhs.m_deviceMemory; - } - - bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory < rhs.m_deviceMemory; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory == VK_NULL_HANDLE; - } - - private: - VkDeviceMemory m_deviceMemory; - }; - static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DeviceMemory; - }; - - template <> - struct CppType - { - using Type = DeviceMemory; - }; - - class BufferView - { - public: - using CType = VkBufferView; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBufferView; - - public: - VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT - : m_bufferView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_bufferView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT - : m_bufferView( bufferView ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT - { - m_bufferView = bufferView; - return *this; - } -#endif - - BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_bufferView = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferView const& ) const = default; -#else - bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView == rhs.m_bufferView; - } - - bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView != rhs.m_bufferView; - } - - bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView < rhs.m_bufferView; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView == VK_NULL_HANDLE; - } - - private: - VkBufferView m_bufferView; - }; - static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = BufferView; - }; - - template <> - struct CppType - { - using Type = BufferView; - }; - - class CommandPool - { - public: - using CType = VkCommandPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandPool; - - public: - VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT - : m_commandPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_commandPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT - : m_commandPool( commandPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT - { - m_commandPool = commandPool; - return *this; - } -#endif - - CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_commandPool = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandPool const& ) const = default; -#else - bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool == rhs.m_commandPool; - } - - bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool != rhs.m_commandPool; - } - - bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool < rhs.m_commandPool; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool == VK_NULL_HANDLE; - } - - private: - VkCommandPool m_commandPool; - }; - static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = CommandPool; - }; - - template <> - struct CppType - { - using Type = CommandPool; - }; - - class PipelineCache - { - public: - using CType = VkPipelineCache; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineCache; - - public: - VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT - : m_pipelineCache(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_pipelineCache(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT - : m_pipelineCache( pipelineCache ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT - { - m_pipelineCache = pipelineCache; - return *this; - } -#endif - - PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_pipelineCache = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCache const& ) const = default; -#else - bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache == rhs.m_pipelineCache; - } - - bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache != rhs.m_pipelineCache; - } - - bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache < rhs.m_pipelineCache; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache == VK_NULL_HANDLE; - } - - private: - VkPipelineCache m_pipelineCache; - }; - static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = PipelineCache; - }; - - template <> - struct CppType - { - using Type = PipelineCache; - }; - -#ifdef VK_ENABLE_BETA_EXTENSIONS - class DeferredOperationKHR - { - public: - using CType = VkDeferredOperationKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeferredOperationKHR; - - public: - VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT - : m_deferredOperationKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_deferredOperationKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT - : m_deferredOperationKHR( deferredOperationKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT - { - m_deferredOperationKHR = deferredOperationKHR; - return *this; - } -#endif - - DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_deferredOperationKHR = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeferredOperationKHR const& ) const = default; -#else - bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR == rhs.m_deferredOperationKHR; - } - - bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR != rhs.m_deferredOperationKHR; - } - - bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR < rhs.m_deferredOperationKHR; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR == VK_NULL_HANDLE; - } - - private: - VkDeferredOperationKHR m_deferredOperationKHR; - }; - static_assert( sizeof( DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DeferredOperationKHR; - }; - - template <> - struct CppType - { - using Type = DeferredOperationKHR; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - class DescriptorPool - { - public: - using CType = VkDescriptorPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorPool; - - public: - VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT - : m_descriptorPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT - : m_descriptorPool( descriptorPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT - { - m_descriptorPool = descriptorPool; - return *this; - } -#endif - - DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorPool = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorPool const& ) const = default; -#else - bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool == rhs.m_descriptorPool; - } - - bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool != rhs.m_descriptorPool; - } - - bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool < rhs.m_descriptorPool; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool == VK_NULL_HANDLE; - } - - private: - VkDescriptorPool m_descriptorPool; - }; - static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DescriptorPool; - }; - - template <> - struct CppType - { - using Type = DescriptorPool; - }; - - class DescriptorSetLayout - { - public: - using CType = VkDescriptorSetLayout; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSetLayout; - - public: - VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout( descriptorSetLayout ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT - { - m_descriptorSetLayout = descriptorSetLayout; - return *this; - } -#endif - - DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorSetLayout = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayout const& ) const = default; -#else - bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout == rhs.m_descriptorSetLayout; - } - - bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout != rhs.m_descriptorSetLayout; - } - - bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout < rhs.m_descriptorSetLayout; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout == VK_NULL_HANDLE; - } - - private: - VkDescriptorSetLayout m_descriptorSetLayout; - }; - static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DescriptorSetLayout; - }; - - template <> - struct CppType - { - using Type = DescriptorSetLayout; - }; - - class Framebuffer - { - public: - using CType = VkFramebuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFramebuffer; - - public: - VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT - : m_framebuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_framebuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT - : m_framebuffer( framebuffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT - { - m_framebuffer = framebuffer; - return *this; - } -#endif - - Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_framebuffer = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Framebuffer const& ) const = default; -#else - bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer == rhs.m_framebuffer; - } - - bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer != rhs.m_framebuffer; - } - - bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer < rhs.m_framebuffer; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer == VK_NULL_HANDLE; - } - - private: - VkFramebuffer m_framebuffer; - }; - static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Framebuffer; - }; - - template <> - struct CppType - { - using Type = Framebuffer; - }; - - class IndirectCommandsLayoutNV - { - public: - using CType = VkIndirectCommandsLayoutNV; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNV; - - public: - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNV(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT - { - m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; - return *this; - } -#endif - - IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_indirectCommandsLayoutNV = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutNV const& ) const = default; -#else - bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; - } - - bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; - } - - bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; - } - - private: - VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV; - }; - static_assert( sizeof( IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = IndirectCommandsLayoutNV; - }; - - template <> - struct CppType - { - using Type = IndirectCommandsLayoutNV; - }; - - class PrivateDataSlotEXT - { - public: - using CType = VkPrivateDataSlotEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePrivateDataSlotEXT; - - public: - VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT() VULKAN_HPP_NOEXCEPT - : m_privateDataSlotEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_privateDataSlotEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT - : m_privateDataSlotEXT( privateDataSlotEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PrivateDataSlotEXT & operator=(VkPrivateDataSlotEXT privateDataSlotEXT) VULKAN_HPP_NOEXCEPT - { - m_privateDataSlotEXT = privateDataSlotEXT; - return *this; - } -#endif - - PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_privateDataSlotEXT = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PrivateDataSlotEXT const& ) const = default; -#else - bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT; - } - - bool operator!=(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT; - } - - bool operator<(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlotEXT; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlotEXT != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlotEXT == VK_NULL_HANDLE; - } - - private: - VkPrivateDataSlotEXT m_privateDataSlotEXT; - }; - static_assert( sizeof( PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = PrivateDataSlotEXT; - }; - - template <> - struct CppType - { - using Type = PrivateDataSlotEXT; - }; - - class RenderPass - { - public: - using CType = VkRenderPass; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eRenderPass; - - public: - VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT - : m_renderPass(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_renderPass(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT - : m_renderPass( renderPass ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT - { - m_renderPass = renderPass; - return *this; - } -#endif - - RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_renderPass = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPass const& ) const = default; -#else - bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass == rhs.m_renderPass; - } - - bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass != rhs.m_renderPass; - } - - bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass < rhs.m_renderPass; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass == VK_NULL_HANDLE; - } - - private: - VkRenderPass m_renderPass; - }; - static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type<VkRenderPass> - { - using type = RenderPass; - }; - - template <> - struct CppType<ObjectType, ObjectType::eRenderPass> - { - using Type = RenderPass; - }; - - class Sampler - { - public: - using CType = VkSampler; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSampler; - - public: - VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT - : m_sampler(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_sampler(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT - : m_sampler( sampler ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT - { - m_sampler = sampler; - return *this; - } -#endif - - Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_sampler = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Sampler const& ) const = default; -#else - bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler == rhs.m_sampler; - } - - bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler != rhs.m_sampler; - } - - bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler < rhs.m_sampler; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT - { - return m_sampler; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_sampler != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_sampler == VK_NULL_HANDLE; - } - - private: - VkSampler m_sampler; - }; - static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type<VkSampler> - { - using type = Sampler; - }; - - template <> - struct CppType<ObjectType, ObjectType::eSampler> - { - using Type = Sampler; - }; - - class SamplerYcbcrConversion - { - public: - using CType = VkSamplerYcbcrConversion; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSamplerYcbcrConversion; - - public: - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT - : m_samplerYcbcrConversion(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_samplerYcbcrConversion(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT - : m_samplerYcbcrConversion( samplerYcbcrConversion ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT - { - m_samplerYcbcrConversion = samplerYcbcrConversion; - return *this; - } -#endif - - SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_samplerYcbcrConversion = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerYcbcrConversion const& ) const = default; -#else - bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; - } - - bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; - } - - bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion == VK_NULL_HANDLE; - } - - private: - VkSamplerYcbcrConversion m_samplerYcbcrConversion; - }; - static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type<VkSamplerYcbcrConversion> - { - using type = SamplerYcbcrConversion; - }; - - template <> - struct CppType<ObjectType, ObjectType::eSamplerYcbcrConversion> - { - using Type = SamplerYcbcrConversion; - }; - using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; - - class ShaderModule - { - public: - using CType = VkShaderModule; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eShaderModule; - - public: - VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT - : m_shaderModule(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_shaderModule(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT - : m_shaderModule( shaderModule ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT - { - m_shaderModule = shaderModule; - return *this; - } -#endif - - ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_shaderModule = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderModule const& ) const = default; -#else - bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule == rhs.m_shaderModule; - } - - bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule != rhs.m_shaderModule; - } - - bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule < rhs.m_shaderModule; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule == VK_NULL_HANDLE; - } - - private: - VkShaderModule m_shaderModule; - }; - static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type<VkShaderModule> - { - using type = ShaderModule; - }; - - template <> - struct CppType<ObjectType, ObjectType::eShaderModule> - { - using Type = ShaderModule; - }; - - class ValidationCacheEXT - { - public: - using CType = VkValidationCacheEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eValidationCacheEXT; - - public: - VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT - : m_validationCacheEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_validationCacheEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT - : m_validationCacheEXT( validationCacheEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT - { - m_validationCacheEXT = validationCacheEXT; - return *this; - } -#endif - - ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_validationCacheEXT = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ValidationCacheEXT const& ) const = default; -#else - bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT == rhs.m_validationCacheEXT; - } - - bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT != rhs.m_validationCacheEXT; - } - - bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT < rhs.m_validationCacheEXT; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT == VK_NULL_HANDLE; - } - - private: - VkValidationCacheEXT m_validationCacheEXT; - }; - static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type<VkValidationCacheEXT> - { - using type = ValidationCacheEXT; - }; - - template <> - struct CppType<ObjectType, ObjectType::eValidationCacheEXT> - { - using Type = ValidationCacheEXT; - }; - - class Queue - { - public: - using CType = VkQueue; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueue; - - public: - VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT - : m_queue(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_queue(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT - : m_queue( queue ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT - { - m_queue = queue; - return *this; - } -#endif - - Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_queue = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Queue const& ) const = default; -#else - bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue == rhs.m_queue; - } - - bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue != rhs.m_queue; - } - - bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue < rhs.m_queue; - } -#endif - - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; - - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT - { - return m_queue; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_queue != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_queue == VK_NULL_HANDLE; - } - - private: - VkQueue m_queue; - }; - static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = Queue; - }; - - template <> - struct CppType - { - using Type = Queue; - }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - class Device; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueAccelerationStructureKHR = UniqueHandle; - using UniqueAccelerationStructureNV = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueBuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueBufferView = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = PoolFree; }; - using UniqueCommandBuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueCommandPool = UniqueHandle; -#ifdef VK_ENABLE_BETA_EXTENSIONS - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDeferredOperationKHR = UniqueHandle; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = PoolFree; }; - using UniqueDescriptorSet = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorSetLayout = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorUpdateTemplate = UniqueHandle; - using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectFree; }; - using UniqueDeviceMemory = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueEvent = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueFence = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueFramebuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueImage = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueImageView = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueIndirectCommandsLayoutNV = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipeline = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipelineCache = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipelineLayout = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePrivateDataSlotEXT = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueQueryPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueRenderPass = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSampler = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSamplerYcbcrConversion = UniqueHandle; - using UniqueSamplerYcbcrConversionKHR = UniqueHandle; - template class UniqueHandleTraits { 
public: using deleter = ObjectDestroy; }; - using UniqueSemaphore = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueShaderModule = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSwapchainKHR = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueValidationCacheEXT = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - class Device - { - public: - using CType = VkDevice; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDevice; - - public: - VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT - : m_device(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_device(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT - : m_device( device ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT - { - m_device = device; - return *this; - } -#endif - - Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_device = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Device const& ) const = default; -#else - bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device == rhs.m_device; - } - - bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device != rhs.m_device; - } - - bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device < rhs.m_device; - } -#endif - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValue acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValue acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const 
VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result 
allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindAccelerationStructureMemoryKHR( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindBufferMemory2( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = 
VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, 
Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createBuffer( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createBufferView( const 
BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDeferredOperationKHR( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDeferredOperationKHRUnique( Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createEvent( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createEventUnique( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createFence( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, 
VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createImage( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageUnique( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createImageView( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result 
createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValue> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Allocator = std::allocator<UniqueHandle<SwapchainKHR,Dispatch>>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<UniqueHandle<SwapchainKHR,Dispatch>>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const
 VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const ValidationCacheEXT> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void releaseProfilingLockKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type resetFences( ArrayProxy<const Fence> fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> swapchains, ArrayProxy<const HdrMetadataEXT> metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void uninitializePerformanceApiINTEL(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result waitForFences( ArrayProxy<const Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const AccelerationStructureKHR> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> data, size_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_device;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_device != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_device == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkDevice m_device;
-  };
-  static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
-
-  template <>
Use vk::CppType instead.") cpp_type - { - using type = Device; - }; - - template <> - struct CppType - { - using Type = Device; - }; - - class DisplayModeKHR - { - public: - using CType = VkDisplayModeKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayModeKHR; - - public: - VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT - : m_displayModeKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_displayModeKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT - : m_displayModeKHR( displayModeKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT - { - m_displayModeKHR = displayModeKHR; - return *this; - } -#endif - - DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_displayModeKHR = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModeKHR const& ) const = default; -#else - bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR == rhs.m_displayModeKHR; - } - - bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR != rhs.m_displayModeKHR; - } - - bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR < rhs.m_displayModeKHR; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR == VK_NULL_HANDLE; - } - - private: - VkDisplayModeKHR m_displayModeKHR; - }; - static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" ); - - template <> - struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. 
Use vk::CppType instead.") cpp_type - { - using type = DisplayModeKHR; - }; - - template <> - struct CppType - { - using Type = DisplayModeKHR; - }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDevice = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - class PhysicalDevice - { - public: - using CType = VkPhysicalDevice; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePhysicalDevice; - - public: - VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT - : m_physicalDevice(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( physicalDevice ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT - { - m_physicalDevice = physicalDevice; - return *this; - } -#endif - - PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_physicalDevice = VK_NULL_HANDLE; - return *this; - } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevice const& ) const = default; -#else - bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice == rhs.m_physicalDevice; - } - - bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice != rhs.m_physicalDevice; - } - - bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice < rhs.m_physicalDevice; - } -#endif - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - template - Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDevice( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR 
-    typename ResultValueType<DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<PerformanceCounterKHR> counters, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template <typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#else
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_physicalDevice;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_physicalDevice != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_physicalDevice == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkPhysicalDevice m_physicalDevice;
-  };
-  static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePhysicalDevice>
-  {
-    using type = PhysicalDevice;
-  };
-
-  template <>
-  struct CppType<ObjectType, ObjectType::ePhysicalDevice>
-  {
-    using Type = PhysicalDevice;
-  };
-
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  class Instance;
-  template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
-  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
-  template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
-  using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
-  template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
-  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-
-  class Instance
-  {
-  public:
-    using CType = VkInstance;
-
-    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eInstance;
-
-  public:
-    VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
-      : m_instance(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-      : m_instance(VK_NULL_HANDLE)
-    {}
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
-      : m_instance( instance )
-    {}
-
-#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
-    Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
-    {
-      m_instance = instance;
-      return *this;
-    }
-#endif
-
-    Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_instance = VK_NULL_HANDLE;
-      return *this;
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Instance const& ) const = default;
-#else
-    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_instance == rhs.m_instance;
-    }
-
-    bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_instance != rhs.m_instance;
-    }
-
-    bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_instance < rhs.m_instance;
-    }
-#endif
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
-
-#ifdef VK_USE_PLATFORM_GGP
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_GGP*/
-
-#ifdef VK_USE_PLATFORM_VI_NN
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_VI_NN*/
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
-    template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_instance;
-    }
-
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_instance != VK_NULL_HANDLE;
-    }
-
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_instance == VK_NULL_HANDLE;
-    }
-
-  private:
-    VkInstance m_instance;
-  };
-  static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
-
-  template <>
-  struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eInstance>
Use vk::CppType instead.") cpp_type - { - using type = Instance; - }; - - template <> - struct CppType - { - using Type = Instance; - }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueInstance = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - template - Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); - template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - struct AabbPositionsKHR { - - +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, @@ -21505,14 +20266,18 @@ namespace VULKAN_HPP_NAMESPACE , maxZ( maxZ_ ) {} - AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - AabbPositionsKHR& operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : 
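/*
 * A minimal sketch of the enhanced-mode entry points declared above
 * (vk::createInstanceUnique, vk::enumerateInstanceVersion,
 * Instance::enumeratePhysicalDevices). It assumes exceptions are enabled
 * and the default dispatcher is in use; error handling is elided.
 *
 *   #include <vulkan/vulkan.hpp>
 *
 *   int main()
 *   {
 *     uint32_t apiVersion = vk::enumerateInstanceVersion(); // plain uint32_t when exceptions are on
 *
 *     vk::ApplicationInfo    appInfo( "demo", 1, nullptr, 0, apiVersion );
 *     vk::InstanceCreateInfo createInfo( {}, &appInfo );
 *
 *     // UniqueInstance calls vkDestroyInstance automatically on scope exit.
 *     vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );
 *
 *     for ( vk::PhysicalDevice pd : instance->enumeratePhysicalDevices() )
 *       (void)pd; // inspect devices here
 *   }
 */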
AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
       return *this;
     }
@@ -21552,39 +20317,31 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
-
-    operator VkAabbPositionsKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkAabbPositionsKHR*>( this );
+      return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
     }
     operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkAabbPositionsKHR*>( this );
+      return *reinterpret_cast<VkAabbPositionsKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( AabbPositionsKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AabbPositionsKHR const & ) const = default;
#else
-    bool operator==( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( minX == rhs.minX )
-          && ( minY == rhs.minY )
-          && ( minZ == rhs.minZ )
-          && ( maxX == rhs.maxX )
-          && ( maxY == rhs.maxY )
-          && ( maxZ == rhs.maxZ );
+      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) &&
+             ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
     }
-    bool operator!=( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
#endif
-
-  public:
     float minX = {};
     float minY = {};
@@ -21592,344 +20349,466 @@ namespace VULKAN_HPP_NAMESPACE
     float maxX = {};
     float maxY = {};
     float maxZ = {};
-
   };
-  static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<AabbPositionsKHR>::value, "struct wrapper is not a standard layout!"
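/*
 * The two static_asserts above are what make the conversion operators safe:
 * the wrapper is standard-layout and bit-identical to the C struct, so the
 * reinterpret_cast is a zero-cost view. A short usage sketch:
 *
 *   vk::AabbPositionsKHR aabb;
 *   aabb.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f )
 *       .setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );
 *
 *   const VkAabbPositionsKHR & cAabb = aabb; // no copy, usable with C entry points
 */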
); + using AabbPositionsNV = AabbPositionsKHR; + + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default; + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( accelerationStructureKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = accelerationStructureKHR; + return *this; + } +#endif + + AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureKHR const & ) const = default; +#else + bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; + } + + bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; + } + + bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureKHR m_accelerationStructureKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type<ObjectType::eAccelerationStructureKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
-#ifdef VK_ENABLE_BETA_EXTENSIONS
   union DeviceOrHostAddressConstKHR
   {
-    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
     }
     DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
       : deviceAddress( deviceAddress_ )
     {}
-    DeviceOrHostAddressConstKHR( const void* hostAddress_ )
-      : hostAddress( hostAddress_ )
-    {}
+    DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
-    DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    DeviceOrHostAddressConstKHR &
+      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
     {
       deviceAddress = deviceAddress_;
       return *this;
     }
-    DeviceOrHostAddressConstKHR & setHostAddress( const void* hostAddress_ ) VULKAN_HPP_NOEXCEPT
+    DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
     {
       hostAddress = hostAddress_;
       return *this;
     }
-    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR &
+      operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
       return *this;
     }
-    operator VkDeviceOrHostAddressConstKHR const&() const
+    operator VkDeviceOrHostAddressConstKHR const &() const
     {
-      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR*>(this);
+      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR *>( this );
     }
     operator VkDeviceOrHostAddressConstKHR &()
     {
-      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR*>(this);
+      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR *>( this );
     }
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
     VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
-    const void* hostAddress;
+    const void * hostAddress;
#else
-    VkDeviceAddress deviceAddress;
-    const void* hostAddress;
-#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+    VkDeviceAddress deviceAddress;
+    const void *    hostAddress;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
   };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
   struct AccelerationStructureGeometryTrianglesDataKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
-    AccelerationStructureGeometryTrianglesDataKHR( VULKAN_HPP_NAMESPACE::Format vertexFormat_ =
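/*
 * DeviceOrHostAddressConstKHR (defined just above) is a plain union: exactly
 * one member is meaningful at a time, chosen by whether the acceleration
 * structure is built on the device or on the host. A sketch, where
 * bufferDeviceAddress is a placeholder obtained elsewhere (e.g. from
 * vkGetBufferDeviceAddress):
 *
 *   std::vector<float> hostVertices( 9 ); // 3 vertices * xyz, host build input
 *
 *   vk::DeviceOrHostAddressConstKHR addr;
 *   addr.setHostAddress( hostVertices.data() );      // host-side builds
 *   // addr.setDeviceAddress( bufferDeviceAddress ); // device-side builds
 */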
VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, - VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryTrianglesDataKHR( + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + uint32_t maxVertex_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT : vertexFormat( vertexFormat_ ) , vertexData( vertexData_ ) , vertexStride( vertexStride_ ) + , maxVertex( maxVertex_ ) , indexType( indexType_ ) , indexData( indexData_ ) , transformData( transformData_ ) {} - AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryTrianglesDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryTrianglesDataKHR & + operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR & + operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryTrianglesDataKHR ) - offsetof( AccelerationStructureGeometryTrianglesDataKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - AccelerationStructureGeometryTrianglesDataKHR& operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; 
} - AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT + { + maxVertex = maxVertex_; + return *this; + } + + AccelerationStructureGeometryTrianglesDataKHR & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } - AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryTrianglesDataKHR & + setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT { transformData = transformData_; return *this; } - - operator VkAccelerationStructureGeometryTrianglesDataKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; - VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; - VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + uint32_t maxVertex = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; - }; - static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == + sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = AccelerationStructureGeometryTrianglesDataKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureGeometryAabbsDataKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryAabbsDataKHR; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) , stride( stride_ ) {} - AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryAabbsDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryAabbsDataKHR & + operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR & + operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryAabbsDataKHR ) - offsetof( AccelerationStructureGeometryAabbsDataKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - AccelerationStructureGeometryAabbsDataKHR& operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - AccelerationStructureGeometryAabbsDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } - AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryAabbsDataKHR & + setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } - - operator VkAccelerationStructureGeometryAabbsDataKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: - const 
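/*
 * A sketch of filling the triangles geometry defined above. Note the new
 * maxVertex member (highest vertex index a build may address), which the
 * old beta-era layout did not carry. vertexAddr, indexAddr and vertexCount
 * are placeholders:
 *
 *   auto triangles = vk::AccelerationStructureGeometryTrianglesDataKHR()
 *                      .setVertexFormat( vk::Format::eR32G32B32Sfloat )
 *                      .setVertexData( vertexAddr )
 *                      .setVertexStride( 3 * sizeof( float ) )
 *                      .setMaxVertex( vertexCount - 1 )
 *                      .setIndexType( vk::IndexType::eUint32 )
 *                      .setIndexData( indexAddr );
 */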
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; - VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; }; - static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == + sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = AccelerationStructureGeometryAabbsDataKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureGeometryInstancesDataKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryInstancesDataKHR; - AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) + VULKAN_HPP_NOEXCEPT : arrayOfPointers( arrayOfPointers_ ) , data( data_ ) {} - AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryInstancesDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryInstancesDataKHR & + operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR & + operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryInstancesDataKHR ) - offsetof( AccelerationStructureGeometryInstancesDataKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - AccelerationStructureGeometryInstancesDataKHR& operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - AccelerationStructureGeometryInstancesDataKHR & 
setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & + setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT { arrayOfPointers = arrayOfPointers_; return *this; } - AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryInstancesDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } - - operator VkAccelerationStructureGeometryInstancesDataKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; - }; - static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == + sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType { using Type = AccelerationStructureGeometryInstancesDataKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS union AccelerationStructureGeometryDataKHR { - AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); } - AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) : triangles( triangles_ ) {} @@ -21937,100 +20816,106 @@ namespace VULKAN_HPP_NAMESPACE : aabbs( aabbs_ ) {} - AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) : instances( instances_ ) {} - AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR & setTriangles( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT { - memcpy( &triangles, &triangles_, sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR)); + triangles = triangles_; return *this; } - AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR & + setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT { - memcpy( &aabbs, &aabbs_, sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR)); + aabbs = aabbs_; return *this; } - AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryDataKHR & setInstances( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT { - memcpy( &instances, &instances_, sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR)); + instances = instances_; return *this; } - VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & + operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) ); return *this; } - operator VkAccelerationStructureGeometryDataKHR const&() const + operator VkAccelerationStructureGeometryDataKHR const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator 
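/*
 * With unrestricted unions the setters above become plain assignments rather
 * than memcpy. Exactly one member of AccelerationStructureGeometryDataKHR is
 * active; pick it to match the geometryType that will be used:
 *
 *   vk::AccelerationStructureGeometryDataKHR geometryData;
 *   geometryData.setTriangles( triangles );    // bottom-level triangle geometry
 *   // geometryData.setAabbs( aabbs );         // bottom-level AABBs
 *   // geometryData.setInstances( instances ); // top-level instances
 */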
VkAccelerationStructureGeometryDataKHR &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; - VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; #else VkAccelerationStructureGeometryTrianglesDataKHR triangles; - VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; VkAccelerationStructureGeometryInstancesDataKHR instances; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureGeometryKHR { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; - AccelerationStructureGeometryKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, - VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, - VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureGeometryKHR( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT : geometryType( geometryType_ ) , geometry( geometry_ ) , flags( flags_ ) {} - AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureGeometryKHR ) - offsetof( AccelerationStructureGeometryKHR, pNext ) ); - return *this; - } + AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AccelerationStructureGeometryKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AccelerationStructureGeometryKHR& operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & + operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureGeometryKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & + setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } - AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) 
VULKAN_HPP_NOEXCEPT + AccelerationStructureGeometryKHR & + setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; @@ -22042,53 +20927,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAccelerationStructureGeometryKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; - VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; - + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; }; - static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = AccelerationStructureGeometryKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS union DeviceOrHostAddressKHR { - DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const& rhs ) VULKAN_HPP_NOEXCEPT + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); } - DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) - : deviceAddress( deviceAddress_ ) + DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} - DeviceOrHostAddressKHR( void* hostAddress_ ) - : hostAddress( hostAddress_ ) - {} + DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { @@ -22096,557 +20973,694 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceOrHostAddressKHR & setHostAddress( void* hostAddress_ ) VULKAN_HPP_NOEXCEPT + DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return *this; } - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & + operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); + memcpy( static_cast( this ), &rhs, sizeof( 
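/*
 * A sketch tying the pieces together: AccelerationStructureGeometryKHR pairs
 * a geometryType tag with the matching union member set above ('triangles'
 * and 'geometryData' are the placeholders from the previous sketches):
 *
 *   auto geometry = vk::AccelerationStructureGeometryKHR()
 *                     .setGeometryType( vk::GeometryTypeKHR::eTriangles )
 *                     .setGeometry( geometryData )
 *                     .setFlags( vk::GeometryFlagBitsKHR::eOpaque );
 */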
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) ); return *this; } - operator VkDeviceOrHostAddressKHR const&() const + operator VkDeviceOrHostAddressKHR const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkDeviceOrHostAddressKHR &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; - void* hostAddress; + void * hostAddress; #else - VkDeviceAddress deviceAddress; - void* hostAddress; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + VkDeviceAddress deviceAddress; + void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS struct AccelerationStructureBuildGeometryInfoKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureBuildGeometryInfoKHR; - AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, - VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 update_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ = {}, - uint32_t geometryCount_ = {}, - const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , flags( flags_ ) - , update( update_ ) + , mode( mode_ ) , srcAccelerationStructure( srcAccelerationStructure_ ) , dstAccelerationStructure( dstAccelerationStructure_ ) - , geometryArrayOfPointers( geometryArrayOfPointers_ ) , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) , ppGeometries( ppGeometries_ ) , scratchData( scratchData_ ) {} - AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : 
AccelerationStructureBuildGeometryInfoKHR( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) + : type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( static_cast( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) ) + , pGeometries( geometries_.data() ) + , ppGeometries( pGeometries_.data() ) + , scratchData( scratchData_ ) { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureBuildGeometryInfoKHR ) - offsetof( AccelerationStructureBuildGeometryInfoKHR, pNext ) ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 ); +# else + if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildGeometryInfoKHR & + operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR & + operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - AccelerationStructureBuildGeometryInfoKHR& operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - AccelerationStructureBuildGeometryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setUpdate( VULKAN_HPP_NAMESPACE::Bool32 update_ 
) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { - update = update_; + mode = mode_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { dstAccelerationStructure = dstAccelerationStructure_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setGeometryArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers_ ) VULKAN_HPP_NOEXCEPT - { - geometryArrayOfPointers = geometryArrayOfPointers_; - return *this; - } - AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT { geometryCount = geometryCount_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildGeometryInfoKHR & + setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT { ppGeometries = ppGeometries_; return *this; } - AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setPGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( pGeometries_.size() ); + ppGeometries = pGeometries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureBuildGeometryInfoKHR & + setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } - - operator VkAccelerationStructureBuildGeometryInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 update = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryArrayOfPointers = {}; - uint32_t geometryCount = {}; - const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const* ppGeometries = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; - + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; }; - static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == + sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
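/*
 * The array-taking constructor above enforces that at most one of
 * geometries_ and pGeometries_ is non-empty (LogicError, or an assert under
 * VULKAN_HPP_NO_EXCEPTIONS), and derives geometryCount from whichever is
 * used. The setters follow the same pattern; 'geometry' is the placeholder
 * from the previous sketch:
 *
 *   vk::AccelerationStructureBuildGeometryInfoKHR buildInfo;
 *   buildInfo.setType( vk::AccelerationStructureTypeKHR::eBottomLevel )
 *            .setFlags( vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
 *            .setMode( vk::BuildAccelerationStructureModeKHR::eBuild ) // replaces the old Bool32 'update'
 *            .setGeometries( geometry ); // sets geometryCount = 1 and pGeometries
 *
 *   // Do not also call setPGeometries(): only one of the two arrays may be given.
 */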
); template <> struct CppType { using Type = AccelerationStructureBuildGeometryInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureBuildOffsetInfoKHR + struct AccelerationStructureBuildRangeInfoKHR { - - - VULKAN_HPP_CONSTEXPR AccelerationStructureBuildOffsetInfoKHR( uint32_t primitiveCount_ = {}, - uint32_t primitiveOffset_ = {}, - uint32_t firstVertex_ = {}, - uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_ = {}, + uint32_t primitiveOffset_ = {}, + uint32_t firstVertex_ = {}, + uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT : primitiveCount( primitiveCount_ ) , primitiveOffset( primitiveOffset_ ) , firstVertex( firstVertex_ ) , transformOffset( transformOffset_ ) {} - AccelerationStructureBuildOffsetInfoKHR( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - AccelerationStructureBuildOffsetInfoKHR& operator=( VkAccelerationStructureBuildOffsetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildRangeInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & + operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildRangeInfoKHR & + operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureBuildOffsetInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT { primitiveCount = primitiveCount_; return *this; } - AccelerationStructureBuildOffsetInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT { primitiveOffset = primitiveOffset_; return *this; } - AccelerationStructureBuildOffsetInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; } - AccelerationStructureBuildOffsetInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT { transformOffset = transformOffset_; return *this; } - - operator VkAccelerationStructureBuildOffsetInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkAccelerationStructureBuildOffsetInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - 
auto operator<=>( AccelerationStructureBuildOffsetInfoKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
#else
-    bool operator==( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( primitiveCount == rhs.primitiveCount )
-          && ( primitiveOffset == rhs.primitiveOffset )
-          && ( firstVertex == rhs.firstVertex )
-          && ( transformOffset == rhs.transformOffset );
+      return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) &&
+             ( firstVertex == rhs.firstVertex ) && ( transformOffset == rhs.transformOffset );
     }
-    bool operator!=( AccelerationStructureBuildOffsetInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
#endif
-
-  public:
-    uint32_t primitiveCount = {};
+    uint32_t primitiveCount = {};
     uint32_t primitiveOffset = {};
-    uint32_t firstVertex = {};
+    uint32_t firstVertex = {};
     uint32_t transformOffset = {};
-
   };
-  static_assert( sizeof( AccelerationStructureBuildOffsetInfoKHR ) == sizeof( VkAccelerationStructureBuildOffsetInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureBuildOffsetInfoKHR>::value, "struct wrapper is not a standard layout!" );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  static_assert( sizeof( AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureBuildRangeInfoKHR>::value,
+                 "struct wrapper is not a standard layout!"
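/*
 * One AccelerationStructureBuildRangeInfoKHR (formerly BuildOffsetInfoKHR)
 * is supplied per geometry. For indexed triangles, primitiveCount is the
 * triangle count and primitiveOffset is a byte offset into the index data;
 * indexCount is a placeholder:
 *
 *   vk::AccelerationStructureBuildRangeInfoKHR range;
 *   range.setPrimitiveCount( indexCount / 3 )
 *        .setPrimitiveOffset( 0 )
 *        .setFirstVertex( 0 )
 *        .setTransformOffset( 0 );
 */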
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  struct AccelerationStructureCreateGeometryTypeInfoKHR
+  struct AccelerationStructureBuildSizesInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eAccelerationStructureBuildSizesInfoKHR;

-    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateGeometryTypeInfoKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
-                                                                         uint32_t maxPrimitiveCount_ = {},
-                                                                         VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
-                                                                         uint32_t maxVertexCount_ = {},
-                                                                         VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
-                                                                         VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ = {} ) VULKAN_HPP_NOEXCEPT
-      : geometryType( geometryType_ )
-      , maxPrimitiveCount( maxPrimitiveCount_ )
-      , indexType( indexType_ )
-      , maxVertexCount( maxVertexCount_ )
-      , vertexFormat( vertexFormat_ )
-      , allowsTransforms( allowsTransforms_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(
+      VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_         = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_          = {} ) VULKAN_HPP_NOEXCEPT
+      : accelerationStructureSize( accelerationStructureSize_ )
+      , updateScratchSize( updateScratchSize_ )
+      , buildScratchSize( buildScratchSize_ )
     {}

-    AccelerationStructureCreateGeometryTypeInfoKHR & operator=( AccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureBuildSizesInfoKHR(
+          *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR &
+      operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureBuildSizesInfoKHR &
+      operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) - offsetof( AccelerationStructureCreateGeometryTypeInfoKHR, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
       return *this;
     }

-    AccelerationStructureCreateGeometryTypeInfoKHR( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    AccelerationStructureCreateGeometryTypeInfoKHR& operator=( VkAccelerationStructureCreateGeometryTypeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR const *>(&rhs);
-      return *this;
-    }
-
-    AccelerationStructureCreateGeometryTypeInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    AccelerationStructureCreateGeometryTypeInfoKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureBuildSizesInfoKHR &
+      setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT
     {
-      geometryType = geometryType_;
+      accelerationStructureSize = accelerationStructureSize_;
       return *this;
     }

-    AccelerationStructureCreateGeometryTypeInfoKHR & setMaxPrimitiveCount( uint32_t maxPrimitiveCount_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureBuildSizesInfoKHR &
+      setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT
     {
-      maxPrimitiveCount = maxPrimitiveCount_;
+      updateScratchSize = updateScratchSize_;
       return *this;
     }

-    AccelerationStructureCreateGeometryTypeInfoKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureBuildSizesInfoKHR &
+      setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
     {
-      indexType = indexType_;
+      buildScratchSize = buildScratchSize_;
       return *this;
     }

-    AccelerationStructureCreateGeometryTypeInfoKHR & setMaxVertexCount( uint32_t maxVertexCount_ ) VULKAN_HPP_NOEXCEPT
+    operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      maxVertexCount = maxVertexCount_;
-      return *this;
+      return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR *>( this );
     }

-    AccelerationStructureCreateGeometryTypeInfoKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+    operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      vertexFormat = vertexFormat_;
-      return *this;
+      return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( this );
     }

-    AccelerationStructureCreateGeometryTypeInfoKHR & setAllowsTransforms( VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms_ ) VULKAN_HPP_NOEXCEPT
-    {
-      allowsTransforms = allowsTransforms_;
-      return *this;
-    }
-
-
-    operator VkAccelerationStructureCreateGeometryTypeInfoKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkAccelerationStructureCreateGeometryTypeInfoKHR*>( this );
-    }
-
-    operator VkAccelerationStructureCreateGeometryTypeInfoKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkAccelerationStructureCreateGeometryTypeInfoKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( AccelerationStructureCreateGeometryTypeInfoKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default;
 #else
-    bool operator==( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( geometryType == rhs.geometryType )
-          && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
-          && ( indexType == rhs.indexType )
-          && ( maxVertexCount == rhs.maxVertexCount )
-          && ( vertexFormat == rhs.vertexFormat )
-          && ( allowsTransforms == rhs.allowsTransforms );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( accelerationStructureSize == rhs.accelerationStructureSize ) &&
+             ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize );
     }

-    bool operator!=( AccelerationStructureCreateGeometryTypeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
-    uint32_t maxPrimitiveCount = {};
-    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
-    uint32_t maxVertexCount = {};
-    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
-    VULKAN_HPP_NAMESPACE::Bool32 allowsTransforms = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    accelerationStructureSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    updateScratchSize         = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    buildScratchSize          = {};
   };
-  static_assert( sizeof( AccelerationStructureCreateGeometryTypeInfoKHR ) == sizeof( VkAccelerationStructureCreateGeometryTypeInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureCreateGeometryTypeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureBuildSizesInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::eAccelerationStructureCreateGeometryTypeInfoKHR>
+  struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
   {
-    using Type = AccelerationStructureCreateGeometryTypeInfoKHR;
+    using Type = AccelerationStructureBuildSizesInfoKHR;
+  };
+
+  class Buffer
+  {
+  public:
+    using CType = VkBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Buffer() = default;
+    VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = buffer;
+      return *this;
+    }
+#endif
+
+    Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Buffer const & ) const = default;
+#else
+    bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == rhs.m_buffer;
+    }
+
+    bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != rhs.m_buffer;
+    }
+
+    bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer < rhs.m_buffer;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBuffer m_buffer = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ),
+                 "handle and wrapper have different size!" );
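// Usage sketch (illustrative, not part of the generated header): the Buffer
// wrapper is a trivially copyable handle; testing it in a boolean context
// checks against VK_NULL_HANDLE.
vk::Buffer buffer{};  // default-constructed handle == VK_NULL_HANDLE
if ( !buffer )
{
  // not yet created; e.g. assign the result of device.createBuffer( createInfo )
}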
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eBuffer>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBuffer>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+  template <>
+  struct CppType<VkBuffer, VK_NULL_HANDLE>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Buffer;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Buffer>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
   };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/

-#ifdef VK_ENABLE_BETA_EXTENSIONS
   struct AccelerationStructureCreateInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eAccelerationStructureCreateInfoKHR;

-    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {},
-                                                             VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
-                                                             VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {},
-                                                             uint32_t maxGeometryCount_ = {},
-                                                             const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ = {},
-                                                             VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
-      : compactedSize( compactedSize_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {},
+                                          VULKAN_HPP_NAMESPACE::Buffer     buffer_ = {},
+                                          VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                          VULKAN_HPP_NAMESPACE::DeviceSize size_   = {},
+                                          VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ =
+                                            VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
+                                          VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+      : createFlags( createFlags_ )
+      , buffer( buffer_ )
+      , offset( offset_ )
+      , size( size_ )
       , type( type_ )
-      , flags( flags_ )
-      , maxGeometryCount( maxGeometryCount_ )
-      , pGeometryInfos( pGeometryInfos_ )
       , deviceAddress( deviceAddress_ )
     {}

-    AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateInfoKHR ) - offsetof( AccelerationStructureCreateInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;

     AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    AccelerationStructureCreateInfoKHR& operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
+      operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureCreateInfoKHR &
+      operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
       return *this;
     }

-    AccelerationStructureCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
-    AccelerationStructureCreateInfoKHR & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureCreateInfoKHR &
+      setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
     {
-      compactedSize = compactedSize_;
+      createFlags = createFlags_;
       return *this;
     }

-    AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoKHR &
+      setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
     {
       type = type_;
       return *this;
     }

-    AccelerationStructureCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    AccelerationStructureCreateInfoKHR & setMaxGeometryCount( uint32_t maxGeometryCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      maxGeometryCount = maxGeometryCount_;
-      return *this;
-    }
-
-    AccelerationStructureCreateInfoKHR & setPGeometryInfos( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pGeometryInfos = pGeometryInfos_;
-      return *this;
-    }
-
-    AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureCreateInfoKHR &
+      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
     {
       deviceAddress = deviceAddress_;
       return *this;
     }

-
-    operator VkAccelerationStructureCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( this );
     }

     operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( AccelerationStructureCreateInfoKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default;
 #else
-    bool operator==( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( compactedSize == rhs.compactedSize )
-          && ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( maxGeometryCount == rhs.maxGeometryCount )
-          && ( pGeometryInfos == rhs.pGeometryInfos )
-          && ( deviceAddress == rhs.deviceAddress );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) &&
+             ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ) && ( type == rhs.type ) &&
+             ( deviceAddress == rhs.deviceAddress );
     }

-    bool operator!=( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
-    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
-    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
-    uint32_t maxGeometryCount = {};
-    const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
+    VULKAN_HPP_NAMESPACE::Buffer                              buffer      = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                          offset      = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                          size        = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR        type =
+      VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
     VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
-
   };
-  static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
   {
     using Type = AccelerationStructureCreateInfoKHR;
   };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/

   struct GeometryTrianglesNV
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;

-    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {},
-                                              VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
-                                              uint32_t vertexCount_ = {},
-                                              VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
-                                              VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
-                                              VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
-                                              VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
-                                              uint32_t indexCount_ = {},
-                                              VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
-                                              VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
-                                              VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer     vertexData_   = {},
+                           VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
+                           uint32_t                         vertexCount_  = {},
+                           VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
+                           VULKAN_HPP_NAMESPACE::Format     vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                           VULKAN_HPP_NAMESPACE::Buffer     indexData_    = {},
+                           VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_  = {},
+                           uint32_t                         indexCount_   = {},
+                           VULKAN_HPP_NAMESPACE::IndexType  indexType_    = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+                           VULKAN_HPP_NAMESPACE::Buffer     transformData_   = {},
+                           VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
       : vertexData( vertexData_ )
       , vertexOffset( vertexOffset_ )
       , vertexCount( vertexCount_ )
@@ -22660,24 +21674,23 @@ namespace VULKAN_HPP_NAMESPACE
       , transformOffset( transformOffset_ )
     {}

-    GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( GeometryTrianglesNV ) - offsetof( GeometryTrianglesNV, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
+      operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
       return *this;
     }

-    GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -22749,63 +21762,52 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
+      return *reinterpret_cast<const VkGeometryTrianglesNV *>( this );
     }

     operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
+      return *reinterpret_cast<VkGeometryTrianglesNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( GeometryTrianglesNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GeometryTrianglesNV const & ) const = default;
 #else
-    bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( vertexData == rhs.vertexData )
-          && ( vertexOffset == rhs.vertexOffset )
-          && ( vertexCount == rhs.vertexCount )
-          && ( vertexStride == rhs.vertexStride )
-          && ( vertexFormat == rhs.vertexFormat )
-          && ( indexData == rhs.indexData )
-          && ( indexOffset == rhs.indexOffset )
-          && ( indexCount == rhs.indexCount )
-          && ( indexType == rhs.indexType )
-          && ( transformData == rhs.transformData )
-          && ( transformOffset == rhs.transformOffset );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) &&
+             ( vertexOffset == rhs.vertexOffset ) && ( vertexCount == rhs.vertexCount ) &&
+             ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) &&
+             ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) &&
+             ( indexType == rhs.indexType ) && ( transformData == rhs.transformData ) &&
+             ( transformOffset == rhs.transformOffset );
     }

-    bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
-    uint32_t vertexCount = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
-    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
-    VULKAN_HPP_NAMESPACE::Buffer indexData = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
-    uint32_t indexCount = {};
-    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
-    VULKAN_HPP_NAMESPACE::Buffer transformData = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eGeometryTrianglesNV;
+    const void *                        pNext        = {};
+    VULKAN_HPP_NAMESPACE::Buffer        vertexData   = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    vertexOffset = {};
+    uint32_t                            vertexCount  = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    vertexStride = {};
+    VULKAN_HPP_NAMESPACE::Format        vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Buffer        indexData    = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    indexOffset  = {};
+    uint32_t                            indexCount   = {};
+    VULKAN_HPP_NAMESPACE::IndexType     indexType    = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::Buffer        transformData   = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    transformOffset = {};
   };
-  static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
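// Usage sketch (illustrative only; the buffer handles and counts are assumed
// to exist): describing an indexed triangle mesh for the NV ray-tracing path.
vk::GeometryTrianglesNV triangles = vk::GeometryTrianglesNV{}
                                      .setVertexData( vertexBuffer )
                                      .setVertexCount( vertexCount )
                                      .setVertexStride( sizeof( float ) * 3 )  // tightly packed vec3 positions
                                      .setVertexFormat( vk::Format::eR32G32B32Sfloat )
                                      .setIndexData( indexBuffer )
                                      .setIndexCount( indexCount )
                                      .setIndexType( vk::IndexType::eUint32 );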
   template <>
@@ -22816,37 +21818,36 @@ namespace VULKAN_HPP_NAMESPACE

   struct GeometryAABBNV
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;

-    VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {},
-                                         uint32_t numAABBs_ = {},
-                                         uint32_t stride_ = {},
-                                         VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer     aabbData_ = {},
+                                         uint32_t                         numAABBs_ = {},
+                                         uint32_t                         stride_   = {},
+                                         VULKAN_HPP_NAMESPACE::DeviceSize offset_   = {} ) VULKAN_HPP_NOEXCEPT
       : aabbData( aabbData_ )
       , numAABBs( numAABBs_ )
       , stride( stride_ )
       , offset( offset_ )
     {}

-    GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( GeometryAABBNV ) - offsetof( GeometryAABBNV, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
       return *this;
     }

-    GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -22876,47 +21877,38 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryAABBNV*>( this );
+      return *reinterpret_cast<const VkGeometryAABBNV *>( this );
     }

     operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryAABBNV*>( this );
+      return *reinterpret_cast<VkGeometryAABBNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( GeometryAABBNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GeometryAABBNV const & ) const = default;
 #else
-    bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( aabbData == rhs.aabbData )
-          && ( numAABBs == rhs.numAABBs )
-          && ( stride == rhs.stride )
-          && ( offset == rhs.offset );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) &&
+             ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && ( offset == rhs.offset );
     }

-    bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
-    uint32_t numAABBs = {};
-    uint32_t stride = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eGeometryAabbNV;
+    const void *                        pNext    = {};
+    VULKAN_HPP_NAMESPACE::Buffer        aabbData = {};
+    uint32_t                            numAABBs = {};
+    uint32_t                            stride   = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    offset   = {};
   };
   static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
@@ -22929,22 +21921,25 @@ namespace VULKAN_HPP_NAMESPACE

   struct GeometryDataNV
   {
-
-
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
     VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {},
-                                         VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
+                                         VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
       : triangles( triangles_ )
       , aabbs( aabbs_ )
     {}

-    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
       return *this;
     }
@@ -22960,74 +21955,68 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryDataNV*>( this );
+      return *reinterpret_cast<const VkGeometryDataNV *>( this );
     }

     operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryDataNV*>( this );
+      return *reinterpret_cast<VkGeometryDataNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( GeometryDataNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GeometryDataNV const & ) const = default;
 #else
-    bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( triangles == rhs.triangles )
-          && ( aabbs == rhs.aabbs );
+      return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs );
     }

-    bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
     VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
-    VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
-
+    VULKAN_HPP_NAMESPACE::GeometryAABBNV      aabbs     = {};
   };
   static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );

   struct GeometryNV
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;

-    VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
-                                     VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {},
-                                     VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeometryNV(
+      VULKAN_HPP_NAMESPACE::GeometryTypeKHR  geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
+      VULKAN_HPP_NAMESPACE::GeometryDataNV   geometry_     = {},
+      VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_        = {} ) VULKAN_HPP_NOEXCEPT
       : geometryType( geometryType_ )
       , geometry( geometry_ )
       , flags( flags_ )
     {}

-    GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( GeometryNV ) - offsetof( GeometryNV, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
       return *this;
     }

-    GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -23051,45 +22040,37 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkGeometryNV*>( this );
+      return *reinterpret_cast<const VkGeometryNV *>( this );
     }

     operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkGeometryNV*>( this );
+      return *reinterpret_cast<VkGeometryNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( GeometryNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GeometryNV const & ) const = default;
 #else
-    bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( geometryType == rhs.geometryType )
-          && ( geometry == rhs.geometry )
-          && ( flags == rhs.flags );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) &&
+             ( geometry == rhs.geometry ) && ( flags == rhs.flags );
     }

-    bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
-    VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
-    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType    sType        = StructureType::eGeometryNV;
+    const void *                           pNext        = {};
+    VULKAN_HPP_NAMESPACE::GeometryTypeKHR  geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+    VULKAN_HPP_NAMESPACE::GeometryDataNV   geometry     = {};
+    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags        = {};
   };
   static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
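// Sketch (the `triangles` value is assumed from the previous example): only
// the GeometryDataNV member selected by geometryType is read by the driver.
vk::GeometryNV geometry = vk::GeometryNV{}
                            .setGeometryType( vk::GeometryTypeKHR::eTriangles )
                            .setGeometry( vk::GeometryDataNV{}.setTriangles( triangles ) )
                            .setFlags( vk::GeometryFlagBitsKHR::eOpaque );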
@@ -23102,14 +22083,16 @@ namespace VULKAN_HPP_NAMESPACE

   struct AccelerationStructureInfoNV
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;

-    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {},
-                                                      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {},
-                                                      uint32_t instanceCount_ = {},
-                                                      uint32_t geometryCount_ = {},
-                                                      const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV       type_          = {},
+                                   VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_         = {},
+                                   uint32_t                                                instanceCount_ = {},
+                                   uint32_t                                                geometryCount_ = {},
+                                   const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT
       : type( type_ )
       , flags( flags_ )
       , instanceCount( instanceCount_ )
@@ -23117,24 +22100,38 @@ namespace VULKAN_HPP_NAMESPACE
       , pGeometries( pGeometries_ )
     {}

-    AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
+    {}

-    AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureInfoNV(
+      VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV       type_,
+      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_,
+      uint32_t                                                instanceCount_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
+      : type( type_ )
+      , flags( flags_ )
+      , instanceCount( instanceCount_ )
+      , geometryCount( static_cast<uint32_t>( geometries_.size() ) )
+      , pGeometries( geometries_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &
+      operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
       return *this;
     }

-    AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -23146,7 +22143,8 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureInfoNV &
+      setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
@@ -23164,58 +22162,63 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureInfoNV &
+      setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
     {
       pGeometries = pGeometries_;
       return *this;
     }

-
-    operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureInfoNV & setGeometries(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
+      VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
+      geometryCount = static_cast<uint32_t>( geometries_.size() );
+      pGeometries   = geometries_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureInfoNV *>( this );
     }

     operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
+      return *reinterpret_cast<VkAccelerationStructureInfoNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( AccelerationStructureInfoNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureInfoNV const & ) const = default;
 #else
-    bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( instanceCount == rhs.instanceCount )
-          && ( geometryCount == rhs.geometryCount )
-          && ( pGeometries == rhs.pGeometries );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) &&
+             ( instanceCount == rhs.instanceCount ) && ( geometryCount == rhs.geometryCount ) &&
+             ( pGeometries == rhs.pGeometries );
     }

-    bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {};
-    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
-    uint32_t instanceCount = {};
-    uint32_t geometryCount = {};
-    const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType                     sType         = StructureType::eAccelerationStructureInfoNV;
+    const void *                                            pNext         = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV       type          = {};
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags         = {};
+    uint32_t                                                instanceCount = {};
+    uint32_t                                                geometryCount = {};
+    const VULKAN_HPP_NAMESPACE::GeometryNV *                pGeometries   = {};
   };
-  static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
@@ -23225,90 +22228,90 @@ namespace VULKAN_HPP_NAMESPACE
   {
     using Type = AccelerationStructureInfoNV;
   };
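// Sketch (the `geometry` value is assumed from the previous example): the
// enhanced-mode setGeometries overload keeps geometryCount and pGeometries in
// sync from a single array proxy.
std::array<vk::GeometryNV, 1> geometries = { geometry };
vk::AccelerationStructureInfoNV info =
    vk::AccelerationStructureInfoNV{}
        .setType( vk::AccelerationStructureTypeNV::eBottomLevel )
        .setGeometries( geometries );  // sets geometryCount and pGeometries together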
   struct AccelerationStructureCreateInfoNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eAccelerationStructureCreateInfoNV;

-    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {},
-                                                            VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::DeviceSize                  compactedSize_ = {},
+      VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_          = {} ) VULKAN_HPP_NOEXCEPT
       : compactedSize( compactedSize_ )
       , info( info_ )
     {}

-    AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV &
+      operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
       return *this;
     }

-    AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureCreateInfoNV &
+      setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
     {
       compactedSize = compactedSize_;
       return *this;
     }

-    AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureCreateInfoNV &
+      setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
     {
       info = info_;
       return *this;
     }

-
-    operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( this );
     }

     operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( AccelerationStructureCreateInfoNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default;
 #else
-    bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( compactedSize == rhs.compactedSize )
-          && ( info == rhs.info );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) &&
+             ( info == rhs.info );
     }

-    bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
-    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType               sType         = StructureType::eAccelerationStructureCreateInfoNV;
+    const void *                                      pNext         = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                  compactedSize = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info          = {};
   };
-  static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
   template <>
   struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
@@ -23316,161 +22319,164 @@ namespace VULKAN_HPP_NAMESPACE
   {
     using Type = AccelerationStructureCreateInfoNV;
   };

-#ifdef VK_ENABLE_BETA_EXTENSIONS
   struct AccelerationStructureDeviceAddressInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eAccelerationStructureDeviceAddressInfoKHR;

-    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(
+      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
       : accelerationStructure( accelerationStructure_ )
     {}

-    AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(
+      AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureDeviceAddressInfoKHR(
+          *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR &
+      operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureDeviceAddressInfoKHR &
+      operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureDeviceAddressInfoKHR ) - offsetof( AccelerationStructureDeviceAddressInfoKHR, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
       return *this;
     }

-    AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    AccelerationStructureDeviceAddressInfoKHR& operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>(&rhs);
-      return *this;
-    }
-
-    AccelerationStructureDeviceAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure(
+      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
     {
       accelerationStructure = accelerationStructure_;
       return *this;
     }

-
-    operator VkAccelerationStructureDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+      return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( this );
     }

     operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
+      return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default;
 #else
-    bool operator==( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( accelerationStructure == rhs.accelerationStructure );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( accelerationStructure == rhs.accelerationStructure );
     }

-    bool operator!=( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+    const void *                        pNext = {};
     VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
-
   };
-  static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<AccelerationStructureDeviceAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) ==
+                   sizeof( VkAccelerationStructureDeviceAddressInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureDeviceAddressInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
   {
     using Type = AccelerationStructureDeviceAddressInfoKHR;
   };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/

   struct TransformMatrixKHR
   {
-
-
-    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array<std::array<float,4>,3> const& matrix_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT
      : matrix( matrix_ )
    {}

-    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    TransformMatrixKHR& operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR &
+      operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
       return *this;
     }

-    TransformMatrixKHR & setMatrix( std::array<std::array<float,4>,3> matrix_ ) VULKAN_HPP_NOEXCEPT
+    TransformMatrixKHR & setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT
     {
       matrix = matrix_;
       return *this;
     }

-
-    operator VkTransformMatrixKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkTransformMatrixKHR*>( this );
+      return *reinterpret_cast<const VkTransformMatrixKHR *>( this );
     }

     operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkTransformMatrixKHR*>( this );
+      return *reinterpret_cast<VkTransformMatrixKHR *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( TransformMatrixKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( TransformMatrixKHR const & ) const = default;
 #else
-    bool operator==( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( matrix == rhs.matrix );
     }

-    bool operator!=( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
     VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
-
   };
-  static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
+
   using TransformMatrixNV = TransformMatrixKHR;
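// Sketch: TransformMatrixKHR wraps a row-major 3x4 matrix (rotation plus
// translation). An identity transform, written out explicitly:
vk::TransformMatrixKHR identity = vk::TransformMatrixKHR{}.setMatrix(
    std::array<std::array<float, 4>, 3>{ { { 1.0f, 0.0f, 0.0f, 0.0f },
                                           { 0.0f, 1.0f, 0.0f, 0.0f },
                                           { 0.0f, 0.0f, 1.0f, 0.0f } } } );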
setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } - AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & + setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - flags = *reinterpret_cast(&flags_); + flags = *reinterpret_cast( &flags_ ); return *this; } - AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureInstanceKHR & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } - - operator VkAccelerationStructureInstanceKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureInstanceKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default; #else - bool operator==( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( transform == rhs.transform ) - && ( instanceCustomIndex == rhs.instanceCustomIndex ) - && ( mask == rhs.mask ) - && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) - && ( flags == rhs.flags ) - && ( accelerationStructureReference == rhs.accelerationStructureReference ); + return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && + ( mask == rhs.mask ) && + ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && + ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); } - bool operator!=( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; - uint32_t instanceCustomIndex : 24; - uint32_t mask : 8; - uint32_t instanceShaderBindingTableRecordOffset : 24; - VkGeometryInstanceFlagsKHR flags : 8; - uint64_t accelerationStructureReference = {}; - + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; }; - static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
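    // The bit-field block above reproduces the packed 64-byte VkAccelerationStructureInstanceKHR
    // layout: a row-major 3x4 transform, then instanceCustomIndex:24, mask:8,
    // instanceShaderBindingTableRecordOffset:24, flags:8, and a 64-bit acceleration-structure
    // reference. A minimal sketch of filling one TLAS instance through the wrapper's setters;
    // `blasAddress` (the buffer device address of an already built BLAS) is an assumed placeholder:
    //
    //   VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform( std::array<std::array<float, 4>, 3>{ {
    //     { { 1.0f, 0.0f, 0.0f, 0.0f } },   // rows carry rotation/scale,
    //     { { 0.0f, 1.0f, 0.0f, 0.0f } },   // the fourth column the translation
    //     { { 0.0f, 0.0f, 1.0f, 0.0f } },
    //   } } );
    //
    //   VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR instance{};
    //   instance.setTransform( transform )
    //     .setInstanceCustomIndex( 0 )   // only 24 bits are usable
    //     .setMask( 0xFF )               // visible to every ray cull mask
    //     .setInstanceShaderBindingTableRecordOffset( 0 )
    //     .setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
    //     .setAccelerationStructureReference( blasAddress );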
); + static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureMemoryRequirementsInfoKHR + class AccelerationStructureNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR; + public: + using CType = VkAccelerationStructureNV; - VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject, - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , buildType( buildType_ ) - , accelerationStructure( accelerationStructure_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default; + VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( accelerationStructureNV ) {} - AccelerationStructureMemoryRequirementsInfoKHR & operator=( AccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) - offsetof( AccelerationStructureMemoryRequirementsInfoKHR, pNext ) ); + m_accelerationStructureNV = accelerationStructureNV; return *this; } - - AccelerationStructureMemoryRequirementsInfoKHR( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - AccelerationStructureMemoryRequirementsInfoKHR& operator=( VkAccelerationStructureMemoryRequirementsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - AccelerationStructureMemoryRequirementsInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - AccelerationStructureMemoryRequirementsInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - AccelerationStructureMemoryRequirementsInfoKHR & setBuildType( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType_ ) VULKAN_HPP_NOEXCEPT - { - buildType = buildType_; - return *this; - } - - AccelerationStructureMemoryRequirementsInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT - { - 
accelerationStructure = accelerationStructure_; - return *this; - } - - - operator VkAccelerationStructureMemoryRequirementsInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkAccelerationStructureMemoryRequirementsInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureMemoryRequirementsInfoKHR const& ) const = default; -#else - bool operator==( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( buildType == rhs.buildType ) - && ( accelerationStructure == rhs.accelerationStructure ); - } - - bool operator!=( AccelerationStructureMemoryRequirementsInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } #endif + AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = {}; + return *this; + } +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureNV const & ) const = default; +#else + bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == rhs.m_accelerationStructureNV; + } - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeKHR::eObject; - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType = VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eHost; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + bool operator!=( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != rhs.m_accelerationStructureNV; + } + bool operator<( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV < rhs.m_accelerationStructureNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureNV m_accelerationStructureNV = {}; }; - static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoKHR ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), + "handle and wrapper have different size!" ); template <> - struct CppType + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type { - using Type = AccelerationStructureMemoryRequirementsInfoKHR; + using type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AccelerationStructureMemoryRequirementsInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureMemoryRequirementsInfoNV; - VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , accelerationStructure( accelerationStructure_ ) {} - AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureMemoryRequirementsInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV & + operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) - offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & + setType( 
VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } - AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } - - operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default; #else - bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( accelerationStructure == rhs.accelerationStructure ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( accelerationStructure == rhs.accelerationStructure ); } - bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject; VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; - }; - static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == + sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -23769,100 +22780,382 @@ namespace VULKAN_HPP_NAMESPACE using Type = AccelerationStructureMemoryRequirementsInfoNV; }; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct AccelerationStructureVersionKHR + struct AccelerationStructureVersionInfoKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureVersionInfoKHR; - VULKAN_HPP_CONSTEXPR AccelerationStructureVersionKHR( const uint8_t* versionData_ = {} ) VULKAN_HPP_NOEXCEPT - : versionData( versionData_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {} ) VULKAN_HPP_NOEXCEPT + : pVersionData( pVersionData_ ) {} - AccelerationStructureVersionKHR & operator=( AccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureVersionInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & + operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureVersionInfoKHR & + operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureVersionKHR ) - offsetof( AccelerationStructureVersionKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - AccelerationStructureVersionKHR( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - AccelerationStructureVersionKHR& operator=( VkAccelerationStructureVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - AccelerationStructureVersionKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AccelerationStructureVersionKHR & setVersionData( const uint8_t* versionData_ ) VULKAN_HPP_NOEXCEPT + AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT { - versionData = versionData_; + pVersionData = pVersionData_; return *this; } - - operator VkAccelerationStructureVersionKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkAccelerationStructureVersionKHR &() VULKAN_HPP_NOEXCEPT + operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AccelerationStructureVersionKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default; #else - bool operator==( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( versionData == rhs.versionData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData ); } - bool operator!=( AccelerationStructureVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionKHR; - const void* pNext = {}; - const uint8_t* versionData = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR; + const void * pNext = {}; + const uint8_t * pVersionData = {}; }; - static_assert( sizeof( AccelerationStructureVersionKHR ) == sizeof( VkAccelerationStructureVersionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = AccelerationStructureVersionKHR; + using Type = AccelerationStructureVersionInfoKHR; + }; + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() = default; + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + : m_swapchainKHR( swapchainKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainKHR const & ) const = default; +#else + bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), + "handle and wrapper have different size!" 
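    // AccelerationStructureVersionInfoKHR (the renamed AccelerationStructureVersionKHR)
    // wraps a pointer to 2 * VK_UUID_SIZE opaque version bytes taken from a serialized
    // acceleration structure. A minimal sketch of checking whether such a blob can be
    // deserialized on the current device; `device` and `versionBytes` (the header copied
    // out of the serialized data) are assumed placeholders:
    //
    //   VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR versionInfo{};
    //   versionInfo.setPVersionData( versionBytes );   // must point at 2 * VK_UUID_SIZE bytes
    //   VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility =
    //     device.getAccelerationStructureCompatibilityKHR( versionInfo );
    //   // deserialize only if compatibility == AccelerationStructureCompatibilityKHR::eCompatible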
); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Semaphore + { + public: + using CType = VkSemaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: + VULKAN_HPP_CONSTEXPR Semaphore() = default; + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Semaphore const & ) const = default; +#else + bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore < rhs.m_semaphore; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Fence + { + public: + using CType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + VULKAN_HPP_CONSTEXPR Fence() = default; + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_fence = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Fence const & ) const = default; +#else + bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence == rhs.m_fence; + } + + bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence != rhs.m_fence; + } + + bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence < rhs.m_fence; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
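    // These generated handle classes (SwapchainKHR, Semaphore, Fence, ...) are thin,
    // same-size wrappers over the raw Vulkan handles, as the static_asserts verify:
    // default/nullptr construction, ordering and equality, an explicit conversion back
    // to the C handle, and boolean tests against VK_NULL_HANDLE. A minimal sketch,
    // assuming `device` is a valid VULKAN_HPP_NAMESPACE::Device and exceptions are enabled:
    //
    //   VULKAN_HPP_NAMESPACE::Fence fence =
    //     device.createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo{} );
    //   if ( fence )                                     // explicit operator bool
    //   {
    //     VkFence raw = static_cast<VkFence>( fence );   // hand the raw handle to C code
    //     device.destroyFence( fence );
    //     fence = nullptr;                               // operator=( std::nullptr_t ) resets to {}
    //   }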
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct AcquireNextImageInfoKHR { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, - uint64_t timeout_ = {}, - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + uint64_t timeout_ = {}, + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT : swapchain( swapchain_ ) , timeout( timeout_ ) @@ -23871,24 +23164,23 @@ namespace VULKAN_HPP_NAMESPACE , deviceMask( deviceMask_ ) {} - AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AcquireNextImageInfoKHR ) - offsetof( AcquireNextImageInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AcquireNextImageInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & + operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AcquireNextImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -23924,51 +23216,43 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AcquireNextImageInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireNextImageInfoKHR const & ) const = default; #else - bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ) - && ( timeout == rhs.timeout ) - && ( semaphore == rhs.semaphore ) - && ( fence == rhs.fence ) - && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( timeout == 
rhs.timeout ) && ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && + ( deviceMask == rhs.deviceMask ); } - bool operator!=( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; - uint64_t timeout = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - uint32_t deviceMask = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint64_t timeout = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + uint32_t deviceMask = {}; }; - static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -23979,39 +23263,41 @@ namespace VULKAN_HPP_NAMESPACE struct AcquireProfilingLockInfoKHR { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , timeout( timeout_ ) {} - AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AcquireProfilingLockInfoKHR ) - offsetof( AcquireProfilingLockInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AcquireProfilingLockInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AcquireProfilingLockInfoKHR& operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & + operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AcquireProfilingLockInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AcquireProfilingLockInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -24023,46 +23309,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAcquireProfilingLockInfoKHR const&() const 
VULKAN_HPP_NOEXCEPT + operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AcquireProfilingLockInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default; #else - bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( timeout == rhs.timeout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout ); } - bool operator!=( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; - uint64_t timeout = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; + uint64_t timeout = {}; }; - static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -24072,12 +23352,11 @@ namespace VULKAN_HPP_NAMESPACE struct AllocationCallbacks { - - - VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = {}, - PFN_vkAllocationFunction pfnAllocation_ = {}, - PFN_vkReallocationFunction pfnReallocation_ = {}, - PFN_vkFreeFunction pfnFree_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + PFN_vkAllocationFunction pfnAllocation_ = {}, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT : pUserData( pUserData_ ) @@ -24088,18 +23367,23 @@ namespace VULKAN_HPP_NAMESPACE , pfnInternalFree( pfnInternalFree_ ) {} - AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; - AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + : AllocationCallbacks( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AllocationCallbacks & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; @@ -24123,7 +23407,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + AllocationCallbacks & + setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT { pfnInternalAllocation = pfnInternalAllocation_; return *this; @@ -24135,73 +23420,71 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT + operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AllocationCallbacks const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AllocationCallbacks const & ) const = default; #else - bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( pUserData == rhs.pUserData ) - && ( pfnAllocation == rhs.pfnAllocation ) - && ( pfnReallocation == rhs.pfnReallocation ) - && ( pfnFree == rhs.pfnFree ) - && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) - && ( pfnInternalFree == rhs.pfnInternalFree ); + return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && + ( pfnReallocation == rhs.pfnReallocation ) && ( pfnFree == rhs.pfnFree ) && + ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( 
pfnInternalFree == rhs.pfnInternalFree ); } - bool operator!=( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - void* pUserData = {}; - PFN_vkAllocationFunction pfnAllocation = {}; - PFN_vkReallocationFunction pfnReallocation = {}; - PFN_vkFreeFunction pfnFree = {}; + void * pUserData = {}; + PFN_vkAllocationFunction pfnAllocation = {}; + PFN_vkReallocationFunction pfnReallocation = {}; + PFN_vkFreeFunction pfnFree = {}; PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; - PFN_vkInternalFreeNotification pfnInternalFree = {}; - + PFN_vkInternalFreeNotification pfnInternalFree = {}; }; - static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" ); + static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ComponentMapping { - - - VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, - VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) + VULKAN_HPP_NOEXCEPT : r( r_ ) , g( g_ ) , b( b_ ) , a( a_ ) {} - ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ComponentMapping& operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + : ComponentMapping( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -24229,61 +23512,59 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT + operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ComponentMapping const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComponentMapping const & ) const = default; #else - bool operator==( ComponentMapping 
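    // AllocationCallbacks mirrors VkAllocationCallbacks one-to-one: a user pointer plus
    // the PFN_vk* entry points, accepted by every create/destroy call that takes an
    // allocator. A minimal sketch, assuming `myAlloc`, `myRealloc` and `myFree` are
    // user-provided functions matching PFN_vkAllocationFunction,
    // PFN_vkReallocationFunction and PFN_vkFreeFunction, and `pool` is the user state
    // they share:
    //
    //   VULKAN_HPP_NAMESPACE::AllocationCallbacks allocator{};
    //   allocator.setPUserData( &pool )        // handed back as each callback's first argument
    //     .setPfnAllocation( &myAlloc )
    //     .setPfnReallocation( &myRealloc )
    //     .setPfnFree( &myFree );              // the two internal-notification PFNs may stay null
    //
    //   VULKAN_HPP_NAMESPACE::Instance instance =
    //     VULKAN_HPP_NAMESPACE::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo{}, allocator );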
const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( r == rhs.r ) - && ( g == rhs.g ) - && ( b == rhs.b ) - && ( a == rhs.a ); + return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a ); } - bool operator!=( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; - }; - static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" ); + static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#ifdef VK_USE_PLATFORM_ANDROID_KHR +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) struct AndroidHardwareBufferFormatPropertiesANDROID { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; - VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint64_t externalFormat_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = + VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT : format( format_ ) , externalFormat( externalFormat_ ) , formatFeatures( formatFeatures_ ) @@ -24294,75 +23575,73 @@ 
namespace VULKAN_HPP_NAMESPACE , suggestedYChromaOffset( suggestedYChromaOffset_ ) {} - AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatPropertiesANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferFormatPropertiesANDROID & + operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID & + operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) - offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; - } - - AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const& ) const = default; -#else - bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( externalFormat == rhs.externalFormat ) - && ( formatFeatures == rhs.formatFeatures ) - && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) - && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) - && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) - && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) - && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && + ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) && + ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && + ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); } - bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint64_t externalFormat = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; - VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; }; - static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == + sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -24371,75 +23650,74 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) struct AndroidHardwareBufferPropertiesANDROID { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferPropertiesANDROID; +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT : allocationSize( allocationSize_ ) , memoryTypeBits( memoryTypeBits_ ) {} - AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferPropertiesANDROID ) - offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AndroidHardwareBufferPropertiesANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferPropertiesANDROID & + operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferPropertiesANDROID & + operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidHardwareBufferPropertiesANDROID const& ) const = default; -#else - bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allocationSize == rhs.allocationSize ) - && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && + ( memoryTypeBits == rhs.memoryTypeBits ); } - bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif 
public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; - uint32_t memoryTypeBits = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -24448,71 +23726,70 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) struct AndroidHardwareBufferUsageANDROID { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferUsageANDROID; - VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT : androidHardwareBufferUsage( androidHardwareBufferUsage_ ) {} - AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AndroidHardwareBufferUsageANDROID( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferUsageANDROID & + operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidHardwareBufferUsageANDROID const& ) const 
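// At this operator<=>: when C++20 three-way comparison is available
// (VULKAN_HPP_HAS_SPACESHIP_OPERATOR), the wrapper defaults it and the compiler
// derives the comparisons from the members; otherwise the #else branch keeps the
// handwritten memberwise == plus a != that negates it. Either way the effect is
// the same (sketch, assuming the default vk namespace):
//
//   vk::AndroidHardwareBufferUsageANDROID a, b;
//   bool same = ( a == b );  // compares sType, pNext and androidHardwareBufferUsage
//
// Note that pNext takes part as a raw pointer comparison, so two structs whose
// chains hold equal data at different addresses still compare unequal.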
= default; -#else - bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); } - bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; - void* pNext = {}; - uint64_t androidHardwareBufferUsage = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + void * pNext = {}; + uint64_t androidHardwareBufferUsage = {}; }; - static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -24521,93 +23798,89 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) struct AndroidSurfaceCreateInfoKHR { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, - struct ANativeWindow* window_ = {} ) VULKAN_HPP_NOEXCEPT + struct ANativeWindow * window_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , window( window_ ) {} - AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AndroidSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & + operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AndroidSurfaceCreateInfoKHR & setPNext( 
const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + AndroidSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow* window_ ) VULKAN_HPP_NOEXCEPT + AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } - - operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AndroidSurfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( window == rhs.window ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); } - bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; - struct ANativeWindow* window = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; + struct ANativeWindow * window = {}; }; - static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
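// Each setter above returns *this, so create-info structs compose fluently. A hedged
// usage sketch for the struct just closed, where window stands for an ANativeWindow *
// the application owns (assuming the default vk namespace, Android builds only):
//
//   vk::AndroidSurfaceCreateInfoKHR info = vk::AndroidSurfaceCreateInfoKHR{}
//                                            .setFlags( {} )
//                                            .setWindow( window );
//
// The filled struct would then be handed to Instance::createAndroidSurfaceKHR.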
); template <> struct CppType @@ -24618,14 +23891,15 @@ namespace VULKAN_HPP_NAMESPACE struct ApplicationInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; - VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = {}, - uint32_t applicationVersion_ = {}, - const char* pEngineName_ = {}, - uint32_t engineVersion_ = {}, - uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, + uint32_t applicationVersion_ = {}, + const char * pEngineName_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT : pApplicationName( pApplicationName_ ) , applicationVersion( applicationVersion_ ) , pEngineName( pEngineName_ ) @@ -24633,30 +23907,28 @@ namespace VULKAN_HPP_NAMESPACE , apiVersion( apiVersion_ ) {} - ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ApplicationInfo ) - offsetof( ApplicationInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : ApplicationInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ApplicationInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ApplicationInfo & setPApplicationName( const char* pApplicationName_ ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT { pApplicationName = pApplicationName_; return *this; @@ -24668,7 +23940,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ApplicationInfo & setPEngineName( const char* pEngineName_ ) VULKAN_HPP_NOEXCEPT + ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT { pEngineName = pEngineName_; return *this; @@ -24686,49 +23958,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ApplicationInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ApplicationInfo const & ) const = default; #else - bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pApplicationName == rhs.pApplicationName ) - && ( applicationVersion == rhs.applicationVersion ) - && ( pEngineName == 
rhs.pEngineName ) - && ( engineVersion == rhs.engineVersion ) - && ( apiVersion == rhs.apiVersion ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pApplicationName == rhs.pApplicationName ) && + ( applicationVersion == rhs.applicationVersion ) && ( pEngineName == rhs.pEngineName ) && + ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); } - bool operator!=( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; - const void* pNext = {}; - const char* pApplicationName = {}; - uint32_t applicationVersion = {}; - const char* pEngineName = {}; - uint32_t engineVersion = {}; - uint32_t apiVersion = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; + const void * pNext = {}; + const char * pApplicationName = {}; + uint32_t applicationVersion = {}; + const char * pEngineName = {}; + uint32_t engineVersion = {}; + uint32_t apiVersion = {}; }; static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -24741,17 +24004,18 @@ namespace VULKAN_HPP_NAMESPACE struct AttachmentDescription { - - - VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , format( format_ ) , samples( samples_ ) @@ -24763,14 +24027,19 @@ namespace VULKAN_HPP_NAMESPACE , finalLayout( finalLayout_ ) {} - AttachmentDescription( VkAttachmentDescription const & rhs 
) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; - AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -24804,13 +24073,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } - AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return *this; @@ -24828,71 +24099,65 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentDescription const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription const & ) const = default; #else - bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) - && ( format == rhs.format ) - && ( samples == rhs.samples ) - && ( loadOp == rhs.loadOp ) - && ( storeOp == rhs.storeOp ) - && ( stencilLoadOp == rhs.stencilLoadOp ) - && ( stencilStoreOp == rhs.stencilStoreOp ) - && ( initialLayout == rhs.initialLayout ) - && ( finalLayout == rhs.finalLayout ); + return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && + ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && + ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) && + ( finalLayout == rhs.finalLayout ); } - bool operator!=( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = 
VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct AttachmentDescription2 { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; - VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription2( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = 
VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , format( format_ ) , samples( samples_ ) @@ -24904,24 +24169,23 @@ namespace VULKAN_HPP_NAMESPACE , finalLayout( finalLayout_ ) {} - AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AttachmentDescription2 ) - offsetof( AttachmentDescription2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AttachmentDescription2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AttachmentDescription2& operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -24957,13 +24221,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } - AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescription2 & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return *this; @@ -24981,59 +24247,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAttachmentDescription2 const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentDescription2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription2 const & ) const = default; #else - bool operator==( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( format == rhs.format ) - && ( samples == rhs.samples ) - && ( loadOp == rhs.loadOp ) - && ( storeOp == rhs.storeOp ) - && ( stencilLoadOp == rhs.stencilLoadOp ) - && ( stencilStoreOp == rhs.stencilStoreOp ) - && ( initialLayout == rhs.initialLayout ) - && ( finalLayout == rhs.finalLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == 
rhs.flags ) && ( format == rhs.format ) && + ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && + ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && + ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); } - bool operator!=( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<AttachmentDescription2>::value, "struct wrapper is not a standard layout!"
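// Right after these asserts the generator emits a CppType specialization, roughly
//
//   template <>
//   struct CppType<StructureType, StructureType::eAttachmentDescription2>
//   {
//     using Type = AttachmentDescription2;
//   };
//
// mapping the runtime sType enumerant back to the wrapper type at compile time,
// which is presumably what the header's structure-chain machinery keys off. The new
// AttachmentDescription2KHR alias added alongside it reflects the KHR extension's
// promotion to core Vulkan 1.2: both names now denote the same type.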
); template <> @@ -25041,118 +24296,127 @@ namespace VULKAN_HPP_NAMESPACE { using Type = AttachmentDescription2; }; + using AttachmentDescription2KHR = AttachmentDescription2; struct AttachmentDescriptionStencilLayout { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAttachmentDescriptionStencilLayout; - VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : stencilInitialLayout( stencilInitialLayout_ ) , stencilFinalLayout( stencilFinalLayout_ ) {} - AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AttachmentDescriptionStencilLayout ) - offsetof( AttachmentDescriptionStencilLayout, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) + VULKAN_HPP_NOEXCEPT = default; AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AttachmentDescriptionStencilLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AttachmentDescriptionStencilLayout& operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescriptionStencilLayout & + operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentDescriptionStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & + setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT { stencilInitialLayout = stencilInitialLayout_; return *this; } - AttachmentDescriptionStencilLayout & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT + AttachmentDescriptionStencilLayout & + setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT { stencilFinalLayout = stencilFinalLayout_; return *this; } - - operator VkAttachmentDescriptionStencilLayout const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast<const VkAttachmentDescriptionStencilLayout *>( this ); } operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this ); + return *reinterpret_cast<VkAttachmentDescriptionStencilLayout *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentDescriptionStencilLayout const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default; #else - bool operator==( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilInitialLayout == rhs.stencilInitialLayout ) - && ( stencilFinalLayout == rhs.stencilFinalLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) && + ( stencilFinalLayout == rhs.stencilFinalLayout ); } - bool operator!=( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayout>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType { using Type = AttachmentDescriptionStencilLayout; }; + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; struct AttachmentReference { - - - VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReference( + uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT : attachment( attachment_ ) , layout( layout_ ) {} - AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; - AttachmentReference& operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReference( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference & + operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -25168,74 +24432,70 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentReference const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference const & ) const = default; #else - bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( attachment == rhs.attachment ) - && ( layout == rhs.layout ); + return ( attachment == rhs.attachment ) && ( layout == rhs.layout ); } - bool operator!=( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t attachment = {}; - VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
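// AttachmentReference above is one of the wrappers without sType/pNext, so it has
// no allowDuplicate/structureType statics and compares only its own two members.
// AttachmentReference2, which follows, is the extensible Vulkan 1.2 form: it adds
// sType, a const void * pNext for extension chaining, and an explicit aspectMask.
// A sketch of the difference, assuming the default vk namespace:
//
//   vk::AttachmentReference  ref1{ 0, vk::ImageLayout::eColorAttachmentOptimal };
//   vk::AttachmentReference2 ref2{ 0, vk::ImageLayout::eColorAttachmentOptimal,
//                                  vk::ImageAspectFlagBits::eColor };
//
// Only ref2 can anchor pNext extensions such as AttachmentReferenceStencilLayout
// below.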
); struct AttachmentReference2 { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; - VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentReference2( uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT : attachment( attachment_ ) , layout( layout_ ) , aspectMask( aspectMask_ ) {} - AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AttachmentReference2 ) - offsetof( AttachmentReference2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AttachmentReference2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AttachmentReference2& operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & + operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentReference2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -25259,47 +24519,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkAttachmentReference2 const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentReference2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference2 const & ) const = default; #else - bool operator==( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachment == rhs.attachment ) - && ( layout == rhs.layout ) - && ( aspectMask == rhs.aspectMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && + ( layout == rhs.layout ) && ( aspectMask == rhs.aspectMask ); } - bool operator!=( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; - const void* pNext = {}; - uint32_t attachment = {}; - VULKAN_HPP_NAMESPACE::ImageLayout layout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; + const void * pNext = {}; + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - }; - static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -25307,108 +24560,109 @@ namespace VULKAN_HPP_NAMESPACE { using Type = AttachmentReference2; }; + using AttachmentReference2KHR = AttachmentReference2; struct AttachmentReferenceStencilLayout { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; - VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = + VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT : stencilLayout( stencilLayout_ ) {} - AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( AttachmentReferenceStencilLayout ) - offsetof( AttachmentReferenceStencilLayout, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : AttachmentReferenceStencilLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - AttachmentReferenceStencilLayout& operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & + operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - AttachmentReferenceStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT + AttachmentReferenceStencilLayout & + setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT { stencilLayout = stencilLayout_; return *this; } - - operator VkAttachmentReferenceStencilLayout const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentReferenceStencilLayout const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default; #else - bool operator==( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilLayout == rhs.stencilLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout ); } - bool operator!=( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<AttachmentReferenceStencilLayout>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout> { using Type = AttachmentReferenceStencilLayout; }; + using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; struct Extent2D { - - - VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, - uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT : width( width_ ) , height( height_ ) {} - Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; - Extent2D& operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT + Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<Extent2D const *>(&rhs); + *this = *reinterpret_cast<Extent2D const *>( &rhs ); return *this; } @@ -25424,61 +24678,58 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT + operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkExtent2D*>( this ); + return *reinterpret_cast<const VkExtent2D *>( this ); } operator VkExtent2D &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkExtent2D*>( this ); + return *reinterpret_cast<VkExtent2D *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Extent2D const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Extent2D const & ) const = default; #else - bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( width == rhs.width ) - && ( height == rhs.height ); + return ( width == rhs.width ) && ( height == rhs.height ); } - bool operator!=( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t width = {}; + uint32_t width = {}; uint32_t height = {}; - }; static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!"
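// Extent2D is the degenerate case of the same generated pattern: no sType, no
// pNext, just the payload, which keeps the whole wrapper constexpr-friendly.
// Illustrative only:
//
//   constexpr vk::Extent2D grid{ 2, 2 };  // e.g. a 2x2 sample-location grid
//   static_assert( grid.width == 2 );
//
// The type reappears just below as SampleLocationsInfoEXT::sampleLocationGridSize.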
); struct SampleLocationEXT { - - - VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, - float y_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT : x( x_ ) , y( y_ ) {} - SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & + operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<SampleLocationEXT const *>(&rhs); + *this = *reinterpret_cast<SampleLocationEXT const *>( &rhs ); return *this; } @@ -25494,88 +24745,100 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkSampleLocationEXT*>( this ); + return *reinterpret_cast<const VkSampleLocationEXT *>( this ); } operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkSampleLocationEXT*>( this ); + return *reinterpret_cast<VkSampleLocationEXT *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SampleLocationEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationEXT const & ) const = default; #else - bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( x == rhs.x ) - && ( y == rhs.y ); + return ( x == rhs.x ) && ( y == rhs.y ); } - bool operator!=( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: float x = {}; float y = {}; - }; - static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!"
); struct SampleLocationsInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; - VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, - uint32_t sampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, + uint32_t sampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) , sampleLocationGridSize( sampleLocationGridSize_ ) , sampleLocationsCount( sampleLocationsCount_ ) , pSampleLocations( pSampleLocations_ ) {} - SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SampleLocationsInfoEXT ) - offsetof( SampleLocationsInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SampleLocationsInfoEXT( *reinterpret_cast( &rhs ) ) + {} - SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SampleLocationsInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & setSampleLocationsPerPixel( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsPerPixel = sampleLocationsPerPixel_; return *this; } - SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & + setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) 
VULKAN_HPP_NOEXCEPT { sampleLocationGridSize = sampleLocationGridSize_; return *this; @@ -25587,55 +24850,61 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + SampleLocationsInfoEXT & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } - - operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + sampleLocationsCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SampleLocationsInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationsInfoEXT const & ) const = default; #else - bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) - && ( sampleLocationGridSize == rhs.sampleLocationGridSize ) - && ( sampleLocationsCount == rhs.sampleLocationsCount ) - && ( pSampleLocations == rhs.pSampleLocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) && + ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && + ( sampleLocationsCount == rhs.sampleLocationsCount ) && ( pSampleLocations == rhs.pSampleLocations ); } - bool operator!=( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; - uint32_t sampleLocationsCount = {}; - const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {}; - + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; + uint32_t sampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {}; }; - static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> @@ -25646,22 +24915,28 @@ namespace VULKAN_HPP_NAMESPACE struct AttachmentSampleLocationsEXT { - - - VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( + uint32_t attachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT : attachmentIndex( attachmentIndex_ ) , sampleLocationsInfo( sampleLocationsInfo_ ) {} - AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & + operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -25671,181 +24946,275 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + AttachmentSampleLocationsEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } - - operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( AttachmentSampleLocationsEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default; #else - bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( attachmentIndex == rhs.attachmentIndex ) - && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); } - bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t attachmentIndex = {}; + uint32_t attachmentIndex = {}; VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; - }; - static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" 
  );
-  static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value,
+                 "struct wrapper is not a standard layout!" );

  struct BaseInStructure
  {
-
-
-    BaseInStructure() VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ =
+                       VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT
      : sType( sType_ )
    {}

-    BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>(&rhs);
+      *this = *reinterpret_cast<BaseInStructure const *>( &rhs );
      return *this;
    }

-    BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+    BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

-
-    operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT
+    operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkBaseInStructure*>( this );
+      return *reinterpret_cast<const VkBaseInStructure *>( this );
    }

    operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkBaseInStructure*>( this );
+      return *reinterpret_cast<VkBaseInStructure *>( this );
    }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BaseInStructure const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BaseInStructure const & ) const = default;
#else
-    bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
    }

-    bool operator!=( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

-
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = {};
-    const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
+    const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
  };
  static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BaseInStructure>::value, "struct wrapper is not a standard layout!" );
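// ---------------------------------------------------------------------------
// Editorial note (not part of the generated header): BaseInStructure exists so
// that read-only pNext chains can be walked generically. A hedged sketch,
// assuming the default `vk` namespace and some extensible structure `info`:
//
//   auto * node = reinterpret_cast<const vk::BaseInStructure *>( info.pNext );
//   while ( node )
//   {
//     if ( node->sType == vk::StructureType::eSampleLocationsInfoEXT ) { /* found it */ }
//     node = node->pNext;   // follow the chain to the next extension struct
//   }
// ---------------------------------------------------------------------------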
  struct BaseOutStructure
  {
-
-
-    BaseOutStructure() VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ =
+                        VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT
      : sType( sType_ )
    {}

-    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>(&rhs);
+      *this = *reinterpret_cast<BaseOutStructure const *>( &rhs );
      return *this;
    }

-    BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+    BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

-
-    operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT
+    operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkBaseOutStructure*>( this );
+      return *reinterpret_cast<const VkBaseOutStructure *>( this );
    }

    operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkBaseOutStructure*>( this );
+      return *reinterpret_cast<VkBaseOutStructure *>( this );
    }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BaseOutStructure const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BaseOutStructure const & ) const = default;
#else
-    bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
    }

-    bool operator!=( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

-
-  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = {};
-    struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
+    struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
  };
-  static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
+  static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
-  struct BindAccelerationStructureMemoryInfoKHR
+  class DeviceMemory
  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoKHR;
+  public:
+    using CType = VkDeviceMemory;

-    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {},
-                                                                 VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
-                                                                 VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
-                                                                 uint32_t deviceIndexCount_ = {},
-                                                                 const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DeviceMemory() = default;
+    VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory( deviceMemory )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = deviceMemory;
+      return *this;
+    }
+#endif
+
+    DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceMemory const & ) const = default;
+#else
+    bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ),
+                 "handle and wrapper have different size!" );
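// ---------------------------------------------------------------------------
// Editorial note (not part of the generated header): DeviceMemory is a
// non-owning handle wrapper; it adds type safety over the raw VkDeviceMemory
// but frees nothing. A minimal sketch, assuming the default `vk` namespace and
// a hypothetical raw handle `rawMemory` obtained from vkAllocateMemory:
//
//   vk::DeviceMemory memory{};                    // null handle
//   if ( !memory ) { /* nothing allocated yet */ }
//   memory = vk::DeviceMemory( rawMemory );       // adopt the raw handle
//   VkDeviceMemory raw = static_cast<VkDeviceMemory>( memory );  // back to C
//   memory = nullptr;   // drops the reference only; no vkFreeMemory is called
// ---------------------------------------------------------------------------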
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDeviceMemory>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };

+  struct BindAccelerationStructureMemoryInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eBindAccelerationStructureMemoryInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
+                                             VULKAN_HPP_NAMESPACE::DeviceMemory            memory_ = {},
+                                             VULKAN_HPP_NAMESPACE::DeviceSize              memoryOffset_ = {},
+                                             uint32_t                                      deviceIndexCount_ = {},
+                                             const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructure( accelerationStructure_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
@@ -25853,141 +25222,171 @@
      , pDeviceIndices( pDeviceIndices_ )
    {}

-    BindAccelerationStructureMemoryInfoKHR & operator=( BindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindAccelerationStructureMemoryInfoNV(
+          *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BindAccelerationStructureMemoryInfoNV(
+      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_,
+      VULKAN_HPP_NAMESPACE::DeviceMemory            memory_,
+      VULKAN_HPP_NAMESPACE::DeviceSize              memoryOffset_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
+      : accelerationStructure( accelerationStructure_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
+      , pDeviceIndices( deviceIndices_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
+      operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindAccelerationStructureMemoryInfoNV &
+      operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      memcpy( &pNext, &rhs.pNext, sizeof( BindAccelerationStructureMemoryInfoKHR ) - offsetof( BindAccelerationStructureMemoryInfoKHR, pNext ) );
+      *this = *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs );
      return *this;
    }

-    BindAccelerationStructureMemoryInfoKHR( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    BindAccelerationStructureMemoryInfoKHR& operator=( VkBindAccelerationStructureMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR const *>(&rhs);
-      return *this;
-    }
-
-    BindAccelerationStructureMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

-    BindAccelerationStructureMemoryInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    BindAccelerationStructureMemoryInfoNV &
setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } - BindAccelerationStructureMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } - BindAccelerationStructureMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoNV & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } - BindAccelerationStructureMemoryInfoKHR & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } - BindAccelerationStructureMemoryInfoKHR & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - - operator VkBindAccelerationStructureMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } - operator VkBindAccelerationStructureMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindAccelerationStructureMemoryInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default; #else - bool operator==( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( accelerationStructure == rhs.accelerationStructure ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); } - bool operator!=( BindAccelerationStructureMemoryInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eBindAccelerationStructureMemoryInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - uint32_t deviceIndexCount = {}; - const uint32_t* pDeviceIndices = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; }; - static_assert( sizeof( BindAccelerationStructureMemoryInfoKHR ) == sizeof( VkBindAccelerationStructureMemoryInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = BindAccelerationStructureMemoryInfoKHR; + using Type = BindAccelerationStructureMemoryInfoNV; }; struct BindBufferMemoryDeviceGroupInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; - VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT : deviceIndexCount( deviceIndexCount_ ) , pDeviceIndices( pDeviceIndices_ ) {} - BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + {} - BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & + operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindBufferMemoryDeviceGroupInfo & setPNext( 
const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -25999,90 +25398,96 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - - operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default; #else - bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); } - bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; - const void* pNext = {}; - uint32_t deviceIndexCount = {}; - const uint32_t* pDeviceIndices = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; }; - static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType { using Type = BindBufferMemoryDeviceGroupInfo; }; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; struct BindBufferMemoryInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; - VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) , memory( memory_ ) , memoryOffset( memoryOffset_ ) {} - BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BindBufferMemoryInfo ) - offsetof( BindBufferMemoryInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BindBufferMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & + operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindBufferMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26106,47 +25511,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindBufferMemoryInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryInfo const & ) const = default; #else - bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ); } - bool operator!=( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; 
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; }; - static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -26154,25 +25552,26 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BindBufferMemoryInfo; }; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; struct Offset2D { - - - VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, - int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT : x( x_ ) , y( y_ ) {} - Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; - Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT + Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -26188,61 +25587,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT + operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkOffset2D &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Offset2D const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset2D const & ) const = default; #else - bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( x == rhs.x ) - && ( y == rhs.y ); + return ( x == rhs.x ) && ( y == rhs.y ); } - bool operator!=( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: int32_t x = {}; int32_t y = {}; - }; static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct Rect2D { - - +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT : offset( offset_ ) , extent( extent_ ) {} - Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; - Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT + Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -26258,76 +25652,84 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT + operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkRect2D &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Rect2D const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Rect2D const & ) const = default; #else - bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( offset == rhs.offset ) - && ( extent == rhs.extent ); + return ( offset == rhs.offset ) && ( extent == rhs.extent ); } - bool operator!=( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: VULKAN_HPP_NAMESPACE::Offset2D offset = {}; VULKAN_HPP_NAMESPACE::Extent2D extent = {}; - }; static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct BindImageMemoryDeviceGroupInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; - VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, - const uint32_t* pDeviceIndices_ = {}, - uint32_t splitInstanceBindRegionCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + uint32_t splitInstanceBindRegionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT : deviceIndexCount( deviceIndexCount_ ) , pDeviceIndices( pDeviceIndices_ ) , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) {} - BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BindImageMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + {} - BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + splitInstanceBindRegions_ = {} ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + , splitInstanceBindRegionCount( static_cast( splitInstanceBindRegions_.size() ) ) + , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & + operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26339,106 +25741,218 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } - BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) 
VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindImageMemoryDeviceGroupInfo & + setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = splitInstanceBindRegionCount_; return *this; } - BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT { pSplitInstanceBindRegions = pSplitInstanceBindRegions_; return *this; } - - operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + splitInstanceBindRegionCount = static_cast( splitInstanceBindRegions_.size() ); + pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default; #else - bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ) - && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) - && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ) && + ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && + ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); } - bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; - const void* pNext = {}; - uint32_t deviceIndexCount = {}; - const uint32_t* pDeviceIndices = {}; - uint32_t splitInstanceBindRegionCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + uint32_t splitInstanceBindRegionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {}; }; - static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) 
                 == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value,
+                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
  {
    using Type = BindImageMemoryDeviceGroupInfo;
  };
+  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+
+  class Image
+  {
+  public:
+    using CType = VkImage;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eImage;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Image() = default;
+    VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = image;
+      return *this;
+    }
+#endif
+
+    Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Image const & ) const = default;
+#else
+    bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == rhs.m_image;
+    }
+
+    bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != rhs.m_image;
+    }
+
+    bool operator<( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image < rhs.m_image;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImage m_image = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ),
+                 "handle and wrapper have different size!" );
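// ---------------------------------------------------------------------------
// Editorial note (not part of the generated header): the CppType and
// isVulkanHandleType specializations that follow let generic code map the C
// handle type and the object-type enums back to the wrapper class. A hedged
// compile-time sketch, assuming the default `vk` namespace:
//
//   static_assert( vk::isVulkanHandleType<vk::Image>::value,
//                  "Image is a Vulkan handle type" );
//   static_assert( std::is_same<vk::CppType<vk::ObjectType, vk::ObjectType::eImage>::Type,
//                               vk::Image>::value,
//                  "ObjectType::eImage maps to vk::Image" );
// ---------------------------------------------------------------------------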
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eImage>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Image>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };

  struct BindImageMemoryInfo
  {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;

-    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
-                                              VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image        image_  = {},
+                                              VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                              VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
    {}

-    BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemoryInfo ) - offsetof( BindImageMemoryInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &
+      operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>(&rhs);
+      *this = *reinterpret_cast<BindImageMemoryInfo const *>( &rhs );
      return *this;
    }

-    BindImageMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
@@ -26462,47 +25976,40 @@ namespace VULKAN_HPP_NAMESPACE
      return *this;
    }

-
-    operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
+      return *reinterpret_cast<const VkBindImageMemoryInfo *>( this );
    }

    operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
+      return *reinterpret_cast<VkBindImageMemoryInfo *>( this );
    }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BindImageMemoryInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BindImageMemoryInfo const & ) const = default;
#else
-    bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( image == rhs.image )
-          && ( memory == rhs.memory )
-          && ( memoryOffset == rhs.memoryOffset );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) &&
+             ( memoryOffset == rhs.memoryOffset );
    }

-    bool operator!=( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType =
StructureType::eBindImageMemoryInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; }; - static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -26510,36 +26017,38 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BindImageMemoryInfo; }; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; struct BindImageMemorySwapchainInfoKHR { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT : swapchain( swapchain_ ) , imageIndex( imageIndex_ ) {} - BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BindImageMemorySwapchainInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & + operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -26557,46 +26066,41 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default; #else - bool operator==( 
BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ) - && ( imageIndex == rhs.imageIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( imageIndex == rhs.imageIndex ); } - bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; - uint32_t imageIndex = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndex = {}; }; - static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -26606,79 +26110,76 @@ namespace VULKAN_HPP_NAMESPACE struct BindImagePlaneMemoryInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; - VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT : planeAspect( planeAspect_ ) {} - BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BindImagePlaneMemoryInfo ) - offsetof( BindImagePlaneMemoryInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BindImagePlaneMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & + operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - 
BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + BindImagePlaneMemoryInfo & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } - - operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default; #else - bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( planeAspect == rhs.planeAspect ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); } - bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; - }; - static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
 );

   template <>
@@ -26686,31 +26187,39 @@ namespace VULKAN_HPP_NAMESPACE
   struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
   {
     using Type = BindImagePlaneMemoryInfo;
   };
+  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;

   struct BindIndexBufferIndirectCommandNV
   {
-
-
-    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
-                                                           uint32_t size_ = {},
-                                                           VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(
+      VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
+      uint32_t size_ = {},
+      VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
       : bufferAddress( bufferAddress_ )
       , size( size_ )
       , indexType( indexType_ )
     {}

-    BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR
+    BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    BindIndexBufferIndirectCommandNV& operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV &
+    operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
       return *this;
     }

-    BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+    BindIndexBufferIndirectCommandNV &
+    setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
     {
       bufferAddress = bufferAddress_;
       return *this;
@@ -26728,61 +26237,61 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkBindIndexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
+      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV *>( this );
     }

     operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
+      return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BindIndexBufferIndirectCommandNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default;
 #else
-    bool operator==( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( bufferAddress == rhs.bufferAddress )
-          && ( size == rhs.size )
-          && ( indexType == rhs.indexType );
+      return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType );
     }

-    bool operator!=( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
     VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
-    uint32_t size = {};
-    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
-
+    uint32_t size = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
   };
-  static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindIndexBufferIndirectCommandNV>::value,
+                 "struct wrapper is not a standard layout!" );

   struct BindShaderGroupIndirectCommandNV
   {
-
-
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
     VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT
       : groupIndex( groupIndex_ )
     {}

-    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR
+    BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    BindShaderGroupIndirectCommandNV& operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV &
+    operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
       return *this;
     }
@@ -26792,49 +26301,45 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkBindShaderGroupIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
+      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV *>( this );
     }

     operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
+      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BindShaderGroupIndirectCommandNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
 #else
-    bool operator==( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return ( groupIndex == rhs.groupIndex );
     }

-    bool operator!=( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
     uint32_t groupIndex = {};
-
   };
-  static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ),
+                 "struct and wrapper have different size!"
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SparseMemoryBind { - - - VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : resourceOffset( resourceOffset_ ) , size( size_ ) @@ -26843,14 +26348,18 @@ namespace VULKAN_HPP_NAMESPACE , flags( flags_ ) {} - SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseMemoryBind( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -26884,69 +26393,76 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseMemoryBind const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseMemoryBind const & ) const = default; #else - bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( resourceOffset == rhs.resourceOffset ) - && ( size == rhs.size ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( flags == rhs.flags ); + return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); } - bool operator!=( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; - + VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; }; - static_assert( sizeof( 
SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" ); + static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SparseBufferMemoryBindInfo { - - - VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) , bindCount( bindCount_ ) , pBinds( pBinds_ ) {} - SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseBufferMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Buffer buffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : buffer( buffer_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & + operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -26962,71 +26478,91 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } - - operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default; #else - bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseBufferMemoryBindInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT { - return ( buffer == rhs.buffer ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); + return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); } - bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - uint32_t bindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; - + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; }; - static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct SparseImageOpaqueMemoryBindInfo { - - - VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , bindCount( bindCount_ ) , pBinds( pBinds_ ) {} - SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & + operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -27042,71 +26578,81 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + SparseImageOpaqueMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } - - operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
SparseImageOpaqueMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default; #else - bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( image == rhs.image ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); } - bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::Image image = {}; - uint32_t bindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; - + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; }; - static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct ImageSubresource { - - +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t mipLevel_ = {}, - uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t mipLevel_ = {}, + uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask( aspectMask_ ) , mipLevel( mipLevel_ ) , arrayLayer( arrayLayer_ ) {} - ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresource( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -27128,72 +26674,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT + operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSubresource const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource const & ) const = default; #else - bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( aspectMask == rhs.aspectMask ) - && ( mipLevel == rhs.mipLevel ) - && ( arrayLayer == rhs.arrayLayer ); + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer ); } - bool operator!=( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t mipLevel = {}; - uint32_t arrayLayer = {}; - + uint32_t mipLevel = {}; + uint32_t arrayLayer = {}; }; - static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" ); + static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
 );

   struct Offset3D
   {
-
-
-    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {},
-                                   int32_t y_ = {},
-                                   int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
       : x( x_ )
       , y( y_ )
       , z( z_ )
     {}

-    explicit Offset3D( Offset2D const& offset2D,
-                       int32_t z_ = {} )
-      : x( offset2D.x )
-      , y( offset2D.y )
-      , z( z_ )
-    {}
+    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) ) {}

-    Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
       return *this;
     }
@@ -27215,72 +26749,62 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT
+    operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkOffset3D*>( this );
+      return *reinterpret_cast<const VkOffset3D *>( this );
     }

     operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkOffset3D*>( this );
+      return *reinterpret_cast<VkOffset3D *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Offset3D const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Offset3D const & ) const = default;
 #else
-    bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( z == rhs.z );
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
     }

-    bool operator!=( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
     int32_t x = {};
     int32_t y = {};
     int32_t z = {};
-
   };
   static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );

   struct Extent3D
   {
-
-    VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {},
-                                   uint32_t height_ = {},
-                                   uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+    Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
       : width( width_ )
       , height( height_ )
       , depth( depth_ )
     {}

-    explicit Extent3D( Extent2D const& extent2D,
-                       uint32_t depth_ = {} )
-      : width( extent2D.width )
-      , height( extent2D.height )
-      , depth( depth_ )
+    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}
+
+    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
     {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
       return *this;
     }
@@ -27302,55 +26826,48 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT
+    operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkExtent3D*>( this );
+      return *reinterpret_cast<const VkExtent3D *>( this );
     }

     operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkExtent3D*>( this );
+      return *reinterpret_cast<VkExtent3D *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Extent3D const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent3D const & ) const = default;
 #else
-    bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( width == rhs.width )
-          && ( height == rhs.height )
-          && ( depth == rhs.depth );
+      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
     }

-    bool operator!=( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
-    uint32_t width = {};
+    uint32_t width  = {};
     uint32_t height = {};
-    uint32_t depth = {};
-
+    uint32_t depth  = {};
   };
   static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!"
); struct SparseImageMemoryBind { - - - VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : subresource( subresource_ ) , offset( offset_ ) , extent( extent_ ) @@ -27359,18 +26876,24 @@ namespace VULKAN_HPP_NAMESPACE , flags( flags_ ) {} - SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBind( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBind & + setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT { subresource = subresource_; return *this; @@ -27406,71 +26929,77 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageMemoryBind const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBind const & ) const = default; #else - bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( subresource == rhs.subresource ) - && ( offset == rhs.offset ) - && ( extent == rhs.extent ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( flags == rhs.flags ); + return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && + ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); } - bool operator!=( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } 
#endif - - public: - VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D offset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; - VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; - + VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D offset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; }; - static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" ); + static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct SparseImageMemoryBindInfo { - - - VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - uint32_t bindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , bindCount( bindCount_ ) , pBinds( pBinds_ ) {} - SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & + operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -27486,66 +27015,75 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } - - operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageMemoryBindInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBindInfo const & ) const = default; #else - bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( image == rhs.image ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); } - bool operator!=( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::Image image = {}; - uint32_t bindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {}; - + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {}; }; - static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct BindSparseInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; - VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - uint32_t bufferBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, - uint32_t imageOpaqueBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, - uint32_t imageBindCount_ = {}, - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t bufferBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, + uint32_t imageOpaqueBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, + uint32_t imageBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreCount( waitSemaphoreCount_ ) , pWaitSemaphores( pWaitSemaphores_ ) , bufferBindCount( bufferBindCount_ ) @@ -27558,24 +27096,46 @@ namespace VULKAN_HPP_NAMESPACE , pSignalSemaphores( pSignalSemaphores_ ) {} - BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BindSparseInfo ) - offsetof( BindSparseInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BindSparseInfo( *reinterpret_cast( &rhs ) ) + {} - BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageOpaqueBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , bufferBindCount( static_cast( bufferBinds_.size() ) ) + , pBufferBinds( bufferBinds_.data() ) + , imageOpaqueBindCount( static_cast( imageOpaqueBinds_.size() ) ) + , pImageOpaqueBinds( imageOpaqueBinds_.data() ) + , imageBindCount( static_cast( imageBinds_.size() ) ) + , pImageBinds( imageBinds_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = 
*reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -27587,113 +27147,161 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT { bufferBindCount = bufferBindCount_; return *this; } - BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & + setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT { pBufferBinds = pBufferBinds_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setBufferBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = static_cast( bufferBinds_.size() ); + pBufferBinds = bufferBinds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT { imageOpaqueBindCount = imageOpaqueBindCount_; return *this; } - BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & setPImageOpaqueBinds( + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT { pImageOpaqueBinds = pImageOpaqueBinds_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageOpaqueBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = static_cast( imageOpaqueBinds_.size() ); + pImageOpaqueBinds = imageOpaqueBinds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT { imageBindCount = imageBindCount_; return *this; } - BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & + setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT { pImageBinds = pImageBinds_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = static_cast( imageBinds_.size() ); + pImageBinds = imageBinds_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = 
signalSemaphoreCount_; return *this; } - BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + BindSparseInfo & + setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT { pSignalSemaphores = pSignalSemaphores_; return *this; } - - operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setSignalSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindSparseInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindSparseInfo const & ) const = default; #else - bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( bufferBindCount == rhs.bufferBindCount ) - && ( pBufferBinds == rhs.pBufferBinds ) - && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) - && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) - && ( imageBindCount == rhs.imageBindCount ) - && ( pImageBinds == rhs.pImageBinds ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphores == rhs.pSignalSemaphores ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && + ( pBufferBinds == rhs.pBufferBinds ) && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && + ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && ( imageBindCount == rhs.imageBindCount ) && + ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); } - bool operator!=( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; - uint32_t bufferBindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds = {}; - uint32_t imageOpaqueBindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds = {}; - uint32_t imageBindCount = {}; - const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {}; - uint32_t signalSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const 
VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t bufferBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {}; + uint32_t imageOpaqueBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {}; + uint32_t imageBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; }; static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -27706,28 +27314,34 @@ namespace VULKAN_HPP_NAMESPACE struct BindVertexBufferIndirectCommandNV { - - +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, - uint32_t size_ = {}, + uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : bufferAddress( bufferAddress_ ) , size( size_ ) , stride( stride_ ) {} - BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - BindVertexBufferIndirectCommandNV& operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & + operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + BindVertexBufferIndirectCommandNV & + setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; @@ -27745,65 +27359,424 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBindVertexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BindVertexBufferIndirectCommandNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default; #else - bool operator==( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( bufferAddress == rhs.bufferAddress ) - && ( size == rhs.size ) - && ( stride == rhs.stride ); + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride 
); } - bool operator!=( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; - uint32_t size = {}; - uint32_t stride = {}; - + uint32_t size = {}; + uint32_t stride = {}; + }; + static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct ImageSubresourceLayers + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceLayers( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & + operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceLayers const & ) const = default; +#else + bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageBlit2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ImageBlit2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2KHR( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageBlit2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR & operator=( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2KHR & operator=( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageBlit2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageBlit2KHR & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageBlit2KHR & + setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + ImageBlit2KHR & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageBlit2KHR & + setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } + + operator VkImageBlit2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit2KHR const & ) const = default; +#else + bool operator==( ImageBlit2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffsets == rhs.dstOffsets ); + } + + bool operator!=( ImageBlit2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + static_assert( sizeof( ImageBlit2KHR ) == sizeof( VkImageBlit2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageBlit2KHR>
+  {
+    using Type = ImageBlit2KHR;
+  };
+
+  struct BlitImageInfo2KHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR(
+      VULKAN_HPP_NAMESPACE::Image       srcImage_       = {},
+      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+      VULKAN_HPP_NAMESPACE::Image       dstImage_       = {},
+      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+      uint32_t                          regionCount_    = {},
+      const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions_ = {},
+      VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) VULKAN_HPP_NOEXCEPT
+      : srcImage( srcImage_ )
+      , srcImageLayout( srcImageLayout_ )
+      , dstImage( dstImage_ )
+      , dstImageLayout( dstImageLayout_ )
+      , regionCount( regionCount_ )
+      , pRegions( pRegions_ )
+      , filter( filter_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BlitImageInfo2KHR( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BlitImageInfo2KHR( *reinterpret_cast<BlitImageInfo2KHR const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BlitImageInfo2KHR(
+      VULKAN_HPP_NAMESPACE::Image       srcImage_,
+      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+      VULKAN_HPP_NAMESPACE::Image       dstImage_,
+      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_,
+      VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest )
+      : srcImage( srcImage_ )
+      , srcImageLayout( srcImageLayout_ )
+      , dstImage( dstImage_ )
+      , dstImageLayout( dstImageLayout_ )
+      , regionCount( static_cast<uint32_t>( regions_.size() ) )
+      , pRegions( regions_.data() )
+      , filter( filter_ )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR &
+      operator=( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BlitImageInfo2KHR & operator=( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR const *>( &rhs );
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    BlitImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BlitImageInfo2KHR & setRegions(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions    = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    BlitImageInfo2KHR & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      filter = filter_;
+      return *this;
+    }
+
+    operator VkBlitImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBlitImageInfo2KHR *>( this );
+    }
+
+    operator VkBlitImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBlitImageInfo2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BlitImageInfo2KHR const & ) const = default;
+#else
+    bool operator==( BlitImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
+             ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) &&
+             ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) &&
+             ( pRegions == rhs.pRegions ) && ( filter == rhs.filter );
+    }
+
+    bool operator!=( BlitImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eBlitImageInfo2KHR;
+    const void *                        pNext          = {};
+    VULKAN_HPP_NAMESPACE::Image         srcImage       = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout   srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image         dstImage       = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout   dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t                            regionCount    = {};
+    const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions = {};
+    VULKAN_HPP_NAMESPACE::Filter        filter         = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+  };
+  static_assert( sizeof( BlitImageInfo2KHR ) == sizeof( VkBlitImageInfo2KHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BlitImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eBlitImageInfo2KHR>
+  {
+    using Type = BlitImageInfo2KHR;
   };
-  static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
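A short sketch of why the enhanced-mode overloads above exist (illustrative only, assuming the conventional vk namespace alias and hypothetical srcImage/dstImage handles): setRegions takes an ArrayProxyNoTemporaries and fills regionCount and pRegions from a single argument, so the count cannot drift out of sync with the pointer the way the raw setRegionCount/setPRegions pair allows.

    vk::ImageBlit2KHR region{};  // subresources and offsets filled elsewhere
    auto blitInfo = vk::BlitImageInfo2KHR{}
                      .setSrcImage( srcImage )
                      .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
                      .setDstImage( dstImage )
                      .setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
                      .setRegions( region )  // one region: regionCount = 1, pRegions = &region
                      .setFilter( vk::Filter::eLinear );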

  struct BufferCopy
  {
-
-
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
-                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+                                     VULKAN_HPP_NAMESPACE::DeviceSize size_      = {} ) VULKAN_HPP_NOEXCEPT
      : srcOffset( srcOffset_ )
      , dstOffset( dstOffset_ )
      , size( size_ )
    {}

-    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<BufferCopy const*>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
      return *this;
    }

@@ -27825,56 +27798,145 @@ namespace VULKAN_HPP_NAMESPACE
      return *this;
    }

-
-    operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
+    operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkBufferCopy*>( this );
+      return *reinterpret_cast<const VkBufferCopy *>( this );
    }

    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkBufferCopy*>( this );
+      return *reinterpret_cast<VkBufferCopy *>( this );
    }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( BufferCopy const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BufferCopy const & ) const = default;
#else
-    bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( srcOffset == rhs.srcOffset )
-          && ( dstOffset == rhs.dstOffset )
-          && ( size == rhs.size );
+      return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
    }

-    bool operator!=( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
-
-
  public:
    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
-
+    VULKAN_HPP_NAMESPACE::DeviceSize size      = {};
  };
  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );
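The sizeof and standard-layout assertions above are what license the reinterpret_cast conversions between wrapper and C struct; a minimal interop sketch (illustrative only, assuming valid commandBuffer, srcBuffer and dstBuffer handles):

    vk::BufferCopy copy = vk::BufferCopy{}.setSrcOffset( 0 ).setDstOffset( 0 ).setSize( 256 );
    // operator VkBufferCopy const &() lets the C++ wrapper feed the C entry point directly.
    VkBufferCopy const & cCopy = copy;
    vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, 1, &cCopy );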
+
+  struct BufferCopy2KHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferCopy2KHR( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
+                                         VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
+                                         VULKAN_HPP_NAMESPACE::DeviceSize size_      = {} ) VULKAN_HPP_NOEXCEPT
+      : srcOffset( srcOffset_ )
+      , dstOffset( dstOffset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCopy2KHR( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCopy2KHR( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferCopy2KHR( *reinterpret_cast<BufferCopy2KHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 BufferCopy2KHR & operator=( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferCopy2KHR & operator=( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2KHR const *>( &rhs );
+      return *this;
+    }
+
+    BufferCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    BufferCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    BufferCopy2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkBufferCopy2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCopy2KHR *>( this );
+    }
+
+    operator VkBufferCopy2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCopy2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BufferCopy2KHR const & ) const = default;
+#else
+    bool operator==( BufferCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) &&
+             ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eBufferCopy2KHR;
+    const void *                        pNext     = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    srcOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    dstOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    size      = {};
+  };
+  static_assert( sizeof( BufferCopy2KHR ) == sizeof( VkBufferCopy2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferCopy2KHR>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = BufferCopy2KHR; + }; + struct BufferCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; - VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , size( size_ ) , usage( usage_ ) @@ -27883,24 +27945,37 @@ namespace VULKAN_HPP_NAMESPACE , pQueueFamilyIndices( pQueueFamilyIndices_ ) {} - BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BufferCreateInfo ) - offsetof( BufferCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} - BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::DeviceSize size_, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -27936,59 +28011,61 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } - - operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCreateInfo const & ) const = default; #else - bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( size == rhs.size ) - && ( usage == rhs.usage ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && + ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); } - bool operator!=( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; }; - static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> @@ -27999,80 +28076,78 @@ namespace VULKAN_HPP_NAMESPACE struct BufferDeviceAddressCreateInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; - VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT : deviceAddress( deviceAddress_ ) {} - BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & + operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressCreateInfoEXT & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } - - operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default; #else - bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceAddress == rhs.deviceAddress ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ); } - bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { 
return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; - }; - static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -28082,31 +28157,31 @@ namespace VULKAN_HPP_NAMESPACE struct BufferDeviceAddressInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) {} - BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BufferDeviceAddressInfo ) - offsetof( BufferDeviceAddressInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BufferDeviceAddressInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BufferDeviceAddressInfo& operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & + operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferDeviceAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28118,43 +28193,37 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBufferDeviceAddressInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferDeviceAddressInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressInfo const & ) const = default; #else - bool operator==( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == 
rhs.buffer ); } - bool operator!=( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -28162,106 +28231,17 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BufferDeviceAddressInfo; }; - - struct ImageSubresourceLayers - { - - - VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t mipLevel_ = {}, - uint32_t baseArrayLayer_ = {}, - uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) - {} - - ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT - { - aspectMask = aspectMask_; - return *this; - } - - ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT - { - mipLevel = mipLevel_; - return *this; - } - - ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT - { - baseArrayLayer = baseArrayLayer_; - return *this; - } - - ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT - { - layerCount = layerCount_; - return *this; - } - - - operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSubresourceLayers const& ) const = default; -#else - bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( aspectMask == rhs.aspectMask ) - && ( mipLevel == rhs.mipLevel ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); - } - - bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t mipLevel = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; - - }; - static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; struct BufferImageCopy { - - - VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, - uint32_t bufferRowLength_ = {}, - uint32_t bufferImageHeight_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT : bufferOffset( bufferOffset_ ) , bufferRowLength( bufferRowLength_ ) @@ -28271,14 +28251,18 @@ namespace VULKAN_HPP_NAMESPACE , imageExtent( imageExtent_ ) {} - BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; - BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferImageCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -28300,7 +28284,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + BufferImageCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; @@ -28318,63 +28303,182 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferImageCopy const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy const & ) const = default; #else - bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( bufferOffset == rhs.bufferOffset ) - && ( bufferRowLength == rhs.bufferRowLength ) - && ( bufferImageHeight == rhs.bufferImageHeight ) - && ( imageSubresource == rhs.imageSubresource ) - && ( imageOffset == rhs.imageOffset ) - && ( imageExtent == rhs.imageExtent ); + return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && + ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && + ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); } - bool operator!=( BufferImageCopy const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; - uint32_t bufferRowLength = {}; - uint32_t bufferImageHeight = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; - + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct BufferImageCopy2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2KHR( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferImageCopy2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2KHR & + operator=( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2KHR & operator=( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferImageCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferImageCopy2KHR & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + BufferImageCopy2KHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + BufferImageCopy2KHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + BufferImageCopy2KHR & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + BufferImageCopy2KHR & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + BufferImageCopy2KHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + operator VkBufferImageCopy2KHR const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy2KHR const & ) const = default; +#else + bool operator==( BufferImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && + ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( BufferImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + static_assert( sizeof( BufferImageCopy2KHR ) == sizeof( VkBufferImageCopy2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferImageCopy2KHR; + }; + struct BufferMemoryBarrier { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; - VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : srcAccessMask( srcAccessMask_ ) , dstAccessMask( dstAccessMask_ ) , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) @@ -28384,24 +28488,23 @@ namespace VULKAN_HPP_NAMESPACE , size( size_ ) {} - BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BufferMemoryBarrier ) - offsetof( BufferMemoryBarrier, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BufferMemoryBarrier( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & + operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; + + BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28449,55 +28552,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferMemoryBarrier const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier const & ) const = default; #else - bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) - && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); } - bool operator!=( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - uint32_t srcQueueFamilyIndex = {}; - uint32_t dstQueueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" ); + static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> @@ -28506,33 +28600,191 @@ namespace VULKAN_HPP_NAMESPACE using Type = BufferMemoryBarrier; }; + struct BufferMemoryBarrier2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2KHR( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryBarrier2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2KHR & + operator=( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2KHR & operator=( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + BufferMemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferMemoryBarrier2KHR & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + BufferMemoryBarrier2KHR & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + BufferMemoryBarrier2KHR & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + BufferMemoryBarrier2KHR & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + BufferMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier2KHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + BufferMemoryBarrier2KHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + BufferMemoryBarrier2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkBufferMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier2KHR const & ) const = default; +#else + bool operator==( BufferMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); + } + + bool operator!=( BufferMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( BufferMemoryBarrier2KHR ) == sizeof( VkBufferMemoryBarrier2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = BufferMemoryBarrier2KHR; + }; + struct BufferMemoryRequirementsInfo2 { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT : buffer( buffer_ ) {} - BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BufferMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & + operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28544,78 
+28796,77 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default; #else - bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); } - bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType { using Type = BufferMemoryRequirementsInfo2; }; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; struct BufferOpaqueCaptureAddressCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferOpaqueCaptureAddressCreateInfo; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT : opaqueCaptureAddress( opaqueCaptureAddress_ ) {} - BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BufferOpaqueCaptureAddressCreateInfo ) - offsetof( BufferOpaqueCaptureAddressCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BufferOpaqueCaptureAddressCreateInfo& operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & + operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferOpaqueCaptureAddressCreateInfo & + operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferOpaqueCaptureAddressCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28627,61 +28878,59 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBufferOpaqueCaptureAddressCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default; #else - bool operator==( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); } - bool operator!=( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } 
#endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; - const void* pNext = {}; - uint64_t opaqueCaptureAddress = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; }; - static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = BufferOpaqueCaptureAddressCreateInfo; }; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; struct BufferViewCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo; - VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , buffer( buffer_ ) , format( format_ ) @@ -28689,24 +28938,23 @@ namespace VULKAN_HPP_NAMESPACE , range( range_ ) {} - BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( BufferViewCreateInfo ) - offsetof( BufferViewCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : BufferViewCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & + operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - BufferViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28742,51 +28990,42 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast( this ); } operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( BufferViewCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferViewCreateInfo const & ) const = default; #else - bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( buffer == rhs.buffer ) - && ( format == rhs.format ) - && ( offset == rhs.offset ) - && ( range == rhs.range ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && + ( format == rhs.format ) && ( offset == rhs.offset ) && ( range == rhs.range ); } - bool operator!=( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize range = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; }; - static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> @@ -28797,31 +29036,34 @@ namespace VULKAN_HPP_NAMESPACE struct CalibratedTimestampInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT; - VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = + VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT : timeDomain( timeDomain_ ) {} - CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : CalibratedTimestampInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & + operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CalibratedTimestampInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -28833,44 +29075,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CalibratedTimestampInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default; #else - bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( timeDomain == rhs.timeDomain ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); } - bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = 
VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; - }; - static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -28878,73 +29115,136 @@ namespace VULKAN_HPP_NAMESPACE using Type = CalibratedTimestampInfoEXT; }; - struct CheckpointDataNV + struct CheckpointData2NV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV; - VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, - void* pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage_ = {}, + void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT : stage( stage_ ) , pCheckpointMarker( pCheckpointMarker_ ) {} - CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : CheckpointData2NV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CheckpointData2NV & + operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CheckpointDataNV ) - offsetof( CheckpointDataNV, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - - operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CheckpointDataNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointData2NV const & ) const = default; #else - bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stage == rhs.stage ) - && ( pCheckpointMarker == rhs.pCheckpointMarker ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && + ( pCheckpointMarker == rhs.pCheckpointMarker ); } - bool operator!=( 
CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage = {}; + void * pCheckpointMarker = {}; + }; + static_assert( sizeof( CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = CheckpointData2NV; + }; + + struct CheckpointDataNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointDataNV( + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, + void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT + : stage( stage_ ) + , pCheckpointMarker( pCheckpointMarker_ ) + {} + + VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CheckpointDataNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointDataNV const & ) const = default; +#else + bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && + ( pCheckpointMarker == rhs.pCheckpointMarker ); + } + + bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; - void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; + void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; - void* pCheckpointMarker = {}; - + void * pCheckpointMarker = {}; }; - static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" ); + static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> @@ -28955,80 +29255,78 @@ namespace VULKAN_HPP_NAMESPACE union ClearColorValue { - ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const& rhs ) VULKAN_HPP_NOEXCEPT + ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); } - ClearColorValue( const std::array& float32_ = {} ) - : float32( float32_ ) - {} + ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} - ClearColorValue( const std::array& int32_ ) - : int32( int32_ ) - {} + ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} - ClearColorValue( const std::array& uint32_ ) - : uint32( uint32_ ) - {} + ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} - ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT + ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT { float32 = float32_; return *this; } - ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT + ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT { int32 = int32_; return *this; } - ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT + ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return *this; } - VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::ClearColorValue & + operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) ); return *this; } - operator VkClearColorValue const&() const + operator VkClearColorValue const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkClearColorValue &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } - VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; }; struct ClearDepthStencilValue { - - - VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, - uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT : depth( depth_ ) , stencil( stencil_ ) {} - ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & + operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( 
&rhs ); return *this; } @@ -29044,57 +29342,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT + operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ClearDepthStencilValue const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearDepthStencilValue const & ) const = default; #else - bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( depth == rhs.depth ) - && ( stencil == rhs.stencil ); + return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); } - bool operator!=( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - float depth = {}; + float depth = {}; uint32_t stencil = {}; - }; - static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" ); + static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); union ClearValue { - ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const& rhs ) VULKAN_HPP_NOEXCEPT + ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) ); } - ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) - : color( color_ ) - {} + ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {} - ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) - : depthStencil( depthStencil_ ) - {} + ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {} ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT { @@ -29102,7 +29391,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT + ClearValue & + setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT { depthStencil = depthStencil_; return *this; @@ -29110,49 +29400,52 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) ); + memcpy( static_cast( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) ); return *this; } - operator VkClearValue const&() const + operator VkClearValue const &() const { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } operator VkClearValue &() { - return *reinterpret_cast(this); + return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - VULKAN_HPP_NAMESPACE::ClearColorValue color; + 
VULKAN_HPP_NAMESPACE::ClearColorValue color; VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil; #else - VkClearColorValue color; + VkClearColorValue color; VkClearDepthStencilValue depthStencil; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct ClearAttachment { - - - ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t colorAttachment_ = {}, - VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t colorAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask( aspectMask_ ) , colorAttachment( colorAttachment_ ) , clearValue( clearValue_ ) {} - ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ClearAttachment& operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearAttachment( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -29174,49 +29467,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT + operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t colorAttachment = {}; - VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; - + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t colorAttachment = {}; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; }; static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct ClearRect { - - - VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, - uint32_t baseArrayLayer_ = {}, - uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT : rect( rect_ ) , baseArrayLayer( baseArrayLayer_ ) , layerCount( layerCount_ ) {} - ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ClearRect& operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -29238,65 +29528,61 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT + operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkClearRect &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ClearRect const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearRect const & ) const = default; #else - bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( rect == rhs.rect ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); + return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); } - bool operator!=( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::Rect2D rect = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; - + VULKAN_HPP_NAMESPACE::Rect2D rect = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; }; static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct CoarseSampleLocationNV { - - - VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, - uint32_t pixelY_ = {}, - uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT : pixelX( pixelX_ ) , pixelY( pixelY_ ) , sample( sample_ ) {} - CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleLocationNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & + operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -29318,71 +29604,86 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT + operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CoarseSampleLocationNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleLocationNV const & ) const = default; #else - bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( pixelX == rhs.pixelX ) - && ( pixelY == rhs.pixelY ) - && ( sample == rhs.sample ); + return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); } - bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: uint32_t pixelX = {}; uint32_t pixelY = {}; uint32_t sample = {}; - }; - static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" ); + static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct CoarseSampleOrderCustomNV { - - - VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, - uint32_t sampleCount_ = {}, - uint32_t sampleLocationCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = {}, + uint32_t sampleLocationCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT : shadingRate( shadingRate_ ) , sampleCount( sampleCount_ ) , sampleLocationCount( sampleLocationCount_ ) , pSampleLocations( pSampleLocations_ ) {} - CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV( + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, + uint32_t sampleCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + CoarseSampleOrderCustomNV & + setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT { shadingRate = shadingRate_; return *this; @@ -29400,84 +29701,191 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + CoarseSampleOrderCustomNV & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } - - operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + sampleLocationCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCoarseSampleOrderCustomNV const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CoarseSampleOrderCustomNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; #else - bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( shadingRate == rhs.shadingRate ) - && ( sampleCount == rhs.sampleCount ) - && ( sampleLocationCount == rhs.sampleLocationCount ) - && ( pSampleLocations == rhs.pSampleLocations ); + return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && + ( sampleLocationCount == rhs.sampleLocationCount ) && ( pSampleLocations == rhs.pSampleLocations ); } - bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; + uint32_t sampleCount = {}; + uint32_t sampleLocationCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; + }; + static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + class CommandPool + { + public: + using CType = VkCommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; public: - VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; - uint32_t sampleCount = {}; - uint32_t sampleLocationCount = {}; - const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {}; + VULKAN_HPP_CONSTEXPR CommandPool() = default; + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + : m_commandPool( commandPool ) + {} +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = commandPool; + return *this; + } +#endif + + CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPool const & ) const = default; +#else + bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == rhs.m_commandPool; + } + + bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != rhs.m_commandPool; + } + + bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool < rhs.m_commandPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT + { + return 
m_commandPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct CommandBufferAllocateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; - VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, - VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, - uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( + VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, + VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT : commandPool( commandPool_ ) , level( level_ ) , commandBufferCount( commandBufferCount_ ) {} - CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferAllocateInfo ) - offsetof( CommandBufferAllocateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & + operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29501,48 +29909,42 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferAllocateInfo const & ) const = default; #else - bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( commandPool == rhs.commandPool ) - && ( level == rhs.level ) - && ( commandBufferCount == rhs.commandBufferCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && + ( level == rhs.level ) && ( commandBufferCount == rhs.commandBufferCount ); } - bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; - VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; - uint32_t commandBufferCount = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; + uint32_t commandBufferCount = {}; }; - static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -29550,17 +29952,212 @@ namespace VULKAN_HPP_NAMESPACE using Type = CommandBufferAllocateInfo; }; + class RenderPass + { + public: + using CType = VkRenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: + VULKAN_HPP_CONSTEXPR RenderPass() = default; + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = renderPass; + return *this; + } +#endif + + RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPass const & ) const = default; +#else + bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == rhs.m_renderPass; + } + + bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != rhs.m_renderPass; + } + + bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass < rhs.m_renderPass; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + VULKAN_HPP_CONSTEXPR Framebuffer() = default; + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + : m_framebuffer( framebuffer ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Framebuffer const & ) const = default; +#else + bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == rhs.m_framebuffer; + } + + bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != rhs.m_framebuffer; + } + + bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer < rhs.m_framebuffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + struct CommandBufferInheritanceInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, - VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT : renderPass( renderPass_ ) , subpass( subpass_ ) , framebuffer( framebuffer_ ) @@ -29569,24 +30166,24 @@ namespace VULKAN_HPP_NAMESPACE , pipelineStatistics( pipelineStatistics_ ) {} - CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandBufferInheritanceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29610,72 +30207,68 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & + setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT { occlusionQueryEnable = occlusionQueryEnable_; return *this; } - CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & + setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) 
VULKAN_HPP_NOEXCEPT { queryFlags = queryFlags_; return *this; } - CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT { pipelineStatistics = pipelineStatistics_; return *this; } - - operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferInheritanceInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceInfo const & ) const = default; #else - bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( renderPass == rhs.renderPass ) - && ( subpass == rhs.subpass ) - && ( framebuffer == rhs.framebuffer ) - && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) - && ( queryFlags == rhs.queryFlags ) - && ( pipelineStatistics == rhs.pipelineStatistics ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && + ( subpass == rhs.subpass ) && ( framebuffer == rhs.framebuffer ) && + ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) && + ( pipelineStatistics == rhs.pipelineStatistics ); } - bool operator!=( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t subpass = {}; - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; - VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; }; - static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -29685,33 +30278,34 @@ namespace VULKAN_HPP_NAMESPACE struct CommandBufferBeginInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; - VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pInheritanceInfo( pInheritanceInfo_ ) {} - CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferBeginInfo ) - offsetof( CommandBufferBeginInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : CommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & + operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29723,51 +30317,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT + CommandBufferBeginInfo & setPInheritanceInfo( + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT { pInheritanceInfo = pInheritanceInfo_; return *this; } - - operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferBeginInfo const & ) const = default; #else - bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pInheritanceInfo == rhs.pInheritanceInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pInheritanceInfo == rhs.pInheritanceInfo ); } - bool operator!=( CommandBufferBeginInfo const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; - const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {}; }; - static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -29778,80 +30367,85 @@ namespace VULKAN_HPP_NAMESPACE struct CommandBufferInheritanceConditionalRenderingInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT : conditionalRenderingEnable( conditionalRenderingEnable_ ) {} - CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( + CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceConditionalRenderingInfoEXT( + VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceConditionalRenderingInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) - offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - 
CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceConditionalRenderingInfoEXT & + setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT { conditionalRenderingEnable = conditionalRenderingEnable_; return *this; } - - operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; #else - bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); } - bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; }; - static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == + sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -29861,90 +30455,97 @@ namespace VULKAN_HPP_NAMESPACE struct CommandBufferInheritanceRenderPassTransformInfoQCOM { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT : transform( transform_ ) , renderArea( renderArea_ ) {} - CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM( + VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderPassTransformInfoQCOM( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) - offsetof( CommandBufferInheritanceRenderPassTransformInfoQCOM, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CommandBufferInheritanceRenderPassTransformInfoQCOM& operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } - CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & + setRenderArea( 
VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT { renderArea = renderArea_; return *this; } - - operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; #else - bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( transform == rhs.transform ) - && ( renderArea == rhs.renderArea ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && + ( renderArea == rhs.renderArea ); } - bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; - }; - static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == + sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
 );
 
   template <>
   struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
@@ -29952,35 +30553,8627 @@ namespace VULKAN_HPP_NAMESPACE
   {
     using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
   };
 
+  struct Viewport
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Viewport( float x_        = {},
+                                   float y_        = {},
+                                   float width_    = {},
+                                   float height_   = {},
+                                   float minDepth_ = {},
+                                   float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , width( width_ )
+      , height( height_ )
+      , minDepth( minDepth_ )
+      , maxDepth( maxDepth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
+      return *this;
+    }
+
+    Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepth = minDepth_;
+      return *this;
+    }
+
+    Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepth = maxDepth_;
+      return *this;
+    }
+
+    operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewport *>( this );
+    }
+
+    operator VkViewport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewport *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Viewport const & ) const = default;
+#else
+    bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) &&
+             ( minDepth == rhs.minDepth ) && ( maxDepth == rhs.maxDepth );
+    }
+
+    bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float x        = {};
+    float y        = {};
+    float width    = {};
+    float height   = {};
+    float minDepth = {};
+    float maxDepth = {};
+  };
+  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
+
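[Editor's note: usage sketch only, not part of the patch. Assumes a valid vk::CommandBuffer `cmd` and a 1280x720 target; the flipped (negative-height) viewport is just an illustration and requires VK_KHR_maintenance1 or Vulkan 1.1:]

// Minimal sketch: building a vk::Viewport with the chained setters above.
#include <vulkan/vulkan.hpp>

void setFlippedViewport( vk::CommandBuffer cmd )
{
  auto viewport = vk::Viewport{}
                    .setX( 0.0f )
                    .setY( 720.0f )        // start at the bottom edge...
                    .setWidth( 1280.0f )
                    .setHeight( -720.0f )  // ...and flip with a negative height
                    .setMinDepth( 0.0f )
                    .setMaxDepth( 1.0f );
  cmd.setViewport( 0, viewport );  // ArrayProxy accepts a single element
}
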
+  struct CommandBufferInheritanceViewportScissorInfoNV
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(
+      VULKAN_HPP_NAMESPACE::Bool32           viewportScissor2D_  = {},
+      uint32_t                               viewportDepthCount_ = {},
+      const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_    = {} ) VULKAN_HPP_NOEXCEPT
+      : viewportScissor2D( viewportScissor2D_ )
+      , viewportDepthCount( viewportDepthCount_ )
+      , pViewportDepths( pViewportDepths_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(
+      CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceViewportScissorInfoNV(
+          *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV &
+      operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CommandBufferInheritanceViewportScissorInfoNV &
+      operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceViewportScissorInfoNV &
+      setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportScissor2D = viewportScissor2D_;
+      return *this;
+    }
+
+    CommandBufferInheritanceViewportScissorInfoNV &
+      setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportDepthCount = viewportDepthCount_;
+      return *this;
+    }
+
+    CommandBufferInheritanceViewportScissorInfoNV &
+      setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportDepths = pViewportDepths_;
+      return *this;
+    }
+
+    operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
+    }
+
+    operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) &&
+             ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths );
+    }
+
+    bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
+    const void *                           pNext              = {};
+    VULKAN_HPP_NAMESPACE::Bool32           viewportScissor2D  = {};
+    uint32_t                               viewportDepthCount = {};
+    const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths    = {};
+  };
+  static_assert( sizeof( CommandBufferInheritanceViewportScissorInfoNV ) ==
+                   sizeof(
VkCommandBufferInheritanceViewportScissorInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceViewportScissorInfoNV; + }; + + struct ConditionalRenderingBeginInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & + operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + ConditionalRenderingBeginInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; +#else + bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( flags == rhs.flags ); + } + + bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + }; + static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ConditionalRenderingBeginInfoEXT; + }; + + struct DebugUtilsLabelEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char * pLabelName_ = {}, + std::array const & color_ = {} ) VULKAN_HPP_NOEXCEPT + : pLabelName( pLabelName_ ) + , color( color_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsLabelEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & + operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT + { + pLabelName = pLabelName_; + return *this; + } + + DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } + + operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsLabelEXT const & ) const = default; +#else + bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pLabelName == rhs.pLabelName ) && + ( color == rhs.color ); + } + + bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; + const void * pNext = {}; + const char * pLabelName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; + static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DebugUtilsLabelEXT; + }; + + class QueryPool + { + public: + using CType = VkQueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: + VULKAN_HPP_CONSTEXPR QueryPool() = default; + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPool const & ) const = default; +#else + bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == rhs.m_queryPool; + } + + bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != rhs.m_queryPool; + } + + bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool < rhs.m_queryPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type<ObjectType::eQueryPool>
+  {
+    using type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct RenderPassBeginInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass         renderPass_      = {},
+                           VULKAN_HPP_NAMESPACE::Framebuffer        framebuffer_     = {},
+                           VULKAN_HPP_NAMESPACE::Rect2D             renderArea_      = {},
+                           uint32_t                                 clearValueCount_ = {},
+                           const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_    = {} ) VULKAN_HPP_NOEXCEPT
+      : renderPass( renderPass_ )
+      , framebuffer( framebuffer_ )
+      , renderArea( renderArea_ )
+      , clearValueCount( clearValueCount_ )
+      , pClearValues( pClearValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassBeginInfo(
+      VULKAN_HPP_NAMESPACE::RenderPass  renderPass_,
+      VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_,
+      VULKAN_HPP_NAMESPACE::Rect2D      renderArea_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+      : renderPass( renderPass_ )
+      , framebuffer( framebuffer_ )
+      , renderArea( renderArea_ )
+      , clearValueCount( static_cast<uint32_t>( clearValues_.size() ) )
+      , pClearValues( clearValues_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &
+      operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pClearValues = pClearValues_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassBeginInfo & setClearValues(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
+      pClearValues    = clearValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassBeginInfo *>( this );
+    }
+
+    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassBeginInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassBeginInfo const & ) const = default;
+#else
+    bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
+             ( framebuffer == rhs.framebuffer ) && ( renderArea == rhs.renderArea ) &&
+             ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues );
+    }
+
+    bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType      sType           = StructureType::eRenderPassBeginInfo;
+    const void *                             pNext           = {};
+    VULKAN_HPP_NAMESPACE::RenderPass         renderPass      = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer        framebuffer     = {};
+    VULKAN_HPP_NAMESPACE::Rect2D             renderArea      = {};
+    uint32_t                                 clearValueCount = {};
+    const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues    = {};
+  };
+  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
+  {
+    using Type = RenderPassBeginInfo;
+  };
+
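[Editor's note: usage sketch only, not part of the patch. `renderPass` and `framebuffer` are assumed-valid handles created elsewhere, and the 1280x720 render area is a placeholder. It shows the enhanced-mode setClearValues overload added above, which derives clearValueCount from the proxy:]

// Minimal sketch: beginning a render pass with one clear value.
#include <vulkan/vulkan.hpp>

void beginPass( vk::CommandBuffer cmd, vk::RenderPass renderPass, vk::Framebuffer framebuffer )
{
  vk::ClearValue clear{};
  clear.color = vk::ClearColorValue{ std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } };

  auto beginInfo = vk::RenderPassBeginInfo{}
                     .setRenderPass( renderPass )
                     .setFramebuffer( framebuffer )
                     .setRenderArea( vk::Rect2D{ { 0, 0 }, { 1280, 720 } } )
                     .setClearValues( clear );  // count and pointer set together
  cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
}
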
+  struct SubpassBeginInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ =
+                                             VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
+      : contents( contents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+    SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+    {
+      contents = contents_;
+      return *this;
+    }
+
+    operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassBeginInfo *>( this );
+    }
+
+    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassBeginInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassBeginInfo const & ) const = default;
+#else
+    bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents );
+    }
+
+    bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType   sType    = StructureType::eSubpassBeginInfo;
+    const void *                          pNext    = {};
+    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
+  };
+
static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubpassBeginInfo; + }; + using SubpassBeginInfoKHR = SubpassBeginInfo; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default; + VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionKHR( videoSessionKHR ) + {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = videoSessionKHR; + return *this; + } +# endif + + VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionKHR const & ) const = default; +# else + bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == rhs.m_videoSessionKHR; + } + + bool operator!=( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != rhs.m_videoSessionKHR; + } + + bool operator<( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR < rhs.m_videoSessionKHR; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionKHR m_videoSessionKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default; + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( videoSessionParametersKHR ) + {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = videoSessionParametersKHR; + return *this; + } +# endif + + VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersKHR const & ) const = default; +# else + bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR; + } + + bool operator!=( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR; + } + + bool operator<( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionParametersKHR m_videoSessionParametersKHR = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class ImageView + { + public: + using CType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: + VULKAN_HPP_CONSTEXPR ImageView() = default; + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageView const & ) const = default; +#else + bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView != rhs.m_imageView; + } + + bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView < rhs.m_imageView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoPictureResourceKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoPictureResourceKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {} ) VULKAN_HPP_NOEXCEPT + : codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , baseArrayLayer( baseArrayLayer_ ) + , imageViewBinding( imageViewBinding_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceKHR( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoPictureResourceKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & + operator=( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceKHR & operator=( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoPictureResourceKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoPictureResourceKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VideoPictureResourceKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VideoPictureResourceKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VideoPictureResourceKHR & + setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT + { + imageViewBinding = imageViewBinding_; + return *this; + } + + operator VkVideoPictureResourceKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoPictureResourceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoPictureResourceKHR const & ) const = default; +# else + bool operator==( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && + ( codedExtent == rhs.codedExtent ) && ( baseArrayLayer == rhs.baseArrayLayer ) && + ( imageViewBinding == rhs.imageViewBinding ); + } + + bool operator!=( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + uint32_t 
baseArrayLayer = {}; + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; + }; + static_assert( sizeof( VideoPictureResourceKHR ) == sizeof( VkVideoPictureResourceKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoPictureResourceKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoReferenceSlotKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( + int8_t slotIndex_ = {}, + const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ = {} ) VULKAN_HPP_NOEXCEPT + : slotIndex( slotIndex_ ) + , pPictureResource( pPictureResource_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotKHR( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoReferenceSlotKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & + operator=( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotKHR & operator=( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoReferenceSlotKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoReferenceSlotKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VideoReferenceSlotKHR & + setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + pPictureResource = pPictureResource_; + return *this; + } + + operator VkVideoReferenceSlotKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoReferenceSlotKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoReferenceSlotKHR const & ) const = default; +# else + bool operator==( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && + ( pPictureResource == rhs.pPictureResource ); + } + + bool operator!=( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotKHR; + const void * pNext = {}; + int8_t slotIndex = {}; + const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource = {}; + }; + static_assert( sizeof( VideoReferenceSlotKHR ) == sizeof( VkVideoReferenceSlotKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoReferenceSlotKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoBeginCodingInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codecQualityPreset( codecQualityPreset_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBeginCodingInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR( + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) + : flags( flags_ ) + , codecQualityPreset( codecQualityPreset_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoBeginCodingInfoKHR & setCodecQualityPreset( + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ ) VULKAN_HPP_NOEXCEPT + { + codecQualityPreset = codecQualityPreset_; + return *this; + } + + VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } + + VideoBeginCodingInfoKHR & setVideoSessionParameters( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } + + VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } 
+ + VideoBeginCodingInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default; +# else + bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codecQualityPreset == rhs.codecQualityPreset ) && ( videoSession == rhs.videoSession ) && + ( videoSessionParameters == rhs.videoSessionParameters ) && + ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots ); + } + + bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + }; + static_assert( sizeof( VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoBeginCodingInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() = default; + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + : m_pipelineLayout( pipelineLayout ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayout const & ) const = default; +#else + bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSet() = default; + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + : m_descriptorSet( descriptorSet ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSet const & ) const = default; +#else + bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == rhs.m_descriptorSet; + } + + bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != rhs.m_descriptorSet; + } + + bool operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet < rhs.m_descriptorSet; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Pipeline + { + public: + using CType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: + VULKAN_HPP_CONSTEXPR Pipeline() = default; + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Pipeline const & ) const = default; +#else + bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline < rhs.m_pipeline; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ImageBlit + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageBlit & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + ImageBlit & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } + + operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit const & ) const = default; +#else + bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); + } + + bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageSubresourceRange + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t baseMipLevel_ = {}, + uint32_t levelCount_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , baseMipLevel( baseMipLevel_ ) + , levelCount( levelCount_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceRange( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & + operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT + { + baseMipLevel = baseMipLevel_; + return *this; + } + + ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT + { + levelCount = levelCount_; + return *this; + } + + ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceRange const & ) const = default; +#else + bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && + ( levelCount == rhs.levelCount ) && ( baseArrayLayer == rhs.baseArrayLayer ) && + ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t baseMipLevel = {}; + uint32_t levelCount = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoCodingControlInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCodingControlInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & + operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCodingControlInfoKHR const & ) const = default; +# else + bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {}; + }; + static_assert( sizeof( VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoCodingControlInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct CopyAccelerationStructureInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & + operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; +#else + bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && + ( mode == rhs.mode ); + } + + bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CopyAccelerationStructureInfoKHR; + }; + + struct CopyAccelerationStructureToMemoryInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) + VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureToMemoryInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyAccelerationStructureToMemoryInfoKHR & + operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR & + operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyAccelerationStructureToMemoryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == + sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CopyAccelerationStructureToMemoryInfoKHR; + }; + + struct CopyBufferInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2KHR( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2KHR( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2KHR & + operator=( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2KHR & operator=( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyBufferInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyBufferInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + CopyBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + CopyBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyBufferInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferInfo2KHR const & ) const = default; +#else + bool operator==( CopyBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && + ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eCopyBufferInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyBufferInfo2KHR ) == sizeof( VkCopyBufferInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyBufferInfo2KHR; + }; + + struct CopyBufferToImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyBufferToImageInfo2KHR( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2KHR( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferToImageInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2KHR & + operator=( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2KHR & operator=( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyBufferToImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyBufferToImageInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + CopyBufferToImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + CopyBufferToImageInfo2KHR & + setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + CopyBufferToImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyBufferToImageInfo2KHR & + setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyBufferToImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferToImageInfo2KHR const & ) const = default; +#else + bool operator==( CopyBufferToImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyBufferToImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyBufferToImageInfo2KHR ) == sizeof( VkCopyBufferToImageInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyBufferToImageInfo2KHR; + }; + + struct ImageCopy + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageCopy & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy & setExtent( 
VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy const & ) const = default; +#else + bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImageCopy2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCopy2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageCopy2KHR & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy2KHR & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageCopy2KHR const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy2KHR const & ) const = default; +#else + bool operator==( ImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageCopy2KHR; + }; + + struct CopyImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2KHR & + operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return 
*this; + } + + CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageInfo2KHR const & ) const = default; +#else + bool operator==( CopyImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CopyImageInfo2KHR; + }; + + struct CopyImageToBufferInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToBufferInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2KHR & + operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyImageToBufferInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + CopyImageToBufferInfo2KHR & + setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + CopyImageToBufferInfo2KHR & + setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkCopyImageToBufferInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( CopyImageToBufferInfo2KHR const & ) const = default; +#else + bool operator==( CopyImageToBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstBuffer == rhs.dstBuffer ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + } + + bool operator!=( CopyImageToBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions = {}; + }; + static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyImageToBufferInfo2KHR; + }; + + struct CopyMemoryToAccelerationStructureInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) + VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : CopyMemoryToAccelerationStructureInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToAccelerationStructureInfoKHR & + operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR & + operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + CopyMemoryToAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == + sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CopyMemoryToAccelerationStructureInfoKHR; + }; + + class CuFunctionNVX + { + public: + using CType = VkCuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: + VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default; + VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT + : m_cuFunctionNVX( cuFunctionNVX ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = cuFunctionNVX; + return *this; + } +#endif + + CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuFunctionNVX const & ) const = default; +#else + bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == rhs.m_cuFunctionNVX; + } + + bool operator!=( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != rhs.m_cuFunctionNVX; + } + + bool operator<( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX < rhs.m_cuFunctionNVX; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == VK_NULL_HANDLE; + } + + private: + VkCuFunctionNVX m_cuFunctionNVX = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CuLaunchInfoNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {} ) VULKAN_HPP_NOEXCEPT + : function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( paramCount_ ) + , pParams( pParams_ ) + , extraCount( extraCount_ ) + , pExtras( pExtras_ ) + {} + + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT + { + function = function_; + return *this; + } + + CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + { + gridDimX = gridDimX_; + return *this; + } + + CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + { + gridDimY = gridDimY_; + return *this; + } + + CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + { + gridDimZ = gridDimZ_; + return *this; + } + + CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT + { + blockDimX = blockDimX_; + return *this; + } + + CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT + { + blockDimY = blockDimY_; + return *this; + } + + CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + { + blockDimZ = blockDimZ_; + return *this; + } + + CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + { + sharedMemBytes = sharedMemBytes_; + return *this; + } + + CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = paramCount_; + return *this; + } + + CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + { + pParams = pParams_; + return *this; + } + + CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) 
VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + + operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuLaunchInfoNVX const & ) const = default; +#else + bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && + ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && ( gridDimZ == rhs.gridDimZ ) && + ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && + ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && ( pExtras == rhs.pExtras ); + } + + bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; + static_assert( sizeof( CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = CuLaunchInfoNVX; + }; + + struct DebugMarkerMarkerInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, + std::array const & color_ = {} ) VULKAN_HPP_NOEXCEPT + : pMarkerName( pMarkerName_ ) + , color( color_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerMarkerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & + operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT + { + pMarkerName = pMarkerName_; + return *this; + } + + DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } + + operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerMarkerInfoEXT const & ) const = default; +#else + bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pMarkerName == rhs.pMarkerName ) && + ( color == rhs.color ); + } + + bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; + const void * pNext = {}; + const char * pMarkerName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; + static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DebugMarkerMarkerInfoEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Offset2D codedOffset_, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) + : flags( flags_ ) + , codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoDecodeInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VideoDecodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + 
{ + codedExtent = codedExtent_; + return *this; + } + + VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferOffset = srcBufferOffset_; + return *this; + } + + VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferRange = srcBufferRange_; + return *this; + } + + VideoDecodeInfoKHR & setDstPictureResource( + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + dstPictureResource = dstPictureResource_; + return *this; + } + + VideoDecodeInfoKHR & setPSetupReferenceSlot( + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VideoDecodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeInfoKHR const & ) const = default; +# else + bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) && + ( srcBuffer == rhs.srcBuffer ) && ( srcBufferOffset == rhs.srcBufferOffset ) && + ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); + } + + bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + }; + static_assert( sizeof( VideoDecodeInfoKHR ) == sizeof( 
VkVideoDecodeInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, + uint32_t qualityLevel_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , qualityLevel( qualityLevel_ ) + , codedExtent( codedExtent_ ) + , dstBitstreamBuffer( dstBitstreamBuffer_ ) + , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) + , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, + uint32_t qualityLevel_, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, + VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) + : flags( flags_ ) + , qualityLevel( qualityLevel_ ) + , codedExtent( codedExtent_ ) + , dstBitstreamBuffer( dstBitstreamBuffer_ ) + , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) + , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) 
VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoEncodeInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevel = qualityLevel_; + return *this; + } + + VideoEncodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VideoEncodeInfoKHR & setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBuffer = dstBitstreamBuffer_; + return *this; + } + + VideoEncodeInfoKHR & + setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBufferOffset = dstBitstreamBufferOffset_; + return *this; + } + + VideoEncodeInfoKHR & + setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_; + return *this; + } + + VideoEncodeInfoKHR & setSrcPictureResource( + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + srcPictureResource = srcPictureResource_; + return *this; + } + + VideoEncodeInfoKHR & setPSetupReferenceSlot( + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VideoEncodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeInfoKHR const & ) const = default; +# else + bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( qualityLevel == rhs.qualityLevel ) && ( codedExtent == rhs.codedExtent ) && + ( dstBitstreamBuffer == rhs.dstBitstreamBuffer ) && + ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset ) && + ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange ) && + ( srcPictureResource == rhs.srcPictureResource ) && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && + ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots ); + } + + bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; + uint32_t qualityLevel = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + 
+    VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {};
+    VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource = {};
+    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {};
+    uint32_t referenceSlotCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {};
+  };
+  static_assert( sizeof( VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeInfoKHR>
+  {
+    using Type = VideoEncodeInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct SubpassEndInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT {}
+
+    VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
+      return *this;
+    }
+
+    SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassEndInfo *>( this );
+    }
+
+    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassEndInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassEndInfo const & ) const = default;
+#else
+    bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
+    }
+
+    bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
+    const void * pNext = {};
+  };
+  static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassEndInfo>
+  {
+    using Type = SubpassEndInfo;
+  };
+  using SubpassEndInfoKHR = SubpassEndInfo;
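For illustration, a minimal usage sketch of SubpassEndInfo (not part of the patch; cmd and beginInfo are assumed to be a valid vk::CommandBuffer and vk::SubpassBeginInfo):

    // Hypothetical sketch: the 2-variant render-pass calls take this wrapper by reference.
    vk::SubpassEndInfo endInfo{};            // sType is pre-set; pNext defaults to nullptr
    cmd.nextSubpass2( beginInfo, endInfo );  // advance to the next subpass
    cmd.endRenderPass2( endInfo );           // end the render pass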
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEndCodingInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR &
+      operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEndCodingInfoKHR *>( this );
+    }
+
+    operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEndCodingInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
+# else
+    bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+    }
+
+    bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEndCodingInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
+  };
+  static_assert( sizeof( VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
+  {
+    using Type = VideoEndCodingInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
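For illustration, a minimal sketch (not part of the patch) of how this beta wrapper is used, assuming VK_ENABLE_BETA_EXTENSIONS is defined and cmd is a valid vk::CommandBuffer:

    // Hypothetical sketch; video coding is a provisional extension and may change.
    vk::VideoEndCodingInfoKHR endCoding{};  // flags default to empty
    cmd.endVideoCodingKHR( endCoding );     // closes a scope opened by beginVideoCodingKHR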
+
+  class IndirectCommandsLayoutNV
+  {
+  public:
+    using CType = VkIndirectCommandsLayoutNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default;
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT
+      IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
+      return *this;
+    }
+#endif
+
+    IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNV = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( IndirectCommandsLayoutNV const & ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
+    }
+
+    bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
+    }
+
+    bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ),
+                 "handle and wrapper have different size!" );
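For illustration, the null-handle semantics shared by all handle wrappers generated this way, sketched with this type (not part of the patch):

    // Hypothetical sketch: default construction yields VK_NULL_HANDLE.
    vk::IndirectCommandsLayoutNV layout{};
    bool isNull = !layout;  // operator! tests against VK_NULL_HANDLE
    layout = nullptr;       // assignment from nullptr resets the handle
    if ( layout ) { /* reached only once a real handle has been assigned */ }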
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eIndirectCommandsLayoutNV>
+  {
+    using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct IndirectCommandsStreamNV
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV &
+      operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
+      return *this;
+    }
+
+    IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
+    }
+
+    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
+#else
+    bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer ) && ( offset == rhs.offset );
+    }
+
+    bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+  };
+  static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!"
); + + struct GeneratedCommandsInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( streamCount_ ) + , pStreams( pStreams_ ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + streams_, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( static_cast( streams_.size() ) ) + , pStreams( streams_.data() ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV & operator=( 
VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeneratedCommandsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + GeneratedCommandsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + GeneratedCommandsInfoNV & + setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT + { + pStreams = pStreams_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV & setStreams( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + streams_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streams_.size() ); + pStreams = streams_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCount = sequencesCount_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT + { + preprocessBuffer = preprocessBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & + setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } + + GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + GeneratedCommandsInfoNV & + setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } + + operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint 
) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && + ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && + ( preprocessOffset == rhs.preprocessOffset ) && ( preprocessSize == rhs.preprocessSize ) && + ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && + ( sequencesCountOffset == rhs.sequencesCountOffset ) && + ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && + ( sequencesIndexOffset == rhs.sequencesIndexOffset ); + } + + bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + }; + static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+  );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
+  {
+    using Type = GeneratedCommandsInfoNV;
+  };
+
+  struct MemoryBarrier
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+                                        VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+    MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryBarrier *>( this );
+    }
+
+    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryBarrier *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryBarrier const & ) const = default;
+#else
+    bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+             ( dstAccessMask == rhs.dstAccessMask );
+    }
+
+    bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+  };
+  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryBarrier>
+  {
+    using Type = MemoryBarrier;
+  };
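For illustration, a minimal sketch (not part of the patch) of recording a global memory barrier with this wrapper, assuming cmd is a valid vk::CommandBuffer in the recording state:

    // Hypothetical sketch: make a transfer write visible to fragment-shader reads.
    auto barrier = vk::MemoryBarrier{}
                       .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
                       .setDstAccessMask( vk::AccessFlagBits::eShaderRead );
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
                         {}, barrier, {}, {} );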
+
+  struct ImageMemoryBarrier
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+                          VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+                          VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                          VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                          uint32_t srcQueueFamilyIndex_ = {},
+                          uint32_t dstQueueFamilyIndex_ = {},
+                          VULKAN_HPP_NAMESPACE::Image image_ = {},
+                          VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , oldLayout( oldLayout_ )
+      , newLayout( newLayout_ )
+      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+      , image( image_ )
+      , subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
+      operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
+      return *this;
+    }
+
+    ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageMemoryBarrier &
+      setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+    operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier *>( this );
+    }
+
+    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageMemoryBarrier const & ) const = default;
+#else
+    bool operator==(
ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && + ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageMemoryBarrier; + }; + + struct MemoryBarrier2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryBarrier2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryBarrier2KHR( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2KHR( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryBarrier2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2KHR & + operator=( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2KHR & operator=( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + MemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryBarrier2KHR & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + MemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + MemoryBarrier2KHR & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + MemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask 
= dstAccessMask_; + return *this; + } + + operator VkMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier2KHR const & ) const = default; +#else + bool operator==( MemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ); + } + + bool operator!=( MemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {}; + }; + static_assert( sizeof( MemoryBarrier2KHR ) == sizeof( VkMemoryBarrier2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = MemoryBarrier2KHR; + }; + + struct ImageMemoryBarrier2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR( + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier2KHR( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryBarrier2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2KHR & + operator=( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier2KHR & operator=( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageMemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageMemoryBarrier2KHR & + 
setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + ImageMemoryBarrier2KHR & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + ImageMemoryBarrier2KHR & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + ImageMemoryBarrier2KHR & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + ImageMemoryBarrier2KHR & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + ImageMemoryBarrier2KHR & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + ImageMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier2KHR & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + ImageMemoryBarrier2KHR & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } + + operator VkImageMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier2KHR const & ) const = default; +#else + bool operator==( ImageMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && + ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + static_assert( 
sizeof( ImageMemoryBarrier2KHR ) == sizeof( VkImageMemoryBarrier2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageMemoryBarrier2KHR; + }; + + struct DependencyInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DependencyInfoKHR( + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + uint32_t memoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR * pMemoryBarriers_ = {}, + uint32_t bufferMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers_ = {}, + uint32_t imageMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR * pImageMemoryBarriers_ = {} ) VULKAN_HPP_NOEXCEPT + : dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( memoryBarrierCount_ ) + , pMemoryBarriers( pMemoryBarriers_ ) + , bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ) + , pBufferMemoryBarriers( pBufferMemoryBarriers_ ) + , imageMemoryBarrierCount( imageMemoryBarrierCount_ ) + , pImageMemoryBarriers( pImageMemoryBarriers_ ) + {} + + VULKAN_HPP_CONSTEXPR DependencyInfoKHR( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DependencyInfoKHR( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DependencyInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfoKHR( + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + memoryBarriers_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferMemoryBarriers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageMemoryBarriers_ = {} ) + : dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( static_cast( memoryBarriers_.size() ) ) + , pMemoryBarriers( memoryBarriers_.data() ) + , bufferMemoryBarrierCount( static_cast( bufferMemoryBarriers_.size() ) ) + , pBufferMemoryBarriers( bufferMemoryBarriers_.data() ) + , imageMemoryBarrierCount( static_cast( imageMemoryBarriers_.size() ) ) + , pImageMemoryBarriers( imageMemoryBarriers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DependencyInfoKHR & + operator=( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DependencyInfoKHR & operator=( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DependencyInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DependencyInfoKHR & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + DependencyInfoKHR & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + memoryBarrierCount = memoryBarrierCount_; + return *this; + } + + DependencyInfoKHR & + setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pMemoryBarriers = pMemoryBarriers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfoKHR & setMemoryBarriers( + 
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR> const &
+        memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
+      pMemoryBarriers    = memoryBarriers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DependencyInfoKHR & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
+      return *this;
+    }
+
+    DependencyInfoKHR & setPBufferMemoryBarriers(
+      const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferMemoryBarriers = pBufferMemoryBarriers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfoKHR & setBufferMemoryBarriers(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR> const &
+        bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
+      pBufferMemoryBarriers    = bufferMemoryBarriers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DependencyInfoKHR & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageMemoryBarrierCount = imageMemoryBarrierCount_;
+      return *this;
+    }
+
+    DependencyInfoKHR & setPImageMemoryBarriers(
+      const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageMemoryBarriers = pImageMemoryBarriers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfoKHR & setImageMemoryBarriers(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR> const &
+        imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
+      pImageMemoryBarriers    = imageMemoryBarriers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkDependencyInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDependencyInfoKHR *>( this );
+    }
+
+    operator VkDependencyInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDependencyInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DependencyInfoKHR const & ) const = default;
+#else
+    bool operator==( DependencyInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) &&
+             ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) &&
+             ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) &&
+             ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) &&
+             ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) &&
+             ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
+    }
+
+    bool operator!=( DependencyInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+    uint32_t memoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR * pMemoryBarriers = {};
+    uint32_t bufferMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers = {};
+    uint32_t imageMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR * pImageMemoryBarriers = {};
+  };
+  static_assert( sizeof( DependencyInfoKHR ) == sizeof( VkDependencyInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DependencyInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDependencyInfoKHR>
+  {
+    using Type = DependencyInfoKHR;
+  };
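For illustration, a minimal sketch (not part of the patch) of the synchronization2 path, assuming VK_KHR_synchronization2 is enabled and cmd is a valid vk::CommandBuffer:

    // Hypothetical sketch: a transfer-to-fragment dependency expressed through DependencyInfoKHR.
    auto barrier2 = vk::MemoryBarrier2KHR{}
                        .setSrcStageMask( vk::PipelineStageFlagBits2KHR::eTransfer )
                        .setSrcAccessMask( vk::AccessFlagBits2KHR::eTransferWrite )
                        .setDstStageMask( vk::PipelineStageFlagBits2KHR::eFragmentShader )
                        .setDstAccessMask( vk::AccessFlagBits2KHR::eShaderRead );
    auto depInfo = vk::DependencyInfoKHR{}.setMemoryBarriers( barrier2 );
    cmd.pipelineBarrier2KHR( depInfo );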
+
+  class Sampler
+  {
+  public:
+    using CType = VkSampler;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Sampler() = default;
+    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = sampler;
+      return *this;
+    }
+#endif
+
+    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Sampler const & ) const = default;
+#else
+    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == rhs.m_sampler;
+    }
+
+    bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != rhs.m_sampler;
+    }
+
+    bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler < rhs.m_sampler;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSampler m_sampler = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ),
+                 "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSampler>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Sampler;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct DescriptorImageInfo
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ =
+                                                VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : sampler( sampler_ )
+      , imageView( imageView_ )
+      , imageLayout( imageLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &
+      operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorImageInfo *>( this );
+    }
+
+    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorImageInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorImageInfo const & ) const = default;
+#else
+    bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
+    }
+
+    bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorBufferInfo
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , range( range_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo &
+      operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
+      return *this;
+    }
+
+    DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+    operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferInfo *>( this );
+    }
+
+    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorBufferInfo const & ) const = default;
+#else
+    bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range );
+    }
+
+    bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+  };
+  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!"
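+
+  // [Editorial example -- not part of the generated header] The buffer analogue: offset and
+  // range are byte counts, and VK_WHOLE_SIZE covers the remainder of the buffer
+  // (assuming a vk::Buffer `ubo` created elsewhere):
+  //
+  //   vk::DescriptorBufferInfo bufferInfo{ ubo, 0, VK_WHOLE_SIZE };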
); + + class BufferView + { + public: + using CType = VkBufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + VULKAN_HPP_CONSTEXPR BufferView() = default; + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferView const & ) const = default; +#else + bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == rhs.m_bufferView; + } + + bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != rhs.m_bufferView; + } + + bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView < rhs.m_bufferView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct WriteDescriptorSet + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , pImageInfo( pImageInfo_ ) + , pBufferInfo( pBufferInfo_ ) + , pTexelBufferView( pTexelBufferView_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSet( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet( + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, + uint32_t dstBinding_, + uint32_t dstArrayElement_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferInfo_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + texelBufferView_ = {} ) + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( static_cast( !imageInfo_.empty() ? imageInfo_.size() + : !bufferInfo_.empty() ? 
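+        // [Editorial note, added for clarity] This nested ternary takes descriptorCount from
+        // whichever proxy is non-empty; the constructor body below then enforces that exactly
+        // one of imageInfo_, bufferInfo_ and texelBufferView_ is non-empty -- throwing
+        // vk::LogicError (or asserting under VULKAN_HPP_NO_EXCEPTIONS) otherwise, since a
+        // single VkWriteDescriptorSet can only carry one kind of descriptor payload.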
bufferInfo_.size() + : texelBufferView_.size() ) ) + , descriptorType( descriptorType_ ) + , pImageInfo( imageInfo_.data() ) + , pBufferInfo( bufferInfo_.data() ) + , pTexelBufferView( texelBufferView_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 ); +# else + if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & + operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + WriteDescriptorSet & + setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT + { + pImageInfo = pImageInfo_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setImageInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( imageInfo_.size() ); + pImageInfo = imageInfo_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSet & + setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + pBufferInfo = pBufferInfo_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setBufferInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( bufferInfo_.size() ); + pBufferInfo = bufferInfo_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSet & + setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setTexelBufferView( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ ) + VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( texelBufferView_.size() ); + pTexelBufferView = texelBufferView_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator 
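+    // [Editorial example -- not part of the generated header] Enhanced-mode usage pairs the
+    // ArrayProxy setters with vk::Device::updateDescriptorSets; descriptorCount is derived
+    // from the proxy size (assuming `set`, `device` and the `bufferInfo` sketched earlier):
+    //
+    //   vk::WriteDescriptorSet write = vk::WriteDescriptorSet{}
+    //                                    .setDstSet( set )
+    //                                    .setDstBinding( 0 )
+    //                                    .setDescriptorType( vk::DescriptorType::eUniformBuffer )
+    //                                    .setBufferInfo( bufferInfo );
+    //   device.updateDescriptorSets( write, {} );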
VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSet const & ) const = default; +#else + bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && + ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && + ( pTexelBufferView == rhs.pTexelBufferView ); + } + + bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; + }; + static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSet; + }; + + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + public: + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default; + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif + + DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplate const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; + } + + bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; + } + + bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } + + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; + + class Event + { + public: + using CType = VkEvent; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: + VULKAN_HPP_CONSTEXPR Event() = default; + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_event = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Event const & ) const = default; +#else + bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event == rhs.m_event; + } + + bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event != rhs.m_event; + } + + bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event < rhs.m_event; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct ImageResolve + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageResolve( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageResolve & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve const & ) const = default; +#else + bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
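+
+  // [Editorial example -- not part of the generated header] An ImageResolve region for
+  // vk::CommandBuffer::resolveImage, resolving mip 0 of a multisampled color image
+  // (`width` and `height` are assumed to match both images):
+  //
+  //   vk::ImageResolve region = vk::ImageResolve{}
+  //                               .setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+  //                               .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+  //                               .setExtent( { width, height, 1 } );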
); + + struct ImageResolve2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageResolve2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageResolve2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageResolve2KHR & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve2KHR & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + operator VkImageResolve2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve2KHR const & ) const = default; +#else + bool operator==( ImageResolve2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), + "struct and wrapper 
have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ImageResolve2KHR; + }; + + struct ResolveImageInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ResolveImageInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2KHR( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR & + operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ResolveImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2KHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( 
regions_.size() ); + pRegions = regions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkResolveImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ResolveImageInfo2KHR const & ) const = default; +#else + bool operator==( ResolveImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ); + } + + bool operator!=( ResolveImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions = {}; + }; + static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ResolveImageInfo2KHR; + }; + + struct PerformanceMarkerInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT : marker( marker_ ) {} + + VULKAN_HPP_CONSTEXPR + PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & + operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } + + operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( marker == rhs.marker ); + } + + bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; + const void * pNext = {}; + uint64_t marker = {}; + }; + static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceMarkerInfoINTEL; + }; + + struct PerformanceOverrideInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, + VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, + uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , enable( enable_ ) + , parameter( parameter_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceOverrideInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & + operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceOverrideInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT + { + enable = enable_; + return *this; + } + + PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT + { + parameter = parameter_; + return *this; + } + + operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && + ( parameter == rhs.parameter ); + } + + bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; + VULKAN_HPP_NAMESPACE::Bool32 
enable = {}; + uint64_t parameter = {}; + }; + static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceOverrideInfoINTEL; + }; + + struct PerformanceStreamMarkerInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT + : marker( marker_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & + operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } + + operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); + } + + bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; + const void * pNext = {}; + uint32_t marker = {}; + }; + static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PerformanceStreamMarkerInfoINTEL; + }; + + struct VertexInputBindingDescription2EXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVertexInputBindingDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( + uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, + uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + , divisor( divisor_ ) + {} + + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & + operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VertexInputBindingDescription2EXT & + setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } + + VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } + + operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && + ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && ( divisor == rhs.divisor ); + } + + bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; + void * pNext = {}; + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + uint32_t divisor = {}; + }; + static_assert( sizeof( VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
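+
+  // [Editorial example -- not part of the generated header] This struct feeds
+  // VK_EXT_vertex_input_dynamic_state; `divisor` is only meaningful for eInstance input
+  // rates and must be 1 otherwise. One per-vertex binding (hypothetical `Vertex` type):
+  //
+  //   vk::VertexInputBindingDescription2EXT binding{ 0, sizeof( Vertex ),
+  //                                                  vk::VertexInputRate::eVertex, 1 };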
); + + template <> + struct CppType + { + using Type = VertexInputBindingDescription2EXT; + }; + + struct VertexInputAttributeDescription2EXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVertexInputAttributeDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( + uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & + operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT & + operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && + ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); + } + + bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; + void * pNext = {}; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + static_assert( sizeof( VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
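+
+  // [Editorial example -- not part of the generated header] Paired with the binding above,
+  // attributes are handed to vk::CommandBuffer::setVertexInputEXT (hypothetical `Vertex::pos`
+  // member and recording command buffer `cmd`):
+  //
+  //   vk::VertexInputAttributeDescription2EXT attribute{ 0, 0, vk::Format::eR32G32B32Sfloat,
+  //                                                      offsetof( Vertex, pos ) };
+  //   cmd.setVertexInputEXT( binding, attribute );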
); + + template <> + struct CppType + { + using Type = VertexInputAttributeDescription2EXT; + }; + + struct ShadingRatePaletteNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( + uint32_t shadingRatePaletteEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) + , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) + {} + + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ShadingRatePaletteNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ) + , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & + operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + return *this; + } + + ShadingRatePaletteNV & setPShadingRatePaletteEntries( + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV & setShadingRatePaletteEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); + pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShadingRatePaletteNV const & ) const = default; +#else + bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && + ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); + } + + bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t shadingRatePaletteEntryCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {}; + }; + static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct ViewportWScalingNV
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
+      : xcoeff( xcoeff_ )
+      , ycoeff( ycoeff_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV &
+      operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
+      return *this;
+    }
+
+    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xcoeff = xcoeff_;
+      return *this;
+    }
+
+    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycoeff = ycoeff_;
+      return *this;
+    }
+
+    operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportWScalingNV *>( this );
+    }
+
+    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportWScalingNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ViewportWScalingNV const & ) const = default;
+#else
+    bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff );
+    }
+
+    bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float xcoeff = {};
+    float ycoeff = {};
+  };
+  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct StridedDeviceAddressRegionKHR
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : deviceAddress( deviceAddress_ )
+      , stride( stride_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &
+      operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
+      return *this;
+    }
+
+    StridedDeviceAddressRegionKHR &
+      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( this );
+    }
+
+    operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStridedDeviceAddressRegionKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default;
+#else
+    bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size );
+    }
+
+    bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+  };
+  static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value,
+                 "struct wrapper is not a standard layout!"
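+
+  // [Editorial example -- not part of the generated header] These regions describe
+  // shader-binding-table ranges for vk::CommandBuffer::traceRaysKHR
+  // (VK_KHR_ray_tracing_pipeline); `sbtAddress` and `handleStride` are assumed:
+  //
+  //   vk::StridedDeviceAddressRegionKHR raygen{ sbtAddress, handleStride, handleStride };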
); + + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() = default; + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + : m_commandBuffer( commandBuffer ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = commandBuffer; + return *this; + } +#endif + + CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBuffer const & ) const = default; +#else + bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer < rhs.m_commandBuffer; + } +#endif + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result + begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + begin( const CommandBufferBeginInfo & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewport( uint32_t firstViewport, + ArrayProxy const & viewports, + Dispatch const & d 
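+    // [Editorial example -- not part of the generated header] In enhanced mode begin()/end()
+    // throw on failure instead of returning vk::Result. Recording sketch (assuming a command
+    // buffer `cmd`, a pipeline `pipeline`, and floats `w`/`h`):
+    //
+    //   cmd.begin( vk::CommandBufferBeginInfo{ vk::CommandBufferUsageFlagBits::eOneTimeSubmit } );
+    //   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
+    //   cmd.setViewport( 0, vk::Viewport{ 0.0f, 0.0f, w, h, 0.0f, 1.0f } );
+    //   cmd.end();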
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissor( uint32_t firstScissor, + ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setLineWidth( float lineWidth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setBlendConstants( const float blendConstants[4], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy const & descriptorSets, + ArrayProxy const & dynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif 
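+    // [Editorial note, added for clarity] The ArrayProxy overload of bindVertexBuffers above
+    // requires buffers.size() == offsets.size() and throws vk::LogicError (or asserts under
+    // VULKAN_HPP_NO_EXCEPTIONS) on mismatch -- hence VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS.
+    // Sketch (assuming a vk::Buffer `vbo`):
+    //
+    //   cmd.bindVertexBuffers( 0, vbo, vk::DeviceSize{ 0 } );
+    //   cmd.draw( 3, 1, 0, 0 );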
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const 
& d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearColorValue & color, + ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearAttachments( ArrayProxy const & attachments, + ArrayProxy const & rects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents( ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * 
pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endRenderPass( Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + void setDeviceMask( uint32_t deviceMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_marker === + + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
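// Editor's sketch, not generated code: as elsewhere in this class, the
// enhanced overload below takes the DebugMarkerMarkerInfoEXT by reference
// rather than by pointer. Assuming VK_EXT_debug_marker is enabled and `cmd`
// is recording (illustrative names only):
//
//   vk::DebugMarkerMarkerInfoEXT marker{};
//   marker.pMarkerName = "shadow pass";
//   cmd.debugMarkerBeginEXT( marker );
//   // ... record the pass ...
//   cmd.debugMarkerEndEXT();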
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_KHR_video_queue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_KHR_video_decode_queue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    //=== VK_EXT_transform_feedback ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t                                 firstBinding,
+                                          uint32_t                                 bindingCount,
+                                          const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
+                                          const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                          const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
+      VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT(
+      uint32_t                                                   firstBinding,
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+      Dispatch const & d
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NVX_binary_import === + + template + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_draw_indirect_count === + + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + template + void setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_push_descriptor === + + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_conditional_rendering === + + template + void beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_clip_space_w_scaling === + + template + void setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWScalingNV( uint32_t firstViewport, + ArrayProxy const & viewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_discard_rectangles === + + template + void + setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_create_renderpass2 === + + template + void 
beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_utils === + + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_sample_locations === + + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_acceleration_structure === + + template + void buildAccelerationStructuresKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
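// Editor's sketch, not generated code: the enhanced overload that follows
// pairs each AccelerationStructureBuildGeometryInfoKHR with a pointer to its
// array of build ranges through two parallel ArrayProxy parameters; both
// proxies must be the same length, which is why that variant is only noexcept
// when exceptions are disabled. A single-info build might look like this,
// assuming VK_KHR_acceleration_structure and valid handles (names are
// illustrative):
//
//   vk::AccelerationStructureBuildGeometryInfoKHR buildInfo = /* ... */;
//   const vk::AccelerationStructureBuildRangeInfoKHR * ranges = /* ... */;
//   cmd.buildAccelerationStructuresKHR( buildInfo, ranges );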
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void buildAccelerationStructuresIndirectKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_shading_rate_image === + + template + void + bindShadingRateImageNV( 
VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportShadingRatePaletteNV( + uint32_t firstViewport, + ArrayProxy const & shadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & customSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_ray_tracing === + + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize 
callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_draw_indirect_count === + + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_buffer_marker === + + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_mesh_shader === + + template + void drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_scissor_exclusive === + + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy const & exclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_device_diagnostic_checkpoints === + + template + void setCheckpointNV( 
const void * pCheckpointMarker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_INTEL_performance_query === + + template + VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_fragment_shading_rate === + + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setFragmentShadingRateKHR( const Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_line_rasterization === + + template + void setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state === + + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setViewportWithCountEXT( ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + setScissorWithCountEXT( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setScissorWithCountEXT( ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers2EXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void + setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands === + + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * 
pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const DependencyInfoKHR & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_fragment_shading_rate_enums === + + template + void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_copy_commands2 === + + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_ray_tracing_pipeline === + + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_vertex_input_dynamic_state === + + template + void + setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_extended_dynamic_state2 === + + template + void + setPatchControlPointsEXT( uint32_t patchControlPoints, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + 
void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_color_write_enable === + + template + void + setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setColorWriteEnableEXT( ArrayProxy const & colorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CommandBufferSubmitInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfoKHR( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, + uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : commandBuffer( commandBuffer_ ) + , deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + CommandBufferSubmitInfoKHR( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferSubmitInfoKHR( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfoKHR & + operator=( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferSubmitInfoKHR & operator=( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CommandBufferSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CommandBufferSubmitInfoKHR & + setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT + { + commandBuffer = commandBuffer_; + return *this; + } + + CommandBufferSubmitInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + operator VkCommandBufferSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferSubmitInfoKHR const & ) const = default; +#else + bool operator==( CommandBufferSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && + ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( CommandBufferSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {}; + uint32_t deviceMask = {}; + }; + static_assert( sizeof( CommandBufferSubmitInfoKHR ) == sizeof( VkCommandBufferSubmitInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = CommandBufferSubmitInfoKHR; + }; + struct CommandPoolCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , queueFamilyIndex( queueFamilyIndex_ ) {} - CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CommandPoolCreateInfo ) - offsetof( CommandPoolCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : CommandPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & + operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -29998,45 +39191,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CommandPoolCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPoolCreateInfo const & ) const = default; #else - bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == 
rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ); } - bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; }; - static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -30045,26 +39232,126 @@ namespace VULKAN_HPP_NAMESPACE using Type = CommandPoolCreateInfo; }; + class ShaderModule + { + public: + using CType = VkShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: + VULKAN_HPP_CONSTEXPR ShaderModule() = default; + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + : m_shaderModule( shaderModule ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModule const & ) const = default; +#else + bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == rhs.m_shaderModule; + } + + bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != rhs.m_shaderModule; + } + + bool operator<( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule < rhs.m_shaderModule; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
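// NOTE (illustrative sketch, not part of the generated header): typical use of the
// reworked CommandPoolCreateInfo, assuming a valid vk::Device `device` and a
// hypothetical `queueFamilyIndex` chosen elsewhere:
//
//   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
//                                       queueFamilyIndex );
//   vk::UniqueCommandPool pool = device.createCommandPoolUnique( poolInfo );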
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + struct SpecializationMapEntry { - - - VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, - uint32_t offset_ = {}, - size_t size_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT : constantID( constantID_ ) , offset( offset_ ) , size( size_ ) {} - SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationMapEntry( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & + operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -30086,67 +39373,77 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT + operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SpecializationMapEntry const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationMapEntry const & ) const = default; #else - bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( constantID == rhs.constantID ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); + return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size ); } - bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: uint32_t constantID = {}; - uint32_t offset = {}; - size_t size = {}; - + uint32_t offset = {}; + size_t size = {}; }; - static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" ); + static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct SpecializationInfo { - - - VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, - size_t dataSize_ = {}, - const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, + size_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT : mapEntryCount( mapEntryCount_ ) , pMapEntries( pMapEntries_ ) , dataSize( dataSize_ ) , pData( pData_ ) {} - SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) + : mapEntryCount( static_cast( mapEntries_.size() ) ) + , pMapEntries( mapEntries_.data() ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & + operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -30156,75 +39453,94 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT + SpecializationInfo & + setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT { pMapEntries = pMapEntries_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SpecializationInfo & setMapEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + mapEntries_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = static_cast( mapEntries_.size() ); + pMapEntries = mapEntries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } - SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT + SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } - - operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo & + setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
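// NOTE (illustrative sketch, not part of the generated header): a
// SpecializationMapEntry / SpecializationInfo pair wires compile-time constants into
// a shader stage. The new ArrayProxyNoTemporaries constructor is a convenience over
// the raw count/pointer form shown here:
//
//   float value = 0.5f;                                        // specialization data
//   vk::SpecializationMapEntry entry( 0 /*constantID*/, 0 /*offset*/, sizeof( float ) );
//   vk::SpecializationInfo specInfo( 1, &entry, sizeof( value ), &value );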
*reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SpecializationInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationInfo const & ) const = default; #else - bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( mapEntryCount == rhs.mapEntryCount ) - && ( pMapEntries == rhs.pMapEntries ) - && ( dataSize == rhs.dataSize ) - && ( pData == rhs.pData ); + return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && + ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); } - bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t mapEntryCount = {}; - const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {}; - size_t dataSize = {}; - const void* pData = {}; - + uint32_t mapEntryCount = {}; + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; + size_t dataSize = {}; + const void * pData = {}; }; - static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct PipelineShaderStageCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; - VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, - VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, - const char* pName_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, + const char * pName_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , stage( stage_ ) , module( module_ ) @@ -30232,30 +39548,31 @@ namespace VULKAN_HPP_NAMESPACE , pSpecializationInfo( pSpecializationInfo_ ) {} - PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & 
rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -30273,64 +39590,57 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; } - PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & setPSpecializationInfo( + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return *this; } - - operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageCreateInfo const & ) const = default; #else - bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stage == rhs.stage ) - && ( module == rhs.module ) - && ( pName == rhs.pName ) - && ( pSpecializationInfo == rhs.pSpecializationInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( module == rhs.module ) && ( pName == rhs.pName ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); } - bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; - VULKAN_HPP_NAMESPACE::ShaderModule module = {}; - const char* pName = {}; - const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + const 
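// NOTE (illustrative sketch, not part of the generated header): the setters above
// return *this, so a stage description chains naturally. Assumes a previously
// created vk::ShaderModule `computeShader` (hypothetical) and the `specInfo`
// from the sketch above:
//
//   vk::PipelineShaderStageCreateInfo stage{};
//   stage.setStage( vk::ShaderStageFlagBits::eCompute )
//        .setModule( computeShader )
//        .setPName( "main" )
//        .setPSpecializationInfo( &specInfo );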
void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderModule module = {}; + const char * pName = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; }; - static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -30340,13 +39650,14 @@ namespace VULKAN_HPP_NAMESPACE struct ComputePipelineCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; - VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , stage( stage_ ) @@ -30355,24 +39666,24 @@ namespace VULKAN_HPP_NAMESPACE , basePipelineIndex( basePipelineIndex_ ) {} - ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ComputePipelineCreateInfo ) - offsetof( ComputePipelineCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & + operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -30384,7 +39695,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & + setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; @@ 
-30396,7 +39708,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; @@ -30408,52 +39721,45 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ComputePipelineCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineCreateInfo const & ) const = default; #else - bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stage == rhs.stage ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); } - bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; - static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
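// NOTE (illustrative sketch, not part of the generated header): feeding the stage
// into a compute pipeline, assuming `device` and `pipelineLayout` handles exist.
// In this header revision pipeline creation returns a ResultValue carrying both
// the vk::Result and the created object:
//
//   vk::ComputePipelineCreateInfo ci{};
//   ci.setStage( stage ).setLayout( pipelineLayout );
//   auto rv = device.createComputePipeline( nullptr /*pipeline cache*/, ci );
//   vk::Pipeline pipeline = rv.value;   // rv.result may be eSuccess or a pipeline-compile code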
); template <> struct CppType @@ -30461,131 +39767,32 @@ namespace VULKAN_HPP_NAMESPACE using Type = ComputePipelineCreateInfo; }; - struct ConditionalRenderingBeginInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; - - VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) - , flags( flags_ ) - {} - - ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) ); - return *this; - } - - ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - - operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default; -#else - bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( flags == rhs.flags ); - } - - bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; - - }; - static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ConditionalRenderingBeginInfoEXT; - }; - struct ConformanceVersion { - - - VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, - uint8_t minor_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, + uint8_t minor_ = {}, uint8_t subminor_ = {}, - uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT + uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT : major( major_ ) , minor( minor_ ) , subminor( subminor_ ) , patch( patch_ ) {} - ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ConformanceVersion& operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + : ConformanceVersion( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & + operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -30613,60 +39820,56 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT + operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ConformanceVersion const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConformanceVersion const & ) const = default; #else - bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( major == rhs.major ) - && ( minor == rhs.minor ) - && ( subminor == rhs.subminor ) - && ( patch == rhs.patch ); + return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); } - bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint8_t major = {}; - uint8_t minor = {}; + uint8_t major = {}; + uint8_t minor = {}; uint8_t subminor = {}; - uint8_t patch = {}; - + uint8_t patch = {}; }; - static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" ); + static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + using ConformanceVersionKHR = ConformanceVersion; struct CooperativeMatrixPropertiesNV { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; - VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, - uint32_t NSize_ = {}, - uint32_t KSize_ = {}, - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( + uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT : MSize( MSize_ ) , NSize( NSize_ ) , KSize( KSize_ ) @@ -30677,24 +39880,24 @@ namespace VULKAN_HPP_NAMESPACE , scope( scope_ ) {} - CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & + operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -30748,58 +39951,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = 
default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; #else - bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( MSize == rhs.MSize ) - && ( NSize == rhs.NSize ) - && ( KSize == rhs.KSize ) - && ( AType == rhs.AType ) - && ( BType == rhs.BType ) - && ( CType == rhs.CType ) - && ( DType == rhs.DType ) - && ( scope == rhs.scope ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && + ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( DType == rhs.DType ) && ( scope == rhs.scope ); } - bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; - void* pNext = {}; - uint32_t MSize = {}; - uint32_t NSize = {}; - uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; - + VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; }; - static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
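// NOTE (illustrative sketch, not part of the generated header): these properties
// are enumerated by the driver rather than filled in by the application. With the
// VK_NV_cooperative_matrix extension enabled, and assuming a vk::PhysicalDevice
// `physicalDevice`, the supported matrix shapes can be queried like so:
//
//   std::vector<vk::CooperativeMatrixPropertiesNV> shapes =
//       physicalDevice.getCooperativeMatrixPropertiesNV();
//   for ( auto const & p : shapes )
//     if ( p.AType == vk::ComponentTypeNV::eFloat16 && p.scope == vk::ScopeNV::eSubgroup )
//       /* a usable fp16 shape: p.MSize x p.NSize x p.KSize */;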
); template <> struct CppType @@ -30807,209 +40000,101 @@ namespace VULKAN_HPP_NAMESPACE using Type = CooperativeMatrixPropertiesNV; }; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct CopyAccelerationStructureInfoKHR + struct CopyCommandTransformInfoQCOM { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; - VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT - : src( src_ ) - , dst( dst_ ) - , mode( mode_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) {} - CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & + operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CopyAccelerationStructureInfoKHR ) - offsetof( CopyAccelerationStructureInfoKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CopyAccelerationStructureInfoKHR& operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - CopyAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + CopyCommandTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { - src = src_; + transform = transform_; return *this; } - CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - dst = dst_; - return *this; + return *reinterpret_cast( this ); } - CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { - mode = mode_; - return *this; + return *reinterpret_cast( this ); } - - operator VkCopyAccelerationStructureInfoKHR const&() const 
VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CopyAccelerationStructureInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; #else - bool operator==( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( src == rhs.src ) - && ( dst == rhs.dst ) - && ( mode == rhs.mode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); } - bool operator!=( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; }; - static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
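// NOTE (illustrative sketch, not part of the generated header):
// CopyCommandTransformInfoQCOM is a pNext extension struct; per the
// VK_QCOM_rotated_copy_commands extension it chains onto BufferImageCopy2KHR /
// ImageBlit2KHR regions to rotate a copy in flight:
//
//   vk::CopyCommandTransformInfoQCOM xform( vk::SurfaceTransformFlagBitsKHR::eRotate90 );
//   vk::BufferImageCopy2KHR region{};
//   region.setPNext( &xform );   // rotate this region while copying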
); template <> - struct CppType + struct CppType { - using Type = CopyAccelerationStructureInfoKHR; + using Type = CopyCommandTransformInfoQCOM; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct CopyAccelerationStructureToMemoryInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; - - CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT - : src( src_ ) - , dst( dst_ ) - , mode( mode_ ) - {} - - CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CopyAccelerationStructureToMemoryInfoKHR ) - offsetof( CopyAccelerationStructureToMemoryInfoKHR, pNext ) ); - return *this; - } - - CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - CopyAccelerationStructureToMemoryInfoKHR& operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT - { - src = src_; - return *this; - } - - CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT - { - dst = dst_; - return *this; - } - - CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT - { - mode = mode_; - return *this; - } - - - operator VkCopyAccelerationStructureToMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; - - }; - static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = CopyAccelerationStructureToMemoryInfoKHR; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct CopyDescriptorSet { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; - VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, - uint32_t srcBinding_ = {}, - uint32_t srcArrayElement_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, - uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, + uint32_t srcBinding_ = {}, + uint32_t srcArrayElement_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : srcSet( srcSet_ ) , srcBinding( srcBinding_ ) @@ -31020,24 +40105,23 @@ namespace VULKAN_HPP_NAMESPACE , descriptorCount( descriptorCount_ ) {} - CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( CopyDescriptorSet ) - offsetof( CopyDescriptorSet, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & + operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -31085,55 +40169,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT + operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( CopyDescriptorSet const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyDescriptorSet const & ) const = default; #else - bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcSet == rhs.srcSet ) - && ( srcBinding == rhs.srcBinding ) - && ( srcArrayElement == rhs.srcArrayElement ) - && ( dstSet == rhs.dstSet ) - && ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && + ( srcBinding == rhs.srcBinding ) && ( srcArrayElement == rhs.srcArrayElement ) && + ( dstSet 
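// NOTE (illustrative sketch, not part of the generated header): a CopyDescriptorSet
// is consumed by Device::updateDescriptorSets. Assuming hypothetical descriptor
// sets `srcSet` and `dstSet` allocated from the same layout:
//
//   vk::CopyDescriptorSet copy( srcSet, 0 /*srcBinding*/, 0 /*srcArrayElement*/,
//                               dstSet, 0 /*dstBinding*/, 0 /*dstArrayElement*/,
//                               1 /*descriptorCount*/ );
//   device.updateDescriptorSets( nullptr, copy );   // no writes, one copy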
== rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); } - bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; - uint32_t srcBinding = {}; - uint32_t srcArrayElement = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; + uint32_t srcBinding = {}; + uint32_t srcArrayElement = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; }; - static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" ); + static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -31142,127 +40217,319 @@ namespace VULKAN_HPP_NAMESPACE using Type = CopyDescriptorSet; }; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct CopyMemoryToAccelerationStructureInfoKHR + class CuModuleNVX { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + public: + using CType = VkCuModuleNVX; - CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT - : src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + VULKAN_HPP_CONSTEXPR CuModuleNVX() = default; + VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + : m_cuModuleNVX( cuModuleNVX ) {} - CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( CopyMemoryToAccelerationStructureInfoKHR ) - offsetof( CopyMemoryToAccelerationStructureInfoKHR, pNext ) ); + m_cuModuleNVX = cuModuleNVX; + return *this; + } +#endif + + CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = {}; return *this; } - CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( CuModuleNVX const & ) const = default; +#else + bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return m_cuModuleNVX == rhs.m_cuModuleNVX; } - CopyMemoryToAccelerationStructureInfoKHR& operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + bool operator!=( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + return m_cuModuleNVX != rhs.m_cuModuleNVX; + } + + bool operator<( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX < rhs.m_cuModuleNVX; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == VK_NULL_HANDLE; + } + + private: + VkCuModuleNVX m_cuModuleNVX = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct CuFunctionCreateInfoNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, + const char * pName_ = {} ) VULKAN_HPP_NOEXCEPT + : module( module_ ) + , pName( pName_ ) + {} + + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & + operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT { - src = src_; + module = module_; return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { - dst = dst_; + pName = pName_; return *this; } - CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + operator 
VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { - mode = mode_; - return *this; + return *reinterpret_cast( this ); } - - operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuFunctionCreateInfoNVX const & ) const = default; +#else + bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( pName == rhs.pName ); } - - + bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; + const char * pName = {}; }; - static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> - struct CppType + struct CppType { - using Type = CopyMemoryToAccelerationStructureInfoKHR; + using Type = CuFunctionCreateInfoNVX; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR + struct CuModuleCreateInfoNVX + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleCreateInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & + operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + + operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleCreateInfoNVX const & ) const = default; +#else + bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); + } + + bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX; + const void * pNext = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + static_assert( sizeof( CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
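For orientation, the two NVX create-info wrappers above follow the usual CUDA-style flow: a module is created from a binary blob, then a function is looked up in it by name. A minimal sketch, assuming the default vk namespace alias, a hypothetical ptxBlob buffer, and a kernel name "myKernel"; creating the CuModuleNVX handle itself (via the device) is omitted:

    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Sketch only: ptxBlob contents and "myKernel" are placeholders.
    std::vector<char> ptxBlob = {};  // load PTX/cubin bytes here

    vk::CuModuleCreateInfoNVX moduleInfo = vk::CuModuleCreateInfoNVX{}
                                               .setDataSize( ptxBlob.size() )
                                               .setPData( ptxBlob.data() );

    vk::CuModuleNVX cuModule;  // assume created from moduleInfo elsewhere
    vk::CuFunctionCreateInfoNVX functionInfo =
        vk::CuFunctionCreateInfoNVX{}.setModule( cuModule ).setPName( "myKernel" );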
); + + template <> + struct CppType + { + using Type = CuModuleCreateInfoNVX; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) struct D3D12FenceSubmitInfoKHR { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; - VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, - const uint64_t* pWaitSemaphoreValues_ = {}, - uint32_t signalSemaphoreValuesCount_ = {}, - const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValuesCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) {} - D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( D3D12FenceSubmitInfoKHR ) - offsetof( D3D12FenceSubmitInfoKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + {} - D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {} ) + : waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & + operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -31274,67 +40541,82 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreValues = pWaitSemaphoreValues_; return *this; } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = 
waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreValuesCount = signalSemaphoreValuesCount_; return *this; } - D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreValues = pSignalSemaphoreValues_; return *this; } - - operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default; -#else - bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; +# else + bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) - && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) - && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) - && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && + ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); } - bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; - const void* pNext = {}; - uint32_t waitSemaphoreValuesCount = {}; - const uint64_t* pWaitSemaphoreValues = {}; - uint32_t signalSemaphoreValuesCount = {}; - const uint64_t* pSignalSemaphoreValues = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreValuesCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValuesCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; }; - static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" ); + static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), + "struct and wrapper have different size!" 
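The ArrayProxyNoTemporaries constructor and setters added above keep each *Count/p* pair in sync from a single container argument. A small sketch (Win32 platform and enhanced mode assumed; the values are made up):

    #include <array>
    #include <vulkan/vulkan.hpp>

    std::array<uint64_t, 2> waitValues{ 1, 2 };

    vk::D3D12FenceSubmitInfoKHR submitInfo{};
    submitInfo.setWaitSemaphoreValues( waitValues );  // writes count and pointer together

    // Equivalent to setting both fields by hand:
    submitInfo.setWaitSemaphoreValuesCount( static_cast<uint32_t>( waitValues.size() ) )
        .setPWaitSemaphoreValues( waitValues.data() );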
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -31344,136 +40626,46 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct DebugMarkerMarkerInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; - - VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = {}, - std::array const& color_ = {} ) VULKAN_HPP_NOEXCEPT - : pMarkerName( pMarkerName_ ) - , color( color_ ) - {} - - DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerMarkerInfoEXT ) - offsetof( DebugMarkerMarkerInfoEXT, pNext ) ); - return *this; - } - - DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT - { - pMarkerName = pMarkerName_; - return *this; - } - - DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT - { - color = color_; - return *this; - } - - - operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default; -#else - bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pMarkerName == rhs.pMarkerName ) - && ( color == rhs.color ); - } - - bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; - const void* pNext = {}; - const char* pMarkerName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; - - }; - static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DebugMarkerMarkerInfoEXT; - }; - struct DebugMarkerObjectNameInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; - VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , object( object_ ) , pObjectName( pObjectName_ ) {} - DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DebugMarkerObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & + operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; @@ -31485,54 +40677,49 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } - - operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerObjectNameInfoEXT const & ) const = default; #else - bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( object == rhs.object ) - && ( pObjectName == rhs.pObjectName ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( object == rhs.object ) && ( pObjectName == rhs.pObjectName ); } - bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - uint64_t object = {}; - const char* pObjectName = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + const char * pObjectName = {}; }; - static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -31542,14 +40729,16 @@ namespace VULKAN_HPP_NAMESPACE struct DebugMarkerObjectTagInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; - VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , object( object_ ) , tagName( tagName_ ) @@ -31557,30 +40746,45 @@ namespace VULKAN_HPP_NAMESPACE , pTag( pTag_ ) {} - DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DebugMarkerObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + {} - DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ) + , object( object_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & + operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; @@ -31604,58 +40808,63 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } - - operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT & + setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default; #else - bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( object == rhs.object ) - && ( tagName == rhs.tagName ) - && ( tagSize == rhs.tagSize ) - && ( pTag == rhs.pTag ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( object == rhs.object ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && + ( pTag == rhs.pTag ); } - bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - uint64_t object = {}; - uint64_t tagName = {}; - size_t tagSize = {}; - const void* pTag = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; }; - static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -31665,35 +40874,36 @@ namespace VULKAN_HPP_NAMESPACE struct DebugReportCallbackCreateInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; - VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, - PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, - void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, + PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pfnCallback( pfnCallback_ ) , pUserData( pUserData_ ) {} - DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DebugReportCallbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & + operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -31711,54 +40921,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } - - operator VkDebugReportCallbackCreateInfoEXT const&() const 
VULKAN_HPP_NOEXCEPT + operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugReportCallbackCreateInfoEXT const & ) const = default; #else - bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pfnCallback == rhs.pfnCallback ) - && ( pUserData == rhs.pUserData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData ); } - bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; - PFN_vkDebugReportCallbackEXT pfnCallback = {}; - void* pUserData = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; + PFN_vkDebugReportCallbackEXT pfnCallback = {}; + void * pUserData = {}; }; - static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
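A sketch of wiring this create info to a callback, assuming the stock PFN_vkDebugReportCallbackEXT signature and the generated setPfnCallback setter; the callback body is illustrative only:

    #include <cstdio>
    #include <vulkan/vulkan.hpp>

    VKAPI_ATTR VkBool32 VKAPI_CALL reportCallback( VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT,
                                                   uint64_t, size_t, int32_t, const char *,
                                                   const char * pMessage, void * )
    {
      std::fprintf( stderr, "debug report: %s\n", pMessage );
      return VK_FALSE;  // VK_FALSE lets the triggering call proceed
    }

    vk::DebugReportCallbackCreateInfoEXT reportInfo =
        vk::DebugReportCallbackCreateInfoEXT{}
            .setFlags( vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning )
            .setPfnCallback( &reportCallback );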
); template <> struct CppType @@ -31766,130 +40970,39 @@ namespace VULKAN_HPP_NAMESPACE using Type = DebugReportCallbackCreateInfoEXT; }; - struct DebugUtilsLabelEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; - - VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = {}, - std::array const& color_ = {} ) VULKAN_HPP_NOEXCEPT - : pLabelName( pLabelName_ ) - , color( color_ ) - {} - - DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsLabelEXT ) - offsetof( DebugUtilsLabelEXT, pNext ) ); - return *this; - } - - DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT - { - pLabelName = pLabelName_; - return *this; - } - - DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT - { - color = color_; - return *this; - } - - - operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsLabelEXT const& ) const = default; -#else - bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pLabelName == rhs.pLabelName ) - && ( color == rhs.color ); - } - - bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; - const void* pNext = {}; - const char* pLabelName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; - - }; - static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DebugUtilsLabelEXT; - }; - struct DebugUtilsObjectNameInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; - VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , objectHandle( objectHandle_ ) , pObjectName( pObjectName_ ) {} - DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DebugUtilsObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & + operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -31907,54 +41020,48 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } - - operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectNameInfoEXT const & ) const = default; #else - bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( objectHandle == rhs.objectHandle ) - && ( pObjectName == rhs.pObjectName ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType 
) && + ( objectHandle == rhs.objectHandle ) && ( pObjectName == rhs.pObjectName ); } - bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - const char* pObjectName = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + const char * pObjectName = {}; }; - static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -31964,19 +41071,22 @@ namespace VULKAN_HPP_NAMESPACE struct DebugUtilsMessengerCallbackDataEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDebugUtilsMessengerCallbackDataEXT; - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, - const char* pMessageIdName_ = {}, - int32_t messageIdNumber_ = {}, - const char* pMessage_ = {}, - uint32_t queueLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, - uint32_t cmdBufLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, - uint32_t objectCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, + const char * pMessageIdName_ = {}, + int32_t messageIdNumber_ = {}, + const char * pMessage_ = {}, + uint32_t queueLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, + uint32_t cmdBufLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, + uint32_t objectCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pMessageIdName( pMessageIdName_ ) , messageIdNumber( messageIdNumber_ ) @@ -31989,36 +41099,63 @@ namespace VULKAN_HPP_NAMESPACE , pObjects( pObjects_ ) {} - DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( 
DebugUtilsMessengerCallbackDataEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast( &rhs ) ) + {} - DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, + const char * pMessageIdName_, + int32_t messageIdNumber_, + const char * pMessage_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueLabels_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + cmdBufLabels_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + objects_ = {} ) + : flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( static_cast( queueLabels_.size() ) ) + , pQueueLabels( queueLabels_.data() ) + , cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ) + , pCmdBufLabels( cmdBufLabels_.data() ) + , objectCount( static_cast( objects_.size() ) ) + , pObjects( objects_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCallbackDataEXT & + operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT { pMessageIdName = pMessageIdName_; return *this; @@ -32030,7 +41167,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT { pMessage = pMessage_; return *this; @@ -32042,92 +41179,119 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT { pQueueLabels = pQueueLabels_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setQueueLabels( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueLabels_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = static_cast( queueLabels_.size() ); + pQueueLabels = queueLabels_.data(); + 
return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT { cmdBufLabelCount = cmdBufLabelCount_; return *this; } - DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT { pCmdBufLabels = pCmdBufLabels_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); + pCmdBufLabels = cmdBufLabels_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT { objectCount = objectCount_; return *this; } - DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & + setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT { pObjects = pObjects_; return *this; } - - operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setObjects( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + objects_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + objectCount = static_cast( objects_.size() ); + pObjects = objects_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerCallbackDataEXT const & ) const = default; #else - bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pMessageIdName == rhs.pMessageIdName ) - && ( messageIdNumber == rhs.messageIdNumber ) - && ( pMessage == rhs.pMessage ) - && ( queueLabelCount == rhs.queueLabelCount ) - && ( pQueueLabels == rhs.pQueueLabels ) - && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) - && ( pCmdBufLabels == rhs.pCmdBufLabels ) - && ( objectCount == rhs.objectCount ) - && ( pObjects == rhs.pObjects ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pMessageIdName == rhs.pMessageIdName ) && ( messageIdNumber == rhs.messageIdNumber ) && + ( pMessage == rhs.pMessage ) && ( queueLabelCount == rhs.queueLabelCount ) && + ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && + ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && + ( pObjects == rhs.pObjects ); } - bool operator!=( 
DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; - const char* pMessageIdName = {}; - int32_t messageIdNumber = {}; - const char* pMessage = {}; - uint32_t queueLabelCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {}; - uint32_t cmdBufLabelCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {}; - uint32_t objectCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; + const char * pMessageIdName = {}; + int32_t messageIdNumber = {}; + const char * pMessage = {}; + uint32_t queueLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {}; + uint32_t cmdBufLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {}; + uint32_t objectCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {}; }; - static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
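The proxy-based setters added to this struct replace manual count bookkeeping, which is handy when fabricating callback data in tests. A sketch with hypothetical label contents:

    #include <array>
    #include <vulkan/vulkan.hpp>

    std::array<vk::DebugUtilsLabelEXT, 1> queueLabels{
        vk::DebugUtilsLabelEXT{}.setPLabelName( "main queue" )  // hypothetical label
    };

    vk::DebugUtilsMessengerCallbackDataEXT callbackData{};
    callbackData.setPMessageIdName( "EXAMPLE-Id" )
        .setPMessage( "example message" )
        .setQueueLabels( queueLabels );  // fills queueLabelCount and pQueueLabels together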
); template <> struct CppType @@ -32137,14 +41301,16 @@ namespace VULKAN_HPP_NAMESPACE struct DebugUtilsMessengerCreateInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, - void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , messageSeverity( messageSeverity_ ) , messageType( messageType_ ) @@ -32152,105 +41318,102 @@ namespace VULKAN_HPP_NAMESPACE , pUserData( pUserData_ ) {} - DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT { messageSeverity = messageSeverity_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setMessageType( 
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT { messageType = messageType_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & + setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; } - DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } - - operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerCreateInfoEXT const & ) const = default; #else - bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( messageSeverity == rhs.messageSeverity ) - && ( messageType == rhs.messageType ) - && ( pfnUserCallback == rhs.pfnUserCallback ) - && ( pUserData == rhs.pUserData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( messageSeverity == rhs.messageSeverity ) && ( messageType == rhs.messageType ) && + ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); } - bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; - void* pUserData = {}; - + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; }; - static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
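Because allowDuplicate is now true here, more than one DebugUtilsMessengerCreateInfoEXT may legally appear in a pNext chain (typically to receive messages during instance creation itself). A sketch of filling one, with an assumed no-op callback:

    #include <vulkan/vulkan.hpp>

    VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
                                                  VkDebugUtilsMessageTypeFlagsEXT,
                                                  const VkDebugUtilsMessengerCallbackDataEXT *, void * )
    {
      return VK_FALSE;  // never abort the call that produced the message
    }

    vk::DebugUtilsMessengerCreateInfoEXT messengerInfo =
        vk::DebugUtilsMessengerCreateInfoEXT{}
            .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
                                 vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
            .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
                             vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
            .setPfnUserCallback( &debugCallback );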
); template <> struct CppType @@ -32260,14 +41423,16 @@ namespace VULKAN_HPP_NAMESPACE struct DebugUtilsObjectTagInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; - VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT : objectType( objectType_ ) , objectHandle( objectHandle_ ) , tagName( tagName_ ) @@ -32275,24 +41440,38 @@ namespace VULKAN_HPP_NAMESPACE , pTag( pTag_ ) {} - DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DebugUtilsObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + {} - DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & + operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32322,58 +41501,62 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT + DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } - - operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugUtilsObjectTagInfoEXT & + setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); } operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; #else - bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( objectHandle == rhs.objectHandle ) - && ( tagName == rhs.tagName ) - && ( tagSize == rhs.tagSize ) - && ( pTag == rhs.pTag ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && + ( pTag == rhs.pTag ); } - bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - uint64_t tagName = {}; - size_t tagSize = {}; - const void* pTag = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; }; - static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -32383,80 +41566,80 @@ namespace VULKAN_HPP_NAMESPACE struct DedicatedAllocationBufferCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationBufferCreateInfoNV; - VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : dedicatedAllocation( dedicatedAllocation_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) + VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ ) {} - DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationBufferCreateInfoNV ) - offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DedicatedAllocationBufferCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & + operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationBufferCreateInfoNV & + operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationBufferCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return *this; } - - operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default; #else - bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType 
== rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dedicatedAllocation == rhs.dedicatedAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); } - bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; }; - static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -32466,80 +41649,80 @@ namespace VULKAN_HPP_NAMESPACE struct DedicatedAllocationImageCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationImageCreateInfoNV; - VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ ) {} - DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationImageCreateInfoNV ) - offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & + operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationImageCreateInfoNV & + operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ 
) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return *this; } - - operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default; #else - bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dedicatedAllocation == rhs.dedicatedAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); } - bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; }; - static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -32549,33 +41732,38 @@ namespace VULKAN_HPP_NAMESPACE struct DedicatedAllocationMemoryAllocateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationMemoryAllocateInfoNV; - VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT : image( image_ ) , buffer( buffer_ ) {} - DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) - offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DedicatedAllocationMemoryAllocateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & + operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationMemoryAllocateInfoNV & + operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -32593,46 +41781,41 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; #else - bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == 
rhs.buffer ); } - bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == + sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -32640,269 +41823,29 @@ namespace VULKAN_HPP_NAMESPACE using Type = DedicatedAllocationMemoryAllocateInfoNV; }; -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct DeferredOperationInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeferredOperationInfoKHR; - - VULKAN_HPP_CONSTEXPR DeferredOperationInfoKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ = {} ) VULKAN_HPP_NOEXCEPT - : operationHandle( operationHandle_ ) - {} - - DeferredOperationInfoKHR & operator=( DeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeferredOperationInfoKHR ) - offsetof( DeferredOperationInfoKHR, pNext ) ); - return *this; - } - - DeferredOperationInfoKHR( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeferredOperationInfoKHR& operator=( VkDeferredOperationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeferredOperationInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeferredOperationInfoKHR & setOperationHandle( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle_ ) VULKAN_HPP_NOEXCEPT - { - operationHandle = operationHandle_; - return *this; - } - - - operator VkDeferredOperationInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeferredOperationInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeferredOperationInfoKHR const& ) const = default; -#else - bool operator==( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( operationHandle == rhs.operationHandle ); - } - - bool operator!=( DeferredOperationInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeferredOperationInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeferredOperationKHR operationHandle = {}; - - }; - static_assert( sizeof( 
DeferredOperationInfoKHR ) == sizeof( VkDeferredOperationInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = DeferredOperationInfoKHR; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - struct DescriptorBufferInfo - { - - - VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) - , range( range_ ) - {} - - DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT - { - range = range_; - return *this; - } - - - operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorBufferInfo const& ) const = default; -#else - bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( range == rhs.range ); - } - - bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize range = {}; - - }; - static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DescriptorImageInfo - { - - - VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, - VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : sampler( sampler_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - {} - - DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT - { - sampler = sampler_; - return *this; - } - - DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT - { - imageView = imageView_; - return *this; - } - - DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT - { - imageLayout = imageLayout_; - return *this; - } - - - operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorImageInfo const& ) const = default; -#else - bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sampler == rhs.sampler ) - && ( imageView == rhs.imageView ) - && ( imageLayout == rhs.imageLayout ); - } - - bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::Sampler sampler = {}; - VULKAN_HPP_NAMESPACE::ImageView imageView = {}; - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - - }; - static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct DescriptorPoolSize { - - - VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : type( type_ ) , descriptorCount( descriptorCount_ ) {} - DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolSize( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & + operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -32918,76 +41861,84 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorPoolSize const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolSize const & ) const = default; #else - bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( type == rhs.type ) - && ( descriptorCount == rhs.descriptorCount ); + return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount ); } - bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - uint32_t descriptorCount = {}; - + VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; }; - static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" ); + static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); struct DescriptorPoolCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; - VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, - uint32_t maxSets_ = {}, - uint32_t poolSizeCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, + uint32_t maxSets_ = {}, + uint32_t poolSizeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , maxSets( maxSets_ ) , poolSizeCount( poolSizeCount_ ) , pPoolSizes( pPoolSizes_ ) {} - DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorPoolCreateInfo ) - offsetof( DescriptorPoolCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DescriptorPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} - DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, + uint32_t maxSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) + : flags( flags_ ) + , maxSets( maxSets_ ) + , poolSizeCount( static_cast( poolSizes_.size() ) ) + , pPoolSizes( poolSizes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & + operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -33011,55 +41962,59 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolCreateInfo & + setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT { pPoolSizes = pPoolSizes_; return *this; } - - operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo & setPoolSizes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + poolSizeCount = static_cast( poolSizes_.size() ); + pPoolSizes = poolSizes_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); 
+ return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorPoolCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolCreateInfo const & ) const = default; #else - bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( maxSets == rhs.maxSets ) - && ( poolSizeCount == rhs.poolSizeCount ) - && ( pPoolSizes == rhs.pPoolSizes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && + ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes ); } - bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; - uint32_t maxSets = {}; - uint32_t poolSizeCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; + uint32_t maxSets = {}; + uint32_t poolSizeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {}; }; - static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> @@ -33070,80 +42025,84 @@ namespace VULKAN_HPP_NAMESPACE struct DescriptorPoolInlineUniformBlockCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; - VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) {} - DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( + DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorPoolInlineUniformBlockCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfoEXT & + operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfoEXT & + operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) - offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfoEXT & + setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT { maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; return *this; } - - operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default; +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const & ) const = default; #else - bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); } - bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; - const void* pNext = {}; - uint32_t maxInlineUniformBlockBindings = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; + const void * pNext = {}; + uint32_t maxInlineUniformBlockBindings = {}; }; - static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == + sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -33151,43 +42110,251 @@ namespace VULKAN_HPP_NAMESPACE using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT; }; + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + VULKAN_HPP_CONSTEXPR DescriptorPool() = default; + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + : m_descriptorPool( descriptorPool ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = descriptorPool; + return *this; + } +#endif + + DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPool const & ) const = default; +#else + bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == rhs.m_descriptorPool; + } + + bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != rhs.m_descriptorPool; + } + + bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool < rhs.m_descriptorPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() = default; + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( descriptorSetLayout ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayout const & ) const = default; +#else + bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == rhs.m_descriptorSetLayout; + } + + bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != rhs.m_descriptorSetLayout; + } + + bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout < rhs.m_descriptorSetLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + struct DescriptorSetAllocateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; - VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, - uint32_t descriptorSetCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT : descriptorPool( descriptorPool_ ) , descriptorSetCount( descriptorSetCount_ ) , pSetLayouts( pSetLayouts_ ) {} - DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetAllocateInfo ) - offsetof( DescriptorSetAllocateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) + {} - DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) + : descriptorPool( descriptorPool_ ) + , descriptorSetCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & + operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & + setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT { descriptorPool = descriptorPool_; return *this; @@ -33199,54 +42366,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return 
*this; } - - operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo & setSetLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorSetCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; #else - bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( descriptorPool == rhs.descriptorPool ) - && ( descriptorSetCount == rhs.descriptorSetCount ) - && ( pSetLayouts == rhs.pSetLayouts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && + ( descriptorSetCount == rhs.descriptorSetCount ) && ( pSetLayouts == rhs.pSetLayouts ); } - bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; - uint32_t descriptorSetCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; }; - static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -33256,13 +42429,13 @@ namespace VULKAN_HPP_NAMESPACE struct DescriptorSetLayoutBinding { - - - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT : binding( binding_ ) , descriptorType( descriptorType_ ) , descriptorCount( descriptorCount_ ) @@ -33270,14 +42443,34 @@ namespace VULKAN_HPP_NAMESPACE , pImmutableSamplers( pImmutableSamplers_ ) {} - DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding( + uint32_t binding_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( static_cast( immutableSamplers_.size() ) ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( immutableSamplers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & + operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -33287,7 +42480,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBinding & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; @@ -33305,84 +42499,105 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBinding & + setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT { pImmutableSamplers = pImmutableSamplers_; return *this; } - - operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding & setImmutableSamplers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
immutableSamplers_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorCount = static_cast( immutableSamplers_.size() ); + pImmutableSamplers = immutableSamplers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutBinding const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; #else - bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( binding == rhs.binding ) - && ( descriptorType == rhs.descriptorType ) - && ( descriptorCount == rhs.descriptorCount ) - && ( stageFlags == rhs.stageFlags ) - && ( pImmutableSamplers == rhs.pImmutableSamplers ); + return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && + ( descriptorCount == rhs.descriptorCount ) && ( stageFlags == rhs.stageFlags ) && + ( pImmutableSamplers == rhs.pImmutableSamplers ); } - bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {}; - + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; }; - static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct DescriptorSetLayoutBindingFlagsCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT : bindingCount( bindingCount_ ) , pBindingFlags( pBindingFlags_ ) {} - DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( + DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBindingFlagsCreateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindingFlags_ ) + : bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) - offsetof( DescriptorSetLayoutBindingFlagsCreateInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DescriptorSetLayoutBindingFlagsCreateInfo& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -33394,96 +42609,117 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo & + setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT { pBindingFlags = pBindingFlags_; return *this; } - - operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + 
bindingFlags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bindingCount = static_cast( bindingFlags_.size() ); + pBindingFlags = bindingFlags_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; #else - bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( bindingCount == rhs.bindingCount ) - && ( pBindingFlags == rhs.pBindingFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && + ( pBindingFlags == rhs.pBindingFlags ); } - bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; - const void* pNext = {}; - uint32_t bindingCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + const void * pNext = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; }; - static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == + sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
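    // Usage sketch (assumes enhanced mode and the descriptorBindingPartiallyBound
    // feature; the structure expects one flags entry per binding of the layout
    // it is chained to): bindingCount is derived from the proxy size.
    //
    //   vk::DescriptorBindingFlags bindingFlags =
    //       vk::DescriptorBindingFlagBits::ePartiallyBound;
    //   vk::DescriptorSetLayoutBindingFlagsCreateInfo bindingFlagsInfo( bindingFlags );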
); template <> struct CppType { using Type = DescriptorSetLayoutBindingFlagsCreateInfo; }; + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; struct DescriptorSetLayoutCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, - uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , bindingCount( bindingCount_ ) , pBindings( pBindings_ ) {} - DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + {} - DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindings_ ) + : flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -33495,54 +42731,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & + setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT { pBindings = pBindings_; return *this; } - - operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo & 
setBindings( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindings_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bindingCount = static_cast( bindings_.size() ); + pBindings = bindings_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; #else - bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( bindingCount == rhs.bindingCount ) - && ( pBindings == rhs.pBindings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( bindingCount == rhs.bindingCount ) && ( pBindings == rhs.pBindings ); } - bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; - uint32_t bindingCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; }; - static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
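    // Usage sketch (assumes enhanced mode, the default exception-based error
    // handling, and `device`, `binding` and `bindingFlagsInfo` from the
    // sketches above): bindingCount is derived from the bindings proxy, and
    // the per-binding flags are attached through the pNext chain.
    //
    //   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, binding );
    //   layoutInfo.setPNext( &bindingFlagsInfo );
    //   vk::DescriptorSetLayout layout = device.createDescriptorSetLayout( layoutInfo );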
); template <> struct CppType @@ -33552,249 +42794,274 @@ namespace VULKAN_HPP_NAMESPACE struct DescriptorSetLayoutSupport { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT : supported( supported_ ) {} - DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DescriptorSetLayoutSupport( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutSupport & + operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetLayoutSupport const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; #else - bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supported == rhs.supported ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); } - bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supported = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supported = {}; }; - static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), + "struct and wrapper have different size!" 
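    // Usage sketch (assumes Vulkan 1.1 or VK_KHR_maintenance3 and a valid
    // `device` and `layoutInfo`): query whether a layout is supported before
    // attempting to create it.
    //
    //   vk::DescriptorSetLayoutSupport support =
    //       device.getDescriptorSetLayoutSupport( layoutInfo );
    //   if ( !support.supported )
    //   {
    //     // reduce descriptor counts and query again
    //   }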
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = DescriptorSetLayoutSupport; }; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; struct DescriptorSetVariableDescriptorCountAllocateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; - VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, - const uint32_t* pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, + const uint32_t * pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT : descriptorSetCount( descriptorSetCount_ ) , pDescriptorCounts( pDescriptorCounts_ ) {} - DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( + DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountAllocateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) + : descriptorSetCount( static_cast( descriptorCounts_.size() ) ) + , pDescriptorCounts( descriptorCounts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) - offsetof( DescriptorSetVariableDescriptorCountAllocateInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DescriptorSetVariableDescriptorCountAllocateInfo& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & + setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT { descriptorSetCount = 
descriptorSetCount_; return *this; } - DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & + setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT { pDescriptorCounts = pDescriptorCounts_; return *this; } - - operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorSetCount = static_cast( descriptorCounts_.size() ); + pDescriptorCounts = descriptorCounts_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; #else - bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( descriptorSetCount == rhs.descriptorSetCount ) - && ( pDescriptorCounts == rhs.pDescriptorCounts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pDescriptorCounts == rhs.pDescriptorCounts ); } - bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; - const void* pNext = {}; - uint32_t descriptorSetCount = {}; - const uint32_t* pDescriptorCounts = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + const void * pNext = {}; + uint32_t descriptorSetCount = {}; + const uint32_t * pDescriptorCounts = {}; }; - static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == + sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
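    // Usage sketch (assumes enhanced mode, a valid `pool` and `layout`, and a
    // layout whose last binding was declared with eVariableDescriptorCount):
    // one count entry per set being allocated, consumed via the pNext chain of
    // the allocate info.
    //
    //   uint32_t variableCounts[] = { 64 };
    //   vk::DescriptorSetVariableDescriptorCountAllocateInfo variableCountInfo( variableCounts );
    //   vk::DescriptorSetAllocateInfo allocInfo( pool, layout );
    //   allocInfo.setPNext( &variableCountInfo );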
); template <> struct CppType { using Type = DescriptorSetVariableDescriptorCountAllocateInfo; }; + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; struct DescriptorSetVariableDescriptorCountLayoutSupport { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; - VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : maxVariableDescriptorCount( maxVariableDescriptorCount_ ) {} - DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( + DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountLayoutSupport( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) - offsetof( DescriptorSetVariableDescriptorCountLayoutSupport, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; - } - - DescriptorSetVariableDescriptorCountLayoutSupport& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; #else - bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( 
pNext == rhs.pNext ) - && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); } - bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; - void* pNext = {}; - uint32_t maxVariableDescriptorCount = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + void * pNext = {}; + uint32_t maxVariableDescriptorCount = {}; }; - static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == + sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = DescriptorSetVariableDescriptorCountLayoutSupport; }; + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; struct DescriptorUpdateTemplateEntry { - - - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - size_t offset_ = {}, - size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + size_t offset_ = {}, + size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : dstBinding( dstBinding_ ) , dstArrayElement( dstArrayElement_ ) , descriptorCount( descriptorCount_ ) @@ -33803,14 +43070,20 @@ namespace VULKAN_HPP_NAMESPACE , stride( stride_ ) {} - DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & + operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -33832,7 +43105,8 @@ namespace VULKAN_HPP_NAMESPACE return 
*this; } - DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateEntry & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; @@ -33850,64 +43124,63 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; #else - bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ) - && ( descriptorType == rhs.descriptorType ) - && ( offset == rhs.offset ) - && ( stride == rhs.stride ); + return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( offset == rhs.offset ) && ( stride == rhs.stride ); } - bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - size_t offset = {}; - size_t stride = {}; - + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + size_t offset = {}; + size_t stride = {}; }; - static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
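    // Usage sketch: an entry maps one field of a host-side struct onto one
    // descriptor binding; offset and stride are byte offsets into the data
    // blob later passed to vkUpdateDescriptorSetWithTemplate. `HostData` is
    // illustrative only.
    //
    //   struct HostData
    //   {
    //     VkDescriptorBufferInfo uniform;
    //   };
    //   vk::DescriptorUpdateTemplateEntry entry(
    //       0,                                   // dstBinding
    //       0,                                   // dstArrayElement
    //       1,                                   // descriptorCount
    //       vk::DescriptorType::eUniformBuffer,
    //       offsetof( HostData, uniform ),       // offset
    //       sizeof( HostData ) );                // stride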
); + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; struct DescriptorUpdateTemplateCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorUpdateTemplateCreateInfo; - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, - uint32_t descriptorUpdateEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, + uint32_t descriptorUpdateEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) @@ -33918,66 +43191,107 @@ namespace VULKAN_HPP_NAMESPACE , set( set_ ) {} - DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) + {} - DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorUpdateEntries_, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {} ) + : flags( flags_ ) + , descriptorUpdateEntryCount( 
static_cast( descriptorUpdateEntries_.size() ) ) + , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateCreateInfo & + operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT { descriptorUpdateEntryCount = descriptorUpdateEntryCount_; return *this; } - DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT { pDescriptorUpdateEntries = pDescriptorUpdateEntries_; return *this; } - DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); + pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorUpdateTemplateCreateInfo & + setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT { templateType = templateType_; return *this; } - DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return *this; } - DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } - 
DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & + setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT { pipelineLayout = pipelineLayout_; return *this; @@ -33989,98 +43303,103 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; #else - bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) - && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) - && ( templateType == rhs.templateType ) - && ( descriptorSetLayout == rhs.descriptorSetLayout ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( set == rhs.set ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && + ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && + ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipelineLayout == rhs.pipelineLayout ) && ( set == rhs.set ); } - bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; - uint32_t descriptorUpdateEntryCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; + uint32_t descriptorUpdateEntryCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; - 
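    // Note: descriptorSetLayout is consumed only when templateType is
    // eDescriptorSet; pipelineBindPoint, pipelineLayout and set apply only to
    // ePushDescriptorsKHR templates and are ignored otherwise.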
uint32_t set = {}; - + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t set = {}; }; - static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = DescriptorUpdateTemplateCreateInfo; }; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; struct DeviceQueueCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; - VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - uint32_t queueCount_ = {}, - const float* pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueCount_ = {}, + const float * pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , queueFamilyIndex( queueFamilyIndex_ ) , queueCount( queueCount_ ) , pQueuePriorities( pQueuePriorities_ ) {} - DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueCreateInfo ) - offsetof( DeviceQueueCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) + {} - DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( static_cast( queuePriorities_.size() ) ) + , pQueuePriorities( queuePriorities_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & + operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -34104,55 +43423,58 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueCreateInfo & 
setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT { pQueuePriorities = pQueuePriorities_; return *this; } - - operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo & setQueuePriorities( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + queueCount = static_cast( queuePriorities_.size() ); + pQueuePriorities = queuePriorities_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceQueueCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueCreateInfo const & ) const = default; #else - bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( queueCount == rhs.queueCount ) - && ( pQueuePriorities == rhs.pQueuePriorities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueCount == rhs.queueCount ) && + ( pQueuePriorities == rhs.pQueuePriorities ); } - bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - uint32_t queueCount = {}; - const float* pQueuePriorities = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueCount = {}; + const float * pQueuePriorities = {}; }; - static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
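    // Usage sketch (assumes enhanced mode and that `graphicsFamilyIndex` was
    // obtained from getQueueFamilyProperties): queueCount is derived from the
    // priorities proxy, requesting two queues from the same family here.
    //
    //   float priorities[] = { 1.0f, 0.5f };
    //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsFamilyIndex, priorities );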
); template <> @@ -34163,63 +43485,63 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFeatures { - - - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + 
PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT : robustBufferAccess( robustBufferAccess_ ) , fullDrawIndexUint32( fullDrawIndexUint32_ ) , imageCubeArray( imageCubeArray_ ) @@ -34277,24 +43599,31 @@ namespace VULKAN_HPP_NAMESPACE , inheritedQueries( 
inheritedQueries_ ) {} - PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT { robustBufferAccess = robustBufferAccess_; return *this; } - PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT { fullDrawIndexUint32 = fullDrawIndexUint32_; return *this; @@ -34318,7 +43647,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT { tessellationShader = tessellationShader_; return *this; @@ -34348,7 +43678,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT { drawIndirectFirstInstance = drawIndirectFirstInstance_; return *this; @@ -34408,115 +43739,134 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT { textureCompressionETC2 = textureCompressionETC2_; return *this; } - PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT { textureCompressionASTC_LDR = textureCompressionASTC_LDR_; return *this; } - PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT { textureCompressionBC = textureCompressionBC_; return *this; } - PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 
occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT { occlusionQueryPrecise = occlusionQueryPrecise_; return *this; } - PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT { pipelineStatisticsQuery = pipelineStatisticsQuery_; return *this; } - PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT { vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; return *this; } - PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT { fragmentStoresAndAtomics = fragmentStoresAndAtomics_; return *this; } - PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT { shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; return *this; } - PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT { shaderImageGatherExtended = shaderImageGatherExtended_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageMultisample( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageMultisample = shaderStorageImageMultisample_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; return *this; } - PhysicalDeviceFeatures & 
setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; return *this; } - PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT { shaderClipDistance = shaderClipDistance_; return *this; } - PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT { shaderCullDistance = shaderCullDistance_; return *this; @@ -34540,13 +43890,15 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT { shaderResourceResidency = shaderResourceResidency_; return *this; } - PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT { shaderResourceMinLod = shaderResourceMinLod_; return *this; @@ -34558,55 +43910,64 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyBuffer = sparseResidencyBuffer_; return *this; } - PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT + 
PhysicalDeviceFeatures & + setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyImage2D = sparseResidencyImage2D_; return *this; } - PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyImage3D = sparseResidencyImage3D_; return *this; } - PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency2Samples = sparseResidency2Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency4Samples = sparseResidency4Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency8Samples = sparseResidency8Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT { sparseResidency16Samples = sparseResidency16Samples_; return *this; } - PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT { sparseResidencyAliased = sparseResidencyAliased_; return *this; } - PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & + setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT { variableMultisampleRate = variableMultisampleRate_; return *this; @@ -34618,162 +43979,145 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceFeatures *>( this ); } operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this ); + return *reinterpret_cast<VkPhysicalDeviceFeatures *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( robustBufferAccess == rhs.robustBufferAccess ) - && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) - && ( imageCubeArray == rhs.imageCubeArray ) - && ( independentBlend ==
rhs.independentBlend ) - && ( geometryShader == rhs.geometryShader ) - && ( tessellationShader == rhs.tessellationShader ) - && ( sampleRateShading == rhs.sampleRateShading ) - && ( dualSrcBlend == rhs.dualSrcBlend ) - && ( logicOp == rhs.logicOp ) - && ( multiDrawIndirect == rhs.multiDrawIndirect ) - && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) - && ( depthClamp == rhs.depthClamp ) - && ( depthBiasClamp == rhs.depthBiasClamp ) - && ( fillModeNonSolid == rhs.fillModeNonSolid ) - && ( depthBounds == rhs.depthBounds ) - && ( wideLines == rhs.wideLines ) - && ( largePoints == rhs.largePoints ) - && ( alphaToOne == rhs.alphaToOne ) - && ( multiViewport == rhs.multiViewport ) - && ( samplerAnisotropy == rhs.samplerAnisotropy ) - && ( textureCompressionETC2 == rhs.textureCompressionETC2 ) - && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) - && ( textureCompressionBC == rhs.textureCompressionBC ) - && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) - && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) - && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) - && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) - && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) - && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) - && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) - && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) - && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) - && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) - && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) - && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) - && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) - && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) - && ( shaderClipDistance == rhs.shaderClipDistance ) - && ( shaderCullDistance == rhs.shaderCullDistance ) - && ( shaderFloat64 == rhs.shaderFloat64 ) - && ( shaderInt64 == rhs.shaderInt64 ) - && ( shaderInt16 == rhs.shaderInt16 ) - && ( shaderResourceResidency == rhs.shaderResourceResidency ) - && ( shaderResourceMinLod == rhs.shaderResourceMinLod ) - && ( sparseBinding == rhs.sparseBinding ) - && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) - && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) - && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) - && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) - && ( sparseResidency4Samples == rhs.sparseResidency4Samples ) - && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) - && ( sparseResidency16Samples == rhs.sparseResidency16Samples ) - && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) - && ( variableMultisampleRate == rhs.variableMultisampleRate ) - && ( inheritedQueries == rhs.inheritedQueries ); + return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && + ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && + ( geometryShader == rhs.geometryShader ) && ( tessellationShader == rhs.tessellationShader ) && + ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && + ( logicOp == rhs.logicOp ) && ( 
multiDrawIndirect == rhs.multiDrawIndirect ) && + ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && ( depthClamp == rhs.depthClamp ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && + ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && + ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) && + ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && + ( textureCompressionETC2 == rhs.textureCompressionETC2 ) && + ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && + ( textureCompressionBC == rhs.textureCompressionBC ) && + ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && + ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && + ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && + ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && + ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && + ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && + ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && + ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && + ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && + ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && + ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && + ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) && + ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && + ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && + ( shaderClipDistance == rhs.shaderClipDistance ) && ( shaderCullDistance == rhs.shaderCullDistance ) && + ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && + ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && + ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && + ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && + ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && + ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && + ( sparseResidency2Samples == rhs.sparseResidency2Samples ) && + ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && + ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && + ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && + ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && + ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); } - bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; - VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; - VULKAN_HPP_NAMESPACE::Bool32 
multiDrawIndirect = {}; - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; - VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; - VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; - VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; 
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; - VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; - VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; - + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; }; - static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" ); + static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!"
); struct DeviceCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; - VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, - uint32_t queueCreateInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, - uint32_t enabledLayerCount_ = {}, - const char* const* ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char* const* ppEnabledExtensionNames_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, + uint32_t queueCreateInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , queueCreateInfoCount( queueCreateInfoCount_ ) , pQueueCreateInfos( pQueueCreateInfos_ ) @@ -34784,24 +44128,41 @@ namespace VULKAN_HPP_NAMESPACE , pEnabledFeatures( pEnabledFeatures_ ) {} - DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceCreateInfo ) - offsetof( DeviceCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) ) + {} - DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo( + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & + queueCreateInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} ) + : flags( flags_ ) + , queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ) + , pQueueCreateInfos( queueCreateInfos_.data() ) + , enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + , pEnabledFeatures( pEnabledFeatures_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const*>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs ); return *this; } - DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -34819,93 +44180,118 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - DeviceCreateInfo &
setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & + setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT { pQueueCreateInfos = pQueueCreateInfos_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setQueueCreateInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & + queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() ); + pQueueCreateInfos = queueCreateInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT { enabledLayerCount = enabledLayerCount_; return *this; } - DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & + pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return *this; } - DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } - DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setPEnabledExtensionNames( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) + VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceCreateInfo & + setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT { pEnabledFeatures = pEnabledFeatures_; return *this; } - - operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkDeviceCreateInfo*>( this ); + return *reinterpret_cast<const VkDeviceCreateInfo *>( this ); } operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkDeviceCreateInfo*>( this ); + return *reinterpret_cast<VkDeviceCreateInfo *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceCreateInfo const & ) const = default; #else - bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueCreateInfoCount ==
rhs.queueCreateInfoCount ) - && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) - && ( enabledLayerCount == rhs.enabledLayerCount ) - && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) - && ( enabledExtensionCount == rhs.enabledExtensionCount ) - && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) - && ( pEnabledFeatures == rhs.pEnabledFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && + ( enabledLayerCount == rhs.enabledLayerCount ) && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) && ( pEnabledFeatures == rhs.pEnabledFeatures ); } - bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; - uint32_t queueCreateInfoCount = {}; - const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {}; - uint32_t enabledLayerCount = {}; - const char* const* ppEnabledLayerNames = {}; - uint32_t enabledExtensionCount = {}; - const char* const* ppEnabledExtensionNames = {}; - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; + uint32_t queueCreateInfoCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; }; - static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!"
); template <> @@ -34914,82 +44300,185 @@ namespace VULKAN_HPP_NAMESPACE using Type = DeviceCreateInfo; }; - struct DeviceDiagnosticsConfigCreateInfoNV + struct DeviceDeviceMemoryReportCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; - VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) {} - DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( + *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT & + operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceDiagnosticsConfigCreateInfoNV ) - offsetof( DeviceDiagnosticsConfigCreateInfoNV, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ); return *this; } - DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceDiagnosticsConfigCreateInfoNV& operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const*>(&rhs); - return *this; - } - - DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + DeviceDeviceMemoryReportCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - - operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + DeviceDeviceMemoryReportCreateInfoEXT & + setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this ); + pfnUserCallback = pfnUserCallback_; + return *this; } - operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT + DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this ); + pUserData = pUserData_; + return *this; } + operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + {
return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT *>( this ); + } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default; + operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const & ) const = default; #else - bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); } - bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + static_assert( sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DeviceDeviceMemoryReportCreateInfoEXT>::value, + "struct wrapper is not a standard layout!" ); + template <> + struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT> + { + using Type = DeviceDeviceMemoryReportCreateInfoEXT; + }; + + struct DeviceDiagnosticsConfigCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & + operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV & + operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ); + return *this; + } + + DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceDiagnosticsConfigCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV *>( this ); + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto
operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; +#else + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; - }; - static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV> @@ -34999,31 +44488,33 @@ namespace VULKAN_HPP_NAMESPACE struct DeviceEventInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT; - VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = + VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT : deviceEvent( deviceEvent_ ) {} - DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & + operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const*>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs ); return *this; } - DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -35035,43 +44526,37 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this ); + return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this ); } operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkDeviceEventInfoEXT*>( this ); + return *reinterpret_cast<VkDeviceEventInfoEXT *>( this ); } - -#if
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceEventInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceEventInfoEXT const & ) const = default; #else - bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceEvent == rhs.deviceEvent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent ); } - bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; - }; - static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" ); template <> @@ -35082,33 +44567,34 @@ namespace VULKAN_HPP_NAMESPACE struct DeviceGroupBindSparseInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, - uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT : resourceDeviceIndex( resourceDeviceIndex_ ) , memoryDeviceIndex( memoryDeviceIndex_ ) {} - DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupBindSparseInfo ) - offsetof( DeviceGroupBindSparseInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & + operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const*>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs ); return *this; } - DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -35126,80 +44612,78 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupBindSparseInfo const &() const
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this ); + return *reinterpret_cast<const VkDeviceGroupBindSparseInfo *>( this ); } operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this ); + return *reinterpret_cast<VkDeviceGroupBindSparseInfo *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; #else - bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) - && ( memoryDeviceIndex == rhs.memoryDeviceIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && + ( memoryDeviceIndex == rhs.memoryDeviceIndex ); } - bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; - const void* pNext = {}; - uint32_t resourceDeviceIndex = {}; - uint32_t memoryDeviceIndex = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + const void * pNext = {}; + uint32_t resourceDeviceIndex = {}; + uint32_t memoryDeviceIndex = {}; }; - static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo> { using Type = DeviceGroupBindSparseInfo; }; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; struct DeviceGroupCommandBufferBeginInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceGroupCommandBufferBeginInfo; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT : deviceMask( deviceMask_ ) {} - DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & + operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const*>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs ); return *this; } - DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -35211,2893 +44695,731 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this ); + return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>( this ); } operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this ); + return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; #else - bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); } - bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; - const void* pNext = {}; - uint32_t deviceMask =
{}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; }; - static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo> { using Type = DeviceGroupCommandBufferBeginInfo; }; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; - struct DeviceGroupDeviceCreateInfo + class DisplayKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; + public: + using CType = VkDisplayKHR; - VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT - : physicalDeviceCount( physicalDeviceCount_ ) - , pPhysicalDevices( pPhysicalDevices_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayKHR() = default; + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) {} - DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) ); + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = {}; return *this; } - DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayKHR const & ) const = default; +#else + bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return m_displayKHR == rhs.m_displayKHR; } - DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const*>(&rhs); + return m_displayKHR != rhs.m_displayKHR; + } + + bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR < rhs.m_displayKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayKHR m_displayKHR = {}; + }; + static_assert( sizeof(
VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDisplayKHR> + { + using type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR> + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR> + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct PerformanceConfigurationAcquireInfoINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePerformanceConfigurationAcquireInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) + VULKAN_HPP_NOEXCEPT : type( type_ ) + {} + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( + PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT + : PerformanceConfigurationAcquireInfoINTEL( + *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & + operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL & + operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ); + return *this; } - DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationAcquireInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT { - physicalDeviceCount = physicalDeviceCount_; + type = type_; return *this; } - DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - pPhysicalDevices = pPhysicalDevices_; - return *this; + return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( this ); } - - operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this ); + return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>( this ); } - operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; #else - bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const
VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( pPhysicalDevices == rhs.pPhysicalDevices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); } - bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; - const void* pNext = {}; - uint32_t physicalDeviceCount = {}; - const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; }; - static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == + sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = DeviceGroupDeviceCreateInfo; + using Type = PerformanceConfigurationAcquireInfoINTEL; }; - struct DeviceGroupPresentCapabilitiesKHR + class PerformanceConfigurationINTEL { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + public: + using CType = VkPerformanceConfigurationINTEL; - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array const& presentMask_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT - : presentMask( presentMask_ ) - , modes( modes_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default; + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) {} - DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PerformanceConfigurationINTEL & + operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) ); + m_performanceConfigurationINTEL = performanceConfigurationINTEL; return *this; } - - DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const 
& rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default; -#else - bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( presentMask == rhs.presentMask ) - && ( modes == rhs.modes ); - } - - bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } #endif + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = {}; + return *this; + } +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; + } - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; + } + bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; + } + + private: + VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {}; }; - static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == + sizeof( VkPerformanceConfigurationINTEL ), + "handle and wrapper have different size!" ); template <> - struct CppType + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type { - using Type = DeviceGroupPresentCapabilitiesKHR; + using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; }; - struct DeviceGroupPresentInfoKHR + template <> + struct CppType { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; - VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, - const uint32_t* pDeviceMasks_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT - : swapchainCount( swapchainCount_ ) - , pDeviceMasks( pDeviceMasks_ ) - , mode( mode_ ) + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct MemoryAllocateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : allocationSize( allocationSize_ ) + , memoryTypeIndex( memoryTypeIndex_ ) {} - DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & + operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupPresentInfoKHR ) - offsetof( DeviceGroupPresentInfoKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = swapchainCount_; + allocationSize = allocationSize_; return *this; } - DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT { - pDeviceMasks = pDeviceMasks_; + memoryTypeIndex = memoryTypeIndex_; return *this; } - DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - mode = mode_; - return *this; + return *reinterpret_cast( this ); } - - operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT + operator 
VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateInfo const & ) const = default; #else - bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pDeviceMasks == rhs.pDeviceMasks ) - && ( mode == rhs.mode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && + ( memoryTypeIndex == rhs.memoryTypeIndex ); } - bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; - const void* pNext = {}; - uint32_t swapchainCount = {}; - const uint32_t* pDeviceMasks = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeIndex = {}; }; - static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); template <> - struct CppType + struct CppType { - using Type = DeviceGroupPresentInfoKHR; + using Type = MemoryAllocateInfo; }; - struct DeviceGroupRenderPassBeginInfo +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoBindMemoryKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBindMemoryKHR; - VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, - uint32_t deviceRenderAreaCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceMask( deviceMask_ ) - , deviceRenderAreaCount( deviceRenderAreaCount_ ) - , pDeviceRenderAreas( pDeviceRenderAreas_ ) +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryBindIndex( memoryBindIndex_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , memorySize( memorySize_ ) {} - DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBindMemoryKHR( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBindMemoryKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & + operator=( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBindMemoryKHR & operator=( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + VideoBindMemoryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + VideoBindMemoryKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT { - deviceMask = deviceMask_; + memoryBindIndex = memoryBindIndex_; return *this; } - DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT - { - deviceRenderAreaCount = deviceRenderAreaCount_; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT - { - pDeviceRenderAreas = pDeviceRenderAreas_; - return *this; - } - - - operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default; -#else - bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceMask == rhs.deviceMask ) - && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) - && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); - } - - bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; - const void* pNext = {}; - uint32_t deviceMask = {}; - uint32_t deviceRenderAreaCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {}; - - }; - static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = DeviceGroupRenderPassBeginInfo; - }; - - struct DeviceGroupSubmitInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; - - VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const uint32_t* pWaitSemaphoreDeviceIndices_ = {}, - uint32_t commandBufferCount_ = {}, - const uint32_t* pCommandBufferDeviceMasks_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const uint32_t* pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) - {} - - DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupSubmitInfo ) - offsetof( DeviceGroupSubmitInfo, pNext ) ); - return *this; - } - - DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT - { - pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; - return *this; - } - - DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT - { - commandBufferCount = commandBufferCount_; - return *this; - } - - DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT - { - pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; - return *this; - } - - DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - signalSemaphoreCount 
= signalSemaphoreCount_; - return *this; - } - - DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT - { - pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; - return *this; - } - - - operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupSubmitInfo const& ) const = default; -#else - bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) - && ( commandBufferCount == rhs.commandBufferCount ) - && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); - } - - bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const uint32_t* pWaitSemaphoreDeviceIndices = {}; - uint32_t commandBufferCount = {}; - const uint32_t* pCommandBufferDeviceMasks = {}; - uint32_t signalSemaphoreCount = {}; - const uint32_t* pSignalSemaphoreDeviceIndices = {}; - - }; - static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DeviceGroupSubmitInfo; - }; - - struct DeviceGroupSwapchainCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT - : modes( modes_ ) - {} - - DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) ); - return *this; - } - - DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT - { - modes = modes_; - return *this; - } - - - operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default; -#else - bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( modes == rhs.modes ); - } - - bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; - - }; - static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DeviceGroupSwapchainCreateInfoKHR; - }; - - struct DeviceMemoryOpaqueCaptureAddressInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; - - VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT - : memory( memory_ ) - {} - - DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) - offsetof( DeviceMemoryOpaqueCaptureAddressInfo, pNext ) ); - return *this; - } - - DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceMemoryOpaqueCaptureAddressInfo& operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VideoBindMemoryKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } - - operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT + VideoBindMemoryKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + memoryOffset = memoryOffset_; + return *this; } - operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT + VideoBindMemoryKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + memorySize = memorySize_; + return *this; } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const& ) const = default; -#else - bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + operator VkVideoBindMemoryKHR const &() const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ); + return *reinterpret_cast( this ); } - bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + operator VkVideoBindMemoryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoBindMemoryKHR const & ) const = default; +# else + bool operator==( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && + ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize ); + } + + bool operator!=( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBindMemoryKHR; + const void * pNext = {}; + uint32_t memoryBindIndex = {}; + 
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {}; }; - static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VideoBindMemoryKHR ) == sizeof( VkVideoBindMemoryKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = DeviceMemoryOpaqueCaptureAddressInfo; + using Type = VideoBindMemoryKHR; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct DeviceMemoryOverallocationCreateInfoAMD + class DeferredOperationKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + public: + using CType = VkDeferredOperationKHR; - VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT - : overallocationBehavior( overallocationBehavior_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DeferredOperationKHR() = default; + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) {} - DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) ); + m_deferredOperationKHR = deferredOperationKHR; return *this; } - - DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT - { - overallocationBehavior = overallocationBehavior_; - return *this; - } - - - operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default; -#else - bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( overallocationBehavior == rhs.overallocationBehavior ); - } - - bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } #endif + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = {}; + return *this; + } +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeferredOperationKHR const & ) const = default; +#else + bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == rhs.m_deferredOperationKHR; + } - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + bool operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != rhs.m_deferredOperationKHR; + } + bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR < rhs.m_deferredOperationKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == VK_NULL_HANDLE; + } + + private: + VkDeferredOperationKHR m_deferredOperationKHR = {}; }; - static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), + "handle and wrapper have different size!" ); template <> - struct CppType + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type { - using Type = DeviceMemoryOverallocationCreateInfoAMD; + using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; }; - struct DevicePrivateDataCreateInfoEXT - { - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT; - - VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT( uint32_t privateDataSlotRequestCount_ = {} ) VULKAN_HPP_NOEXCEPT - : privateDataSlotRequestCount( privateDataSlotRequestCount_ ) - {} - - DevicePrivateDataCreateInfoEXT & operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DevicePrivateDataCreateInfoEXT ) - offsetof( DevicePrivateDataCreateInfoEXT, pNext ) ); - return *this; - } - - DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DevicePrivateDataCreateInfoEXT& operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DevicePrivateDataCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DevicePrivateDataCreateInfoEXT & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT - { - privateDataSlotRequestCount = privateDataSlotRequestCount_; - return *this; - } - - - operator VkDevicePrivateDataCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DevicePrivateDataCreateInfoEXT const& ) const = default; -#else - bool operator==( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); - } - - bool operator!=( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfoEXT; - const void* pNext = {}; - uint32_t privateDataSlotRequestCount = {}; - - }; - static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - template <> - struct CppType + struct CppType { - using Type = DevicePrivateDataCreateInfoEXT; + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; }; - struct DeviceQueueGlobalPriorityCreateInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; - - VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT - : globalPriority( globalPriority_ ) - {} - - DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) ); - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT - { - globalPriority = globalPriority_; - return *this; - } - - - operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default; -#else - bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( globalPriority == rhs.globalPriority ); - } - - bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow; - - }; - static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - template <> - struct CppType + struct isVulkanHandleType { - using Type = DeviceQueueGlobalPriorityCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - struct DeviceQueueInfo2 + class PipelineCache { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; + public: + using CType = VkPipelineCache; - VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueIndex( queueIndex_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: + VULKAN_HPP_CONSTEXPR PipelineCache() = default; + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + : m_pipelineCache( pipelineCache ) {} - DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueInfo2 ) - offsetof( DeviceQueueInfo2, pNext ) ); + m_pipelineCache = pipelineCache; return *this; } - - DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueIndex = queueIndex_; - return *this; - } - - - operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DeviceQueueInfo2 const& ) const = default; -#else - bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( queueIndex == rhs.queueIndex ); - } - - bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } #endif + PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = {}; + return *this; + } +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCache const & ) const = default; +#else + bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == rhs.m_pipelineCache; + } - public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - uint32_t queueIndex = {}; + bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != rhs.m_pipelineCache; + } + bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache < rhs.m_pipelineCache; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache = {}; }; - static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), + "handle and wrapper have different size!" ); template <> - struct CppType + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type { - using Type = DeviceQueueInfo2; + using type = VULKAN_HPP_NAMESPACE::PipelineCache; }; - struct DispatchIndirectCommand - { - - - VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, - uint32_t y_ = {}, - uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) - {} - - DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT - { - x = x_; - return *this; - } - - DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT - { - y = y_; - return *this; - } - - DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT - { - z = z_; - return *this; - } - - - operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DispatchIndirectCommand const& ) const = default; -#else - bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( z == rhs.z ); - } - - bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - uint32_t x = {}; - uint32_t y = {}; - uint32_t z = {}; - - }; - static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayEventInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; - - VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT - : displayEvent( displayEvent_ ) - {} - - DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayEventInfoEXT ) - offsetof( DisplayEventInfoEXT, pNext ) ); - return *this; - } - - DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT - { - displayEvent = displayEvent_; - return *this; - } - - - operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayEventInfoEXT const& ) const = default; -#else - bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayEvent == rhs.displayEvent ); - } - - bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; - - }; - static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - template <> - struct CppType + struct CppType { - using Type = DisplayEventInfoEXT; + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; }; - struct DisplayModeParametersKHR - { - - - VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, - uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT - : visibleRegion( visibleRegion_ ) - , refreshRate( refreshRate_ ) - {} - - DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT - { - visibleRegion = visibleRegion_; - return *this; - } - - DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT - { - refreshRate = refreshRate_; - return *this; - } - - - operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModeParametersKHR const& ) const = default; -#else - bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( visibleRegion == rhs.visibleRegion ) - && ( refreshRate == rhs.refreshRate ); - } - - bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; - uint32_t refreshRate = {}; - - }; - static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayModeCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , parameters( parameters_ ) - {} - - DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayModeCreateInfoKHR ) - offsetof( DisplayModeCreateInfoKHR, pNext ) ); - return *this; - } - - DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT - { - parameters = parameters_; - return *this; - } - - - operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModeCreateInfoKHR const& ) const = default; -#else - bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( parameters == rhs.parameters ); - } - - bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; - - }; - static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - template <> - struct CppType + struct CppType { - using Type = DisplayModeCreateInfoKHR; + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; }; - struct DisplayModePropertiesKHR - { - - - VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT - : displayMode( displayMode_ ) - , parameters( parameters_ ) - {} - - DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModePropertiesKHR& operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModePropertiesKHR const& ) const = default; -#else - bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( displayMode == rhs.displayMode ) - && ( parameters == rhs.parameters ); - } - - bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; - - }; - static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayModeProperties2KHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR; - - VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : displayModeProperties( displayModeProperties_ ) - {} - - DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayModeProperties2KHR ) - offsetof( DisplayModeProperties2KHR, pNext ) ); - return *this; - } - - DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayModeProperties2KHR const& ) const = default; -#else - bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayModeProperties == rhs.displayModeProperties ); - } - - bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR; - void* pNext = {}; - 
VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {}; - - }; - static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - template <> - struct CppType + struct isVulkanHandleType { - using Type = DisplayModeProperties2KHR; - }; - - struct DisplayNativeHdrSurfaceCapabilitiesAMD - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; - - VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT - : localDimmingSupport( localDimmingSupport_ ) - {} - - DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) - offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) ); - return *this; - } - - DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const& ) const = default; -#else - bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( localDimmingSupport == rhs.localDimmingSupport ); - } - - bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; - - }; - static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; - }; - - struct DisplayPlaneCapabilitiesKHR - { - - - VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : supportedAlpha( supportedAlpha_ ) - , minSrcPosition( minSrcPosition_ ) - , maxSrcPosition( maxSrcPosition_ ) - , minSrcExtent( minSrcExtent_ ) - , maxSrcExtent( maxSrcExtent_ ) - , minDstPosition( minDstPosition_ ) - , maxDstPosition( maxDstPosition_ ) - , minDstExtent( minDstExtent_ ) - , maxDstExtent( maxDstExtent_ ) - {} - - DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneCapabilitiesKHR& operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneCapabilitiesKHR const& ) const = default; -#else - bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( supportedAlpha == rhs.supportedAlpha ) - && ( minSrcPosition == rhs.minSrcPosition ) - && ( maxSrcPosition == rhs.maxSrcPosition ) - && ( minSrcExtent == rhs.minSrcExtent ) - && ( maxSrcExtent == rhs.maxSrcExtent ) - && ( minDstPosition == rhs.minDstPosition ) - && ( maxDstPosition == rhs.maxDstPosition ) - && ( minDstExtent == rhs.minDstExtent ) - && ( maxDstExtent == rhs.maxDstExtent ); - } - - bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; - VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; - VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; - VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; - VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; - VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; - VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; - - }; - static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPlaneCapabilities2KHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; - - VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT - : capabilities( capabilities_ ) - {} - - DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) ); - return *this; - } - - DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneCapabilities2KHR const& ) const = default; -#else - bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( capabilities == rhs.capabilities ); - } - - bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; - - }; - static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DisplayPlaneCapabilities2KHR; - }; - - struct DisplayPlaneInfo2KHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; - - VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, - uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : mode( mode_ ) - , planeIndex( planeIndex_ ) - {} - - DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneInfo2KHR ) - offsetof( DisplayPlaneInfo2KHR, pNext ) ); - return *this; - } - - DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT - { - mode = mode_; - return *this; - } - - DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT - { - planeIndex = planeIndex_; - return *this; - } - - - operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneInfo2KHR const& ) const = default; -#else - bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( mode == rhs.mode ) - && ( planeIndex == rhs.planeIndex ); - } - - bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; - uint32_t planeIndex = {}; - - }; - static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DisplayPlaneInfo2KHR; - }; - - struct DisplayPlanePropertiesKHR - { - - - VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, - uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : currentDisplay( currentDisplay_ ) - , currentStackIndex( currentStackIndex_ ) - {} - - DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlanePropertiesKHR& operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlanePropertiesKHR const& ) const = default; -#else - bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( currentDisplay == rhs.currentDisplay ) - && ( currentStackIndex == rhs.currentStackIndex ); - } - - bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; - uint32_t currentStackIndex = {}; - - }; - static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayPlaneProperties2KHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; - - VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : displayPlaneProperties( displayPlaneProperties_ ) - {} - - DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) ); - return *this; - } - - DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPlaneProperties2KHR const& ) const = default; -#else - bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPlaneProperties == rhs.displayPlaneProperties ); - } - - bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = 
{}; - - }; - static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = DisplayPlaneProperties2KHR; - }; - - struct DisplayPowerInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; - - VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT - : powerState( powerState_ ) - {} - - DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayPowerInfoEXT ) - offsetof( DisplayPowerInfoEXT, pNext ) ); - return *this; - } - - DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT - { - powerState = powerState_; - return *this; - } - - - operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPowerInfoEXT const& ) const = default; -#else - bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( powerState == rhs.powerState ); - } - - bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; - - }; - static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DisplayPowerInfoEXT; - }; - - struct DisplayPresentInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; - - VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcRect( srcRect_ ) - , dstRect( dstRect_ ) - , persistent( persistent_ ) - {} - - DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayPresentInfoKHR ) - offsetof( DisplayPresentInfoKHR, pNext ) ); - return *this; - } - - DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT - { - srcRect = srcRect_; - return *this; - } - - DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT - { - dstRect = dstRect_; - return *this; - } - - DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT - { - persistent = persistent_; - return *this; - } - - - operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPresentInfoKHR const& ) const = default; -#else - bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcRect == rhs.srcRect ) - && ( dstRect == rhs.dstRect ) - && ( persistent == rhs.persistent ); - } - - bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; - VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; - - }; - static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DisplayPresentInfoKHR; - }; - - struct DisplayPropertiesKHR - { - - - VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, - const char* displayName_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT - : display( display_ ) - , displayName( displayName_ ) - , physicalDimensions( physicalDimensions_ ) - , physicalResolution( physicalResolution_ ) - , supportedTransforms( supportedTransforms_ ) - , planeReorderPossible( planeReorderPossible_ ) - , persistentContent( persistentContent_ ) - {} - - DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayPropertiesKHR& operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayPropertiesKHR const& ) const = default; -#else - bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( display == rhs.display ) - && ( displayName == rhs.displayName ) - && ( physicalDimensions == rhs.physicalDimensions ) - && ( physicalResolution == rhs.physicalResolution ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( planeReorderPossible == rhs.planeReorderPossible ) - && ( persistentContent == rhs.persistentContent ); - } - - bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; - const char* displayName = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; - - }; - static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayProperties2KHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; - - VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : displayProperties( displayProperties_ ) - {} - - DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplayProperties2KHR ) - offsetof( DisplayProperties2KHR, pNext ) ); - return *this; - } - - DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplayProperties2KHR const& ) const = default; -#else - bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayProperties == rhs.displayProperties ); - } - - bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; - - }; - static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = DisplayProperties2KHR; - }; - - struct DisplaySurfaceCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - uint32_t planeIndex_ = {}, - uint32_t planeStackIndex_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - float globalAlpha_ = {}, - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , displayMode( displayMode_ ) - , planeIndex( planeIndex_ ) - , planeStackIndex( planeStackIndex_ ) - , transform( transform_ ) - , globalAlpha( globalAlpha_ ) - , alphaMode( alphaMode_ ) - , imageExtent( imageExtent_ ) - {} - - DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) ); - return *this; - } - - DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT - { - displayMode = displayMode_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT - { - planeIndex = planeIndex_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT - { - planeStackIndex = planeStackIndex_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT - { - transform = transform_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT - { - globalAlpha = globalAlpha_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT - { - alphaMode = alphaMode_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT - { - imageExtent = imageExtent_; - return *this; - } - - - operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DisplaySurfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) 
const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( displayMode == rhs.displayMode ) - && ( planeIndex == rhs.planeIndex ) - && ( planeStackIndex == rhs.planeStackIndex ) - && ( transform == rhs.transform ) - && ( globalAlpha == rhs.globalAlpha ) - && ( alphaMode == rhs.alphaMode ) - && ( imageExtent == rhs.imageExtent ); - } - - bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - uint32_t planeIndex = {}; - uint32_t planeStackIndex = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - float globalAlpha = {}; - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; - VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; - - }; - static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = DisplaySurfaceCreateInfoKHR; - }; - - struct DrawIndexedIndirectCommand - { - - - VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, - uint32_t instanceCount_ = {}, - uint32_t firstIndex_ = {}, - int32_t vertexOffset_ = {}, - uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT - : indexCount( indexCount_ ) - , instanceCount( instanceCount_ ) - , firstIndex( firstIndex_ ) - , vertexOffset( vertexOffset_ ) - , firstInstance( firstInstance_ ) - {} - - DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT - { - indexCount = indexCount_; - return *this; - } - - DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT - { - instanceCount = instanceCount_; - return *this; - } - - DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT - { - firstIndex = firstIndex_; - return *this; - } - - DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT - { - vertexOffset = vertexOffset_; - return *this; - } - - DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT - { - firstInstance = firstInstance_; - return *this; - } - - - operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( DrawIndexedIndirectCommand const& ) const = default; -#else - bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( indexCount == rhs.indexCount ) - && ( instanceCount == rhs.instanceCount ) - && ( firstIndex == 
rhs.firstIndex )
-          && ( vertexOffset == rhs.vertexOffset )
-          && ( firstInstance == rhs.firstInstance );
-    }
-
-    bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    uint32_t indexCount = {};
-    uint32_t instanceCount = {};
-    uint32_t firstIndex = {};
-    int32_t vertexOffset = {};
-    uint32_t firstInstance = {};
-
-  };
-  static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
-
-  struct DrawIndirectCommand
-  {
-
-
-    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {},
-                                              uint32_t instanceCount_ = {},
-                                              uint32_t firstVertex_ = {},
-                                              uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
-      : vertexCount( vertexCount_ )
-      , instanceCount( instanceCount_ )
-      , firstVertex( firstVertex_ )
-      , firstInstance( firstInstance_ )
-    {}
-
-    DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<DrawIndirectCommand const *>( &rhs );
-      return *this;
-    }
-
-    DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      vertexCount = vertexCount_;
-      return *this;
-    }
-
-    DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      instanceCount = instanceCount_;
-      return *this;
-    }
-
-    DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
-    {
-      firstVertex = firstVertex_;
-      return *this;
-    }
-
-    DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
-    {
-      firstInstance = firstInstance_;
-      return *this;
-    }
-
-
-    operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
-    }
-
-    operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDrawIndirectCommand*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DrawIndirectCommand const& ) const = default;
-#else
-    bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( vertexCount == rhs.vertexCount )
-          && ( instanceCount == rhs.instanceCount )
-          && ( firstVertex == rhs.firstVertex )
-          && ( firstInstance == rhs.firstInstance );
-    }
-
-    bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    uint32_t vertexCount = {};
-    uint32_t instanceCount = {};
-    uint32_t firstVertex = {};
-    uint32_t firstInstance = {};
-
-  };
-  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
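Because the removed DrawIndexedIndirectCommand and DrawIndirectCommand wrappers are asserted above to match their Vk counterparts in size and layout, a record can be composed with the fluent setters and copied straight into an indirect buffer. A minimal sketch, not part of the patch: writeIndirectRecord and mapped are hypothetical names, the buffer is assumed host-visible and already mapped, and buffer creation/synchronization are omitted.

    #include <cstring>
    #include <vulkan/vulkan.hpp>

    // Build one indexed indirect-draw record; the static_asserts above
    // guarantee the wrapper is bit-compatible with VkDrawIndexedIndirectCommand.
    void writeIndirectRecord( void * mapped )  // 'mapped' -> host-visible buffer memory (assumed)
    {
      vk::DrawIndexedIndirectCommand cmd{};
      cmd.setIndexCount( 36 )    // e.g. one indexed cube
         .setInstanceCount( 1 )
         .setFirstIndex( 0 )
         .setVertexOffset( 0 )
         .setFirstInstance( 0 );
      std::memcpy( mapped, &cmd, sizeof( cmd ) );  // consumed later by vkCmdDrawIndexedIndirect
    }

The setters each return a reference to the struct, which is what makes the chained style above work.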
-
-  struct DrawMeshTasksIndirectCommandNV
-  {
-
-
-    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {},
-                                                         uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
-      : taskCount( taskCount_ )
-      , firstTask( firstTask_ )
-    {}
-
-    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs );
-      return *this;
-    }
-
-    DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      taskCount = taskCount_;
-      return *this;
-    }
-
-    DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      firstTask = firstTask_;
-      return *this;
-    }
-
-
-    operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
-    }
-
-    operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DrawMeshTasksIndirectCommandNV const& ) const = default;
-#else
-    bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( taskCount == rhs.taskCount )
-          && ( firstTask == rhs.firstTask );
-    }
-
-    bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    uint32_t taskCount = {};
-    uint32_t firstTask = {};
-
-  };
-  static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
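The NV mesh-shader variant removed above follows the same layout-compatible pattern, so a record can be written to a buffer and consumed by the indirect mesh-task draw call. A hedged sketch, assuming VK_NV_mesh_shader is enabled; recordMeshDraw is a hypothetical helper, indirectBuffer is assumed host-visible and mapped at 'mapped', and setup/barriers are elided.

    #include <cstring>
    #include <vulkan/vulkan.hpp>

    // Fill one task-dispatch record, then record an indirect mesh draw that reads it.
    void recordMeshDraw( vk::CommandBuffer cmd, vk::Buffer indirectBuffer, void * mapped )
    {
      vk::DrawMeshTasksIndirectCommandNV record{};
      record.setTaskCount( 64 )   // 64 task-shader workgroups (illustrative)
            .setFirstTask( 0 );
      std::memcpy( mapped, &record, sizeof( record ) );

      // offset 0, one draw, tightly packed records
      cmd.drawMeshTasksIndirectNV( indirectBuffer, 0, 1, static_cast<uint32_t>( sizeof( record ) ) );
    }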
-
-  struct DrmFormatModifierPropertiesEXT
-  {
-
-
-    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {},
-                                                         uint32_t drmFormatModifierPlaneCount_ = {},
-                                                         VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
-      : drmFormatModifier( drmFormatModifier_ )
-      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
-      , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
-    {}
-
-    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    DrmFormatModifierPropertiesEXT& operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs );
-      return *this;
-    }
-
-
-    operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
-    }
-
-    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DrmFormatModifierPropertiesEXT const& ) const = default;
-#else
-    bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( drmFormatModifier == rhs.drmFormatModifier )
-          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
-          && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
-    }
-
-    bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    uint64_t drmFormatModifier = {};
-    uint32_t drmFormatModifierPlaneCount = {};
-    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
-
-  };
-  static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
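DrmFormatModifierPropertiesEXT is an output-only wrapper (note the absence of setters); drivers return an array of them through the DrmFormatModifierPropertiesListEXT struct removed just below, via the usual two-call enumeration idiom. A minimal sketch, assuming the physical device supports VK_EXT_image_drm_format_modifier; queryDrmModifiers is a hypothetical helper.

    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Enumerate the DRM format modifiers a physical device supports for 'format':
    // the first call fills drmFormatModifierCount, the second fills the array.
    std::vector<vk::DrmFormatModifierPropertiesEXT> queryDrmModifiers( vk::PhysicalDevice gpu, vk::Format format )
    {
      vk::DrmFormatModifierPropertiesListEXT list{};  // pDrmFormatModifierProperties starts as nullptr
      vk::FormatProperties2 props{};
      props.pNext = &list;                            // chain the extension struct

      gpu.getFormatProperties2( format, &props );     // count query
      std::vector<vk::DrmFormatModifierPropertiesEXT> mods( list.drmFormatModifierCount );
      list.pDrmFormatModifierProperties = mods.data();
      gpu.getFormatProperties2( format, &props );     // fills 'mods'
      return mods;
    }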
-
-  struct DrmFormatModifierPropertiesListEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
-
-    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {},
-                                                             VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT
-      : drmFormatModifierCount( drmFormatModifierCount_ )
-      , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
-    {}
-
-    DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) );
-      return *this;
-    }
-
-    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs );
-      return *this;
-    }
-
-
-    operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
-    }
-
-    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( DrmFormatModifierPropertiesListEXT const& ) const = default;
-#else
-    bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
-          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
-    }
-
-    bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
-    void* pNext = {};
-    uint32_t drmFormatModifierCount = {};
-    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {};
-
-  };
-  static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!"
); - - template <> - struct CppType - { - using Type = DrmFormatModifierPropertiesListEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; struct EventCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) {} - EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( EventCreateInfo ) - offsetof( EventCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : EventCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -38109,41 +45431,34 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( EventCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( EventCreateInfo const & ) const = default; #else - bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); } - bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; - }; static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); @@ -38154,1665 +45469,32 @@ namespace VULKAN_HPP_NAMESPACE using Type = EventCreateInfo; }; - struct ExportFenceCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; - - VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportFenceCreateInfo ) - offsetof( ExportFenceCreateInfo, pNext ) ); - return *this; - } - - ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - - operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportFenceCreateInfo const& ) const = default; -#else - bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; - - }; - static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportFenceCreateInfo; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportFenceWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - - operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportFenceWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - - }; - static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportFenceWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExportMemoryAllocateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; - - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryAllocateInfo ) - offsetof( ExportMemoryAllocateInfo, pNext ) ); - return *this; - } - - ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - - operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryAllocateInfo const& ) const = default; -#else - bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; - - }; - static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportMemoryAllocateInfo; - }; - - struct ExportMemoryAllocateInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; - - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) ); - return *this; - } - - ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - - operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryAllocateInfoNV const& ) const = default; -#else - bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; - - }; - static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportMemoryAllocateInfoNV; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - - operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportMemoryWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; - - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - {} - - ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) ); - return *this; - } - - ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - - operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportMemoryWin32HandleInfoNV const& ) const = default; -#else - bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ); - } - - bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportMemoryWin32HandleInfoNV; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExportSemaphoreCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; - - VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportSemaphoreCreateInfo ) - offsetof( ExportSemaphoreCreateInfo, pNext ) ); - return *this; - } - - ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - - operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportSemaphoreCreateInfo const& ) const = default; -#else - bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; - - }; - static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportSemaphoreCreateInfo; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportSemaphoreWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT - { - pAttributes = pAttributes_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT - { - dwAccess = dwAccess_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - - operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; - const void* pNext = {}; - const SECURITY_ATTRIBUTES* pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - - }; - static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExportSemaphoreWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExtensionProperties - { - - - VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array const& extensionName_ = {}, - uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : extensionName( extensionName_ ) - , specVersion( specVersion_ ) - {} - - ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExtensionProperties& operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExtensionProperties const& ) const = default; -#else - bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( extensionName == rhs.extensionName ) - && ( specVersion == rhs.specVersion ); - } - - bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; - uint32_t specVersion = {}; - - }; - static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ExternalMemoryProperties - { - - - VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - {} - - ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryProperties& operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryProperties const& ) const = default; -#else - bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); - } - - bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; - - }; - static_assert( sizeof( ExternalMemoryProperties ) == 
sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ExternalBufferProperties - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; - - VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryProperties( externalMemoryProperties_ ) - {} - - ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalBufferProperties ) - offsetof( ExternalBufferProperties, pNext ) ); - return *this; - } - - ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalBufferProperties const& ) const = default; -#else - bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalMemoryProperties == rhs.externalMemoryProperties ); - } - - bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; - - }; - static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExternalBufferProperties; - }; - - struct ExternalFenceProperties - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; - - VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalFenceFeatures( externalFenceFeatures_ ) - {} - - ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalFenceProperties ) - offsetof( ExternalFenceProperties, pNext ) ); - return *this; - } - - ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalFenceProperties const& ) const = default; -#else - bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalFenceFeatures == rhs.externalFenceFeatures ); - } - - bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; - - }; - static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExternalFenceProperties; - }; - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ExternalFormatANDROID - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; - - VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT - : externalFormat( externalFormat_ ) - {} - - ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalFormatANDROID ) - offsetof( ExternalFormatANDROID, pNext ) ); - return *this; - } - - ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT - { - externalFormat = externalFormat_; - return *this; - } - - - operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalFormatANDROID const& ) const = default; -#else - bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalFormat == rhs.externalFormat ); - } - - bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; - void* pNext = {}; - uint64_t externalFormat = {}; - - }; - static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExternalFormatANDROID; - }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - struct ExternalImageFormatProperties - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; - - VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryProperties( externalMemoryProperties_ ) - {} - - ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) ); - return *this; - } - - ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalImageFormatProperties const& ) const = default; -#else - bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalMemoryProperties == rhs.externalMemoryProperties ); - } - - bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; - - }; - static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExternalImageFormatProperties; - }; - - struct ImageFormatProperties - { - - - VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, - uint32_t maxMipLevels_ = {}, - uint32_t maxArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxExtent( maxExtent_ ) - , maxMipLevels( maxMipLevels_ ) - , maxArrayLayers( maxArrayLayers_ ) - , sampleCounts( sampleCounts_ ) - , maxResourceSize( maxResourceSize_ ) - {} - - ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageFormatProperties& operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageFormatProperties const& ) const = default; -#else - bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( maxExtent == rhs.maxExtent ) - && ( maxMipLevels == rhs.maxMipLevels ) - && ( maxArrayLayers == rhs.maxArrayLayers ) - && ( sampleCounts == rhs.sampleCounts ) - && ( maxResourceSize == rhs.maxResourceSize ); - } - - bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; - uint32_t maxMipLevels = {}; - uint32_t maxArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; - - }; - static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalImageFormatPropertiesNV - { - - - VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : imageFormatProperties( imageFormatProperties_ ) - , externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - {} - - ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalImageFormatPropertiesNV const& ) const = default; -#else - bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( imageFormatProperties == rhs.imageFormatProperties ) - && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); - } - - bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; - - }; - static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ExternalMemoryBufferCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; - - VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) ); - return *this; - } - - ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - - operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryBufferCreateInfo const& ) const = default; -#else - bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; - - }; - static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExternalMemoryBufferCreateInfo; - }; - - struct ExternalMemoryImageCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; - - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) ); - return *this; - } - - ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - - operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryImageCreateInfo const& ) const = default; -#else - bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; - - }; - static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExternalMemoryImageCreateInfo; - }; - - struct ExternalMemoryImageCreateInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; - - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : handleTypes( handleTypes_ ) - {} - - ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) ); - return *this; - } - - ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT - { - handleTypes = handleTypes_; - return *this; - } - - - operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalMemoryImageCreateInfoNV const& ) const = default; -#else - bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; - - }; - static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ExternalMemoryImageCreateInfoNV; - }; - - struct ExternalSemaphoreProperties - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; - - VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) - {} - - ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) ); - return *this; - } - - ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ExternalSemaphoreProperties const& ) const = default; -#else - bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); - } - - bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; - - }; - static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  template <>
-  struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
-  {
-    using Type = ExternalSemaphoreProperties;
-  };
-
  struct FenceCreateInfo
  {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;

+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

-    FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( FenceCreateInfo ) - offsetof( FenceCreateInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
+      *this = *reinterpret_cast<FenceCreateInfo const *>( &rhs );
      return *this;
    }

-    FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
@@ -39824,41 +45506,34 @@ namespace VULKAN_HPP_NAMESPACE
      return *this;
    }

-
-    operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkFenceCreateInfo*>( this );
+      return *reinterpret_cast<const VkFenceCreateInfo *>( this );
    }

    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkFenceCreateInfo*>( this );
+      return *reinterpret_cast<VkFenceCreateInfo *>( this );
    }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( FenceCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FenceCreateInfo const & ) const = default;
#else
-    bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

-    bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

-
-
  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
-
  };
  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
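The FenceCreateInfo hunk above is the template for the whole vulkan.hpp update in this patch: the memcpy-over-pNext copy assignment and the `*this = rhs` conversion constructor give way to defaulted, constexpr-friendly copy operations plus a delegating constructor, all guarded by VULKAN_HPP_NO_STRUCT_CONSTRUCTORS. A short sketch of what that buys, not taken from the patch itself, with `device` assumed to be a valid vk::Device:

    // Copies are now member-wise (defaulted) and construction is constexpr:
    constexpr vk::FenceCreateInfo signaled{ vk::FenceCreateFlagBits::eSignaled };
    vk::FenceCreateInfo copy = signaled;               // defaulted copy, no memcpy
    vk::Fence fence = device.createFence( signaled );  // converts implicitly to VkFenceCreateInfo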
@@ -39869,649 +45544,18 @@ namespace VULKAN_HPP_NAMESPACE
    using Type = FenceCreateInfo;
  };

-  struct FenceGetFdInfoKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
-
-    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
-                                            VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
-      : fence( fence_ )
-      , handleType( handleType_ )
-    {}
-
-    FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( FenceGetFdInfoKHR ) - offsetof( FenceGetFdInfoKHR, pNext ) );
-      return *this;
-    }
-
-    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
-      return *this;
-    }
-
-    FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
-    {
-      fence = fence_;
-      return *this;
-    }
-
-    FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
-    {
-      handleType = handleType_;
-      return *this;
-    }
-
-
-    operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
-    }
-
-    operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( FenceGetFdInfoKHR const& ) const = default;
-#else
-    bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fence == rhs.fence )
-          && ( handleType == rhs.handleType );
-    }
-
-    bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Fence fence = {};
-    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
-
-  };
-  static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!"
); - - template <> - struct CppType - { - using Type = FenceGetFdInfoKHR; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct FenceGetWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : fence( fence_ ) - , handleType( handleType_ ) - {} - - FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) ); - return *this; - } - - FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT - { - fence = fence_; - return *this; - } - - FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - - operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FenceGetWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - - }; - static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = FenceGetWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct FilterCubicImageViewImageFormatPropertiesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; - - VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT - : filterCubic( filterCubic_ ) - , filterCubicMinmax( filterCubicMinmax_ ) - {} - - FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) - offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) ); - return *this; - } - - FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const& ) const = default; -#else - bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( filterCubic == rhs.filterCubic ) - && ( filterCubicMinmax == rhs.filterCubicMinmax ); - } - - bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; - - }; - static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = FilterCubicImageViewImageFormatPropertiesEXT; - }; - - struct FormatProperties - { - - - VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : linearTilingFeatures( linearTilingFeatures_ ) - , optimalTilingFeatures( optimalTilingFeatures_ ) - , bufferFeatures( bufferFeatures_ ) - {} - - FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FormatProperties& operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FormatProperties const& ) const = default; -#else - bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( linearTilingFeatures == rhs.linearTilingFeatures ) - && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) - && ( bufferFeatures == rhs.bufferFeatures ); - } - - bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; - - }; - static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct FormatProperties2 - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; - - VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : formatProperties( formatProperties_ ) - {} - - FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( FormatProperties2 ) - offsetof( FormatProperties2, pNext ) ); - return *this; - } - - FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FormatProperties2 const& ) const = default; -#else - bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( formatProperties == rhs.formatProperties ); - } - - bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; - - }; - static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = FormatProperties2; - }; - - struct FramebufferAttachmentImageInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; - - VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t layerCount_ = {}, - uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , usage( usage_ ) - , width( width_ ) - , height( height_ ) - , layerCount( layerCount_ ) - , viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) - {} - - FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( FramebufferAttachmentImageInfo ) - offsetof( FramebufferAttachmentImageInfo, pNext ) ); - return *this; - } - - FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FramebufferAttachmentImageInfo& operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - - FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT - { - width = width_; - return *this; - } - - FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT - { - height = height_; - return *this; - } - - FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT - { - layerCount = layerCount_; - return *this; - } - - FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT - { - viewFormatCount = viewFormatCount_; - return *this; - } - - FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT - { - pViewFormats = pViewFormats_; - return *this; - } - - - operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferAttachmentImageInfo const& ) const = default; -#else - bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( usage == rhs.usage ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( layerCount == rhs.layerCount ) - && ( viewFormatCount == rhs.viewFormatCount ) - && ( pViewFormats == rhs.pViewFormats ); - } - - bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layerCount = {}; - uint32_t viewFormatCount = {}; - const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; - - }; - static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = FramebufferAttachmentImageInfo; - }; - - struct FramebufferAttachmentsCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; - - VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT - : attachmentImageInfoCount( attachmentImageInfoCount_ ) - , pAttachmentImageInfos( pAttachmentImageInfos_ ) - {} - - FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( FramebufferAttachmentsCreateInfo ) - offsetof( FramebufferAttachmentsCreateInfo, pNext ) ); - return *this; - } - - FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FramebufferAttachmentsCreateInfo& operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT - { - attachmentImageInfoCount = attachmentImageInfoCount_; - return *this; - } - - FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT - { - pAttachmentImageInfos = pAttachmentImageInfos_; - return *this; - } - - - operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferAttachmentsCreateInfo const& ) const = default; -#else - bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) - && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); - } - - bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; - const void* pNext = {}; - uint32_t attachmentImageInfoCount = {}; - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {}; - - }; - static_assert( sizeof( 
FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = FramebufferAttachmentsCreateInfo; - }; - struct FramebufferCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; - VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , renderPass( renderPass_ ) @@ -40522,24 +45566,41 @@ namespace VULKAN_HPP_NAMESPACE , layers( layers_ ) {} - FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( FramebufferCreateInfo ) - offsetof( FramebufferCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} - FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo( + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {} ) + : flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & + operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -40563,12 +45624,23 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT + FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo & setAttachments( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { width = width_; @@ -40587,55 +45659,46 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferCreateInfo const & ) const = default; #else - bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( renderPass == rhs.renderPass ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( layers == rhs.layers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( renderPass == rhs.renderPass ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( layers == rhs.layers ); } - bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layers = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layers = {}; }; - static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
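// Illustrative sketch (not part of the generated header): the enhanced-mode
// setAttachments() added above fills attachmentCount and pAttachments from a
// single ArrayProxyNoTemporaries argument. `renderPass` and `imageView` are
// assumed handles; `vk` aliases VULKAN_HPP_NAMESPACE; requires <array>.
#if 0
std::array<vk::ImageView, 1> fbAttachments{ imageView };

vk::FramebufferCreateInfo fbInfo{};
fbInfo.setRenderPass( renderPass )
      .setAttachments( fbAttachments )  // also sets attachmentCount = 1
      .setWidth( 1280 )
      .setHeight( 720 )
      .setLayers( 1 );
// The proxy rejects rvalues on purpose: the create-info only stores a
// pointer, so the attachment array must outlive it.
#endif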
); template <> @@ -40644,495 +45707,33 @@ namespace VULKAN_HPP_NAMESPACE using Type = FramebufferCreateInfo; }; - struct FramebufferMixedSamplesCombinationNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; - - VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT - : coverageReductionMode( coverageReductionMode_ ) - , rasterizationSamples( rasterizationSamples_ ) - , depthStencilSamples( depthStencilSamples_ ) - , colorSamples( colorSamples_ ) - {} - - FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( FramebufferMixedSamplesCombinationNV ) - offsetof( FramebufferMixedSamplesCombinationNV, pNext ) ); - return *this; - } - - FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( FramebufferMixedSamplesCombinationNV const& ) const = default; -#else - bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( coverageReductionMode == rhs.coverageReductionMode ) - && ( rasterizationSamples == rhs.rasterizationSamples ) - && ( depthStencilSamples == rhs.depthStencilSamples ) - && ( colorSamples == rhs.colorSamples ); - } - - bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; - - }; - static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
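// Illustrative sketch (not part of the generated header): the CppType
// specialisation above lets template code map a StructureType enumerant back
// to its wrapper type, e.g. (assuming <type_traits> and `vk` aliasing
// VULKAN_HPP_NAMESPACE):
#if 0
static_assert(
  std::is_same<vk::CppType<vk::StructureType, vk::StructureType::eFramebufferCreateInfo>::Type,
               vk::FramebufferCreateInfo>::value,
  "CppType maps eFramebufferCreateInfo to FramebufferCreateInfo" );
#endif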
);
-
-  template <>
-  struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
-  {
-    using Type = FramebufferMixedSamplesCombinationNV;
-  };
-
-  struct IndirectCommandsStreamNV
-  {
-
-
-    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
-                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
-      : buffer( buffer_ )
-      , offset( offset_ )
-    {}
-
-    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    IndirectCommandsStreamNV& operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<IndirectCommandsStreamNV const *>(&rhs);
-      return *this;
-    }
-
-    IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      offset = offset_;
-      return *this;
-    }
-
-
-    operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
-    }
-
-    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
-#else
-    bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( buffer == rhs.buffer )
-          && ( offset == rhs.offset );
-    }
-
-    bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
-
-  };
-  static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!"
); - - struct GeneratedCommandsInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; - - VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, - uint32_t streamCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, - uint32_t sequencesCount_ = {}, - VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , streamCount( streamCount_ ) - , pStreams( pStreams_ ) - , sequencesCount( sequencesCount_ ) - , preprocessBuffer( preprocessBuffer_ ) - , preprocessOffset( preprocessOffset_ ) - , preprocessSize( preprocessSize_ ) - , sequencesCountBuffer( sequencesCountBuffer_ ) - , sequencesCountOffset( sequencesCountOffset_ ) - , sequencesIndexBuffer( sequencesIndexBuffer_ ) - , sequencesIndexOffset( sequencesIndexOffset_ ) - {} - - GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( GeneratedCommandsInfoNV ) - offsetof( GeneratedCommandsInfoNV, pNext ) ); - return *this; - } - - GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - GeneratedCommandsInfoNV& operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } - - GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT - { - streamCount = streamCount_; - return *this; - } - - GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT - { - pStreams = pStreams_; - return *this; - } - - GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCount = sequencesCount_; - return *this; - } - - GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT - { - preprocessBuffer = preprocessBuffer_; - return *this; - } - - GeneratedCommandsInfoNV & setPreprocessOffset( 
VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT - { - preprocessOffset = preprocessOffset_; - return *this; - } - - GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT - { - preprocessSize = preprocessSize_; - return *this; - } - - GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCountBuffer = sequencesCountBuffer_; - return *this; - } - - GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCountOffset = sequencesCountOffset_; - return *this; - } - - GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT - { - sequencesIndexBuffer = sequencesIndexBuffer_; - return *this; - } - - GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT - { - sequencesIndexOffset = sequencesIndexOffset_; - return *this; - } - - - operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GeneratedCommandsInfoNV const& ) const = default; -#else - bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( pipeline == rhs.pipeline ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( streamCount == rhs.streamCount ) - && ( pStreams == rhs.pStreams ) - && ( sequencesCount == rhs.sequencesCount ) - && ( preprocessBuffer == rhs.preprocessBuffer ) - && ( preprocessOffset == rhs.preprocessOffset ) - && ( preprocessSize == rhs.preprocessSize ) - && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) - && ( sequencesCountOffset == rhs.sequencesCountOffset ) - && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) - && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); - } - - bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; - uint32_t streamCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {}; - uint32_t sequencesCount = {}; - VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; - - }; - static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" 
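// Illustrative sketch (not part of the generated header): filling the
// VK_NV_device_generated_commands info struct above through its setter chain.
// All handles (`pipeline`, `cmdLayout`, `stream`, `preprocessBuf`) and the
// size/count values are assumed to come from earlier setup.
#if 0
vk::GeneratedCommandsInfoNV genInfo{};
genInfo.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
       .setPipeline( pipeline )
       .setIndirectCommandsLayout( cmdLayout )
       .setStreamCount( 1 )
       .setPStreams( &stream )            // a vk::IndirectCommandsStreamNV
       .setSequencesCount( sequenceCount )
       .setPreprocessBuffer( preprocessBuf )
       .setPreprocessSize( preprocessSize );
#endif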
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = GeneratedCommandsInfoNV; - }; - - struct GeneratedCommandsMemoryRequirementsInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; - - VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, - uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT - : pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , maxSequencesCount( maxSequencesCount_ ) - {} - - GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) - offsetof( GeneratedCommandsMemoryRequirementsInfoNV, pNext ) ); - return *this; - } - - GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - GeneratedCommandsMemoryRequirementsInfoNV& operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } - - GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT - { - maxSequencesCount = maxSequencesCount_; - return *this; - } - - - operator VkGeneratedCommandsMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const& ) const = default; -#else - bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( pipeline == rhs.pipeline ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( maxSequencesCount == rhs.maxSequencesCount ); - } - - bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; - uint32_t maxSequencesCount = {}; - - }; - static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = GeneratedCommandsMemoryRequirementsInfoNV; - }; - struct VertexInputBindingDescription { - - - VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {}, - uint32_t stride_ = {}, - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = + VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT : binding( binding_ ) , stride( stride_ ) , inputRate( inputRate_ ) {} - VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & + operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -41154,67 +45755,68 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( VertexInputBindingDescription const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription const & ) const = default; #else - bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( binding == rhs.binding ) - && ( stride == rhs.stride ) - && ( inputRate == rhs.inputRate ); + return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); } - bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { 
return !operator==( rhs );
    }
#endif

-
-  public:
-    uint32_t binding = {};
-    uint32_t stride = {};
+    uint32_t                              binding   = {};
+    uint32_t                              stride    = {};
    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
-
  };
-  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDescription>::value,
+                 "struct wrapper is not a standard layout!" );

  struct VertexInputAttributeDescription
  {
-
-
-    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {},
-                                                          uint32_t binding_ = {},
-                                                          VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
-                                                          uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      VertexInputAttributeDescription( uint32_t location_ = {},
+                                       uint32_t binding_  = {},
+                                       VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                       uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
      : location( location_ )
      , binding( binding_ )
      , format( format_ )
      , offset( offset_ )
    {}

-    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR
+      VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription &
+      operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VertexInputAttributeDescription const*>(&rhs);
+      *this = *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs );
      return *this;
    }

@@ -41242,57 +45844,56 @@ namespace VULKAN_HPP_NAMESPACE
      return *this;
    }

-
-    operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT
+    operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
+      return *reinterpret_cast<const VkVertexInputAttributeDescription *>( this );
    }

    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
+      return *reinterpret_cast<VkVertexInputAttributeDescription *>( this );
    }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( VertexInputAttributeDescription const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VertexInputAttributeDescription const & ) const = default;
#else
-    bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( location == rhs.location )
-          && ( binding == rhs.binding )
-          && ( format == rhs.format )
-          && ( offset == rhs.offset );
+      return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) &&
+             ( offset == rhs.offset );
    }

-    bool operator!=(
VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t location = {}; - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint32_t offset = {}; - + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; }; - static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PipelineVertexInputStateCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineVertexInputStateCreateInfo; - VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, - uint32_t vertexBindingDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {}, - uint32_t vertexAttributeDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, + uint32_t vertexBindingDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, + uint32_t vertexAttributeDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) @@ -41300,105 +45901,143 @@ namespace VULKAN_HPP_NAMESPACE , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) {} - PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} - PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexBindingDescriptions_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexAttributeDescriptions_ = {} ) + : flags( flags_ ) + , vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) + , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) + , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) + , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo & + operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & + setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT { vertexBindingDescriptionCount = vertexBindingDescriptionCount_; return *this; } - PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT { pVertexBindingDescriptions = pVertexBindingDescriptions_; return *this; } - PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() ); + pVertexBindingDescriptions = vertexBindingDescriptions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineVertexInputStateCreateInfo & + setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT { vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; return *this; } - PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT { pVertexAttributeDescriptions = pVertexAttributeDescriptions_; return 
*this; } - - operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); + pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineVertexInputStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; #else - bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) - && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) - && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) - && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && + ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && + ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && + ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); } - bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; - uint32_t vertexBindingDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {}; - uint32_t vertexAttributeDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; }; - static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" 
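// Illustrative sketch (not part of the generated header): combining the three
// structs above into one vertex layout -- a single interleaved binding with
// position and colour attributes. Strides, offsets and formats are assumptions
// for illustration; requires <array>, with `vk` aliasing VULKAN_HPP_NAMESPACE.
#if 0
std::array<vk::VertexInputBindingDescription, 1> bindings{
  vk::VertexInputBindingDescription( 0, 6 * sizeof( float ), vk::VertexInputRate::eVertex ) };

std::array<vk::VertexInputAttributeDescription, 2> attributes{
  vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32Sfloat, 0 ),
  vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32B32Sfloat, 3 * sizeof( float ) ) };

vk::PipelineVertexInputStateCreateInfo vertexInput{};
vertexInput.setVertexBindingDescriptions( bindings )      // sets count + pointer
           .setVertexAttributeDescriptions( attributes ); // likewise
#endif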
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -41408,100 +46047,101 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineInputAssemblyStateCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineInputAssemblyStateCreateInfo; - VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , topology( topology_ ) , primitiveRestartEnable( primitiveRestartEnable_ ) {} - PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineInputAssemblyStateCreateInfo ) - offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo & + operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + setTopology( 
VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT { topology = topology_; return *this; } - PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & + setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT { primitiveRestartEnable = primitiveRestartEnable_; return *this; } - - operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineInputAssemblyStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; #else - bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( topology == rhs.topology ) - && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( topology == rhs.topology ) && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); } - bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; - + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; }; - static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
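// Illustrative sketch (not part of the generated header): the most common
// configuration of the struct above -- triangle lists without primitive
// restart (`vk` aliasing VULKAN_HPP_NAMESPACE assumed).
#if 0
vk::PipelineInputAssemblyStateCreateInfo inputAssembly{};
inputAssembly.setTopology( vk::PrimitiveTopology::eTriangleList )
             .setPrimitiveRestartEnable( VK_FALSE );
#endif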
); template <> struct CppType @@ -41511,39 +46151,44 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineTessellationStateCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineTessellationStateCreateInfo; - VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, - uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, + uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , patchControlPoints( patchControlPoints_ ) {} - PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & + operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo & + operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -41555,46 +46200,41 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineTessellationStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; #else - bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineTessellationStateCreateInfo 
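// Illustrative sketch (not part of the generated header): patchControlPoints
// is the only knob in the struct above; e.g. three control points per patch
// when tessellating triangles (assuming tessellation shader stages are bound).
#if 0
vk::PipelineTessellationStateCreateInfo tessState{};
tessState.setPatchControlPoints( 3 );
#endif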
const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( patchControlPoints == rhs.patchControlPoints ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( patchControlPoints == rhs.patchControlPoints ); } - bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; - uint32_t patchControlPoints = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; }; - static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -41602,126 +46242,18 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineTessellationStateCreateInfo; }; - struct Viewport - { - - - VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, - float y_ = {}, - float width_ = {}, - float height_ = {}, - float minDepth_ = {}, - float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , width( width_ ) - , height( height_ ) - , minDepth( minDepth_ ) - , maxDepth( maxDepth_ ) - {} - - Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - Viewport& operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT - { - x = x_; - return *this; - } - - Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT - { - y = y_; - return *this; - } - - Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT - { - width = width_; - return *this; - } - - Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT - { - height = height_; - return *this; - } - - Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT - { - minDepth = minDepth_; - return *this; - } - - Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT - { - maxDepth = maxDepth_; - return *this; - } - - - operator VkViewport const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkViewport &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( Viewport const& ) const = default; -#else - bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( width == rhs.width ) - && ( height == rhs.height ) - && ( minDepth == rhs.minDepth ) - && ( maxDepth == rhs.maxDepth ); - } - - bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - float x = {}; - float y 
= {}; - float width = {}; - float height = {}; - float minDepth = {}; - float maxDepth = {}; - - }; - static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PipelineViewportStateCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; - VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {}, - uint32_t scissorCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, + uint32_t scissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , viewportCount( viewportCount_ ) , pViewports( pViewports_ ) @@ -41729,30 +46261,44 @@ namespace VULKAN_HPP_NAMESPACE , pScissors( pScissors_ ) {} - PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} - PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {} ) + : flags( flags_ ) + , viewportCount( static_cast( viewports_.size() ) ) + , pViewports( viewports_.data() ) + , scissorCount( static_cast( scissors_.size() ) ) + , pScissors( scissors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & + operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; 
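// Illustrative sketch (not part of the generated header): the enhanced-mode
// constructor above derives viewportCount and scissorCount from the proxies.
// The named locals matter -- ArrayProxyNoTemporaries rejects rvalues because
// the create-info only stores pointers (`vk` aliasing VULKAN_HPP_NAMESPACE).
#if 0
vk::Viewport viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f );
vk::Rect2D   scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) );

vk::PipelineViewportStateCreateInfo viewportState( {}, viewport, scissor );
// viewportCount == scissorCount == 1; pViewports/pScissors point at the locals.
#endif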
return *this; @@ -41764,70 +46310,87 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & + setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT { pViewports = pViewports_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & setViewports( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) + VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewports_.size() ); + pViewports = viewports_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT { scissorCount = scissorCount_; return *this; } - PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & + setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT { pScissors = pScissors_; return *this; } - - operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + scissorCount = static_cast( scissors_.size() ); + pScissors = scissors_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; #else - bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( viewportCount == rhs.viewportCount ) - && ( pViewports == rhs.pViewports ) - && ( scissorCount == rhs.scissorCount ) - && ( pScissors == rhs.pScissors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( viewportCount == rhs.viewportCount ) && ( pViewports == rhs.pViewports ) && + ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); } - bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {}; - uint32_t scissorCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {}; - + uint32_t 
viewportCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; + uint32_t scissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; }; - static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -41837,20 +46400,23 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineRasterizationStateCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateCreateInfo; - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, - VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, - VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, - VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, - float depthBiasConstantFactor_ = {}, - float depthBiasClamp_ = {}, - float depthBiasSlopeFactor_ = {}, - float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, + VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, + float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , depthClampEnable( depthClampEnable_ ) , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) @@ -41864,72 +46430,81 @@ namespace VULKAN_HPP_NAMESPACE , lineWidth( lineWidth_ ) {} - PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateCreateInfo ) - offsetof( PipelineRasterizationStateCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRasterizationStateCreateInfo& operator=( 
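// [Editor's illustration; not part of the patch.] A minimal sketch of the
// enhanced-mode constructor this hunk adds to PipelineViewportStateCreateInfo
// above: viewportCount/scissorCount are derived from the size of the
// ArrayProxyNoTemporaries arguments instead of being passed by hand. Assumes a
// translation unit that includes the patched <vulkan/vulkan.hpp> with
// VULKAN_HPP_DISABLE_ENHANCED_MODE left undefined.
//
//   std::array<vk::Viewport, 1> viewports{ vk::Viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f ) };
//   std::array<vk::Rect2D, 1>   scissors{ vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) ) };
//   // both counts are filled in from the proxies' sizes:
//   vk::PipelineViewportStateCreateInfo viewportState( {}, viewports, scissors );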
-    PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
+      operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateCreateInfo &
+      operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       depthClampEnable = depthClampEnable_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       rasterizerDiscardEnable = rasterizerDiscardEnable_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
     {
       polygonMode = polygonMode_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
     {
       cullMode = cullMode_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
     {
       frontFace = frontFace_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
    {
       depthBiasEnable = depthBiasEnable_;
       return *this;
     }

-    PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationStateCreateInfo &
+      setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
     {
       depthBiasConstantFactor = depthBiasConstantFactor_;
       return *this;
@@ -41953,64 +46528,55 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
+      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo *>( this );
     }

     operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
+      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineRasterizationStateCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default;
 #else
-    bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( depthClampEnable == rhs.depthClampEnable )
-          && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
-          && ( polygonMode == rhs.polygonMode )
-          && ( cullMode == rhs.cullMode )
-          && ( frontFace == rhs.frontFace )
-          && ( depthBiasEnable == rhs.depthBiasEnable )
-          && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
-          && ( depthBiasClamp == rhs.depthBiasClamp )
-          && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
-          && ( lineWidth == rhs.lineWidth );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( depthClampEnable == rhs.depthClampEnable ) &&
+             ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) &&
+             ( cullMode == rhs.cullMode ) && ( frontFace == rhs.frontFace ) &&
+             ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
+             ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) &&
+             ( lineWidth == rhs.lineWidth );
     }

-    bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
-    VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
-    VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
-    VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
-    VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
-    float depthBiasConstantFactor = {};
-    float depthBiasClamp = {};
-    float depthBiasSlopeFactor = {};
-    float lineWidth = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
+    VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
+    VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
+    VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
+    float depthBiasConstantFactor = {};
+    float depthBiasClamp = {};
+    float depthBiasSlopeFactor = {};
+    float lineWidth = {};
   };
-  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
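// [Editor's illustration; not part of the patch.] The setters above each
// return *this, so a rasterization state can be configured by chaining; a
// sketch, assuming the patched header is in scope under namespace vk:
//
//   vk::PipelineRasterizationStateCreateInfo rasterState{};
//   rasterState.setPolygonMode( vk::PolygonMode::eFill )
//       .setCullMode( vk::CullModeFlagBits::eBack )
//       .setFrontFace( vk::FrontFace::eCounterClockwise )
//       .setLineWidth( 1.0f );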
-  static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
@@ -42020,16 +46586,19 @@ namespace VULKAN_HPP_NAMESPACE

   struct PipelineMultisampleStateCreateInfo
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineMultisampleStateCreateInfo;

-    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
-                                                             VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
-                                                             VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {},
-                                                             float minSampleShading_ = {},
-                                                             const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {},
-                                                             VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {},
-                                                             VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+      VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {},
+      float minSampleShading_ = {},
+      const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT
     : flags( flags_ )
     , rasterizationSamples( rasterizationSamples_ )
     , sampleShadingEnable( sampleShadingEnable_ )
@@ -42039,42 +46608,46 @@ namespace VULKAN_HPP_NAMESPACE
     , alphaToOneEnable( alphaToOneEnable_ )
     {}

-    PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;

     PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
+      operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineMultisampleStateCreateInfo &
+      operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
       return *this;
     }

-    PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    PipelineMultisampleStateCreateInfo &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }

-    PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+    PipelineMultisampleStateCreateInfo &
+      setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
     {
       rasterizationSamples = rasterizationSamples_;
       return *this;
     }

-    PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineMultisampleStateCreateInfo &
+      setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       sampleShadingEnable = sampleShadingEnable_;
       return *this;
@@ -42086,74 +46659,70 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
+    PipelineMultisampleStateCreateInfo &
+      setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
     {
       pSampleMask = pSampleMask_;
       return *this;
     }

-    PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineMultisampleStateCreateInfo &
+      setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       alphaToCoverageEnable = alphaToCoverageEnable_;
       return *this;
     }

-    PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineMultisampleStateCreateInfo &
+      setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       alphaToOneEnable = alphaToOneEnable_;
       return *this;
     }

-
-    operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
+      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>( this );
     }

     operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
+      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineMultisampleStateCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default;
 #else
-    bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( rasterizationSamples == rhs.rasterizationSamples )
-          && ( sampleShadingEnable == rhs.sampleShadingEnable )
-          && ( minSampleShading == rhs.minSampleShading )
-          && ( pSampleMask == rhs.pSampleMask )
-          && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
-          && ( alphaToOneEnable == rhs.alphaToOneEnable );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( rasterizationSamples == rhs.rasterizationSamples ) &&
+             ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) &&
+             ( pSampleMask == rhs.pSampleMask ) && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) &&
+             ( alphaToOneEnable == rhs.alphaToOneEnable );
     }

-    bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+    const void * pNext = {};
     VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
-    VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
-    float minSampleShading = {};
-    const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {};
-    VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
-    VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
-
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
+    float minSampleShading = {};
+    const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
   };
-  static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ),
                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
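// [Editor's illustration; not part of the patch.] A sketch of configuring the
// struct above: leaving pSampleMask null keeps all samples enabled, and each
// vk::SampleMask word covers up to 32 samples. Assumes the patched header.
//
//   vk::PipelineMultisampleStateCreateInfo msState{};
//   msState.setRasterizationSamples( vk::SampleCountFlagBits::e4 )
//       .setSampleShadingEnable( VK_TRUE )
//       .setMinSampleShading( 0.25f );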

   template <>
   struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
@@ -42163,15 +46732,15 @@ namespace VULKAN_HPP_NAMESPACE

   struct StencilOpState
   {
-
-
-    VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
-                                         VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
-                                         VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
-                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
-                                         uint32_t compareMask_ = {},
-                                         uint32_t writeMask_ = {},
-                                         uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                      VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                      VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                      VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                      uint32_t compareMask_ = {},
+                      uint32_t writeMask_ = {},
+                      uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
     : failOp( failOp_ )
     , passOp( passOp_ )
     , depthFailOp( depthFailOp_ )
@@ -42181,14 +46750,18 @@ namespace VULKAN_HPP_NAMESPACE
     , reference( reference_ )
     {}

-    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+      : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
       return *this;
     }
@@ -42234,68 +46807,62 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT
+    operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkStencilOpState*>( this );
+      return *reinterpret_cast<const VkStencilOpState *>( this );
     }

     operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkStencilOpState*>( this );
+      return *reinterpret_cast<VkStencilOpState *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( StencilOpState const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( StencilOpState const & ) const = default;
 #else
-    bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( failOp == rhs.failOp )
-          && ( passOp == rhs.passOp )
-          && ( depthFailOp == rhs.depthFailOp )
-          && ( compareOp == rhs.compareOp )
-          && ( compareMask == rhs.compareMask )
-          && ( writeMask == rhs.writeMask )
-          && ( reference == rhs.reference );
+      return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) &&
+             ( compareOp == rhs.compareOp ) && ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) &&
+             ( reference == rhs.reference );
     }

-    bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
-    VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
     VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
-    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
-    uint32_t compareMask = {};
-    uint32_t writeMask = {};
-    uint32_t reference = {};
-
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    uint32_t compareMask = {};
+    uint32_t writeMask = {};
+    uint32_t reference = {};
   };
   static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );

   struct PipelineDepthStencilStateCreateInfo
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineDepthStencilStateCreateInfo;

-    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
-                                                              VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
-                                                              VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
-                                                              VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
-                                                              VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
-                                                              float minDepthBounds_ = {},
-                                                              float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
+      VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+      VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
+      VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
+      VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
+      float minDepthBounds_ = {},
+      float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT
     : flags( flags_ )
     , depthTestEnable( depthTestEnable_ )
     , depthWriteEnable( depthWriteEnable_ )
@@ -42308,72 +46875,81 @@ namespace VULKAN_HPP_NAMESPACE
     , maxDepthBounds( maxDepthBounds_ )
     {}

-    PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;

     PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

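// [Editor's illustration; not part of the patch.] StencilOpState (defined
// above) takes failOp, passOp, depthFailOp, compareOp, compareMask, writeMask
// and reference, in that order; a sketch assuming the patched header:
//
//   vk::StencilOpState stencil( vk::StencilOp::eKeep, vk::StencilOp::eReplace, vk::StencilOp::eKeep,
//                               vk::CompareOp::eAlways, 0xffu, 0xffu, 1u );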
-    PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
+      operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineDepthStencilStateCreateInfo &
+      operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       depthTestEnable = depthTestEnable_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       depthWriteEnable = depthWriteEnable_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
     {
       depthCompareOp = depthCompareOp_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       depthBoundsTestEnable = depthBoundsTestEnable_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       stencilTestEnable = stencilTestEnable_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
     {
       front = front_;
       return *this;
     }

-    PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
+    PipelineDepthStencilStateCreateInfo &
+      setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
     {
       back = back_;
       return *this;
@@ -42391,62 +46967,52 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-
-    operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
+      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>( this );
     }

     operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
+      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineDepthStencilStateCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
 #else
-    bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( depthTestEnable == rhs.depthTestEnable )
-          && ( depthWriteEnable == rhs.depthWriteEnable )
-          && ( depthCompareOp == rhs.depthCompareOp )
-          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
-          && ( stencilTestEnable == rhs.stencilTestEnable )
-          && ( front == rhs.front )
-          && ( back == rhs.back )
-          && ( minDepthBounds == rhs.minDepthBounds )
-          && ( maxDepthBounds == rhs.maxDepthBounds );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( depthTestEnable == rhs.depthTestEnable ) && ( depthWriteEnable == rhs.depthWriteEnable ) &&
+             ( depthCompareOp == rhs.depthCompareOp ) && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) &&
+             ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && ( back == rhs.back ) &&
+             ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds );
     }

-    bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
-    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
-    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
-    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
-    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
-    VULKAN_HPP_NAMESPACE::StencilOpState front = {};
-    VULKAN_HPP_NAMESPACE::StencilOpState back = {};
-    float minDepthBounds = {};
-    float maxDepthBounds = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState front = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState back = {};
+    float minDepthBounds = {};
+    float maxDepthBounds = {};
   };
-  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ),
                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
@@ -42456,16 +47022,16 @@ namespace VULKAN_HPP_NAMESPACE

   struct PipelineColorBlendAttachmentState
   {
-
-
-    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
-                                                            VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
-                                                            VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
-                                                            VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
-                                                            VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
-                                                            VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
-                                                            VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
-                                                            VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(
+      VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
+      VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+      VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+      VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+      VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+      VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+      VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+      VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
     : blendEnable( blendEnable_ )
     , srcColorBlendFactor( srcColorBlendFactor_ )
     , dstColorBlendFactor( dstColorBlendFactor_ )
@@ -42476,14 +47042,20 @@ namespace VULKAN_HPP_NAMESPACE
     , colorWriteMask( colorWriteMask_ )
     {}

-    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR
+      PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
+      operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
       return *this;
     }
@@ -42493,108 +47065,111 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState &
+      setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
     {
       srcColorBlendFactor = srcColorBlendFactor_;
       return *this;
     }

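// [Editor's illustration; not part of the patch.] A sketch of a typical
// depth-test configuration using the PipelineDepthStencilStateCreateInfo
// setters above ('stencil' as in the earlier StencilOpState sketch):
//
//   vk::PipelineDepthStencilStateCreateInfo dsState{};
//   dsState.setDepthTestEnable( VK_TRUE )
//       .setDepthWriteEnable( VK_TRUE )
//       .setDepthCompareOp( vk::CompareOp::eLessOrEqual )
//       .setFront( stencil )
//       .setBack( stencil );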
-    PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState &
+      setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
     {
       dstColorBlendFactor = dstColorBlendFactor_;
       return *this;
     }

-    PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState &
+      setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
     {
       colorBlendOp = colorBlendOp_;
       return *this;
     }

-    PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState &
+      setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
     {
       srcAlphaBlendFactor = srcAlphaBlendFactor_;
       return *this;
     }

-    PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState &
+      setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
     {
       dstAlphaBlendFactor = dstAlphaBlendFactor_;
       return *this;
     }

-    PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState &
+      setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
     {
       alphaBlendOp = alphaBlendOp_;
       return *this;
     }

-    PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendAttachmentState &
+      setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
     {
       colorWriteMask = colorWriteMask_;
       return *this;
     }

-
-    operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
+      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState *>( this );
     }

     operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
+      return *reinterpret_cast<VkPipelineColorBlendAttachmentState *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineColorBlendAttachmentState const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
 #else
-    bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( blendEnable == rhs.blendEnable )
-          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
-          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
-          && ( colorBlendOp == rhs.colorBlendOp )
-          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
-          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
-          && ( alphaBlendOp == rhs.alphaBlendOp )
-          && ( colorWriteMask == rhs.colorWriteMask );
+      return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) &&
+             ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
+             ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
+             ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask );
     }

-    bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
-    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
-    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
-    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
-    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
-    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
-    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
-    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
-
+    VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
+    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
   };
-  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ),
                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value,
                 "struct wrapper is not a standard layout!" );
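// [Editor's illustration; not part of the patch.] A sketch of standard alpha
// blending using the PipelineColorBlendAttachmentState setters above:
//
//   vk::PipelineColorBlendAttachmentState attachment{};
//   attachment.setBlendEnable( VK_TRUE )
//       .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
//       .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
//       .setColorBlendOp( vk::BlendOp::eAdd )
//       .setSrcAlphaBlendFactor( vk::BlendFactor::eOne )
//       .setDstAlphaBlendFactor( vk::BlendFactor::eZero )
//       .setAlphaBlendOp( vk::BlendOp::eAdd )
//       .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
//                           vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );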

   struct PipelineColorBlendStateCreateInfo
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineColorBlendStateCreateInfo;

-    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
-                                                               VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
-                                                               VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
-                                                               uint32_t attachmentCount_ = {},
-                                                               const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {},
-                                                               std::array<float, 4> const& blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
+      VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
+      uint32_t attachmentCount_ = {},
+      const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {},
+      std::array<float, 4> const & blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT
     : flags( flags_ )
     , logicOpEnable( logicOpEnable_ )
     , logicOp( logicOp_ )
@@ -42603,36 +47178,55 @@ namespace VULKAN_HPP_NAMESPACE
     , blendConstants( blendConstants_ )
     {}

-    PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14
+      PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
+    {}

-    PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineColorBlendStateCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_,
+      VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_,
+      VULKAN_HPP_NAMESPACE::LogicOp logicOp_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
+        const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_,
+      std::array<float, 4> const & blendConstants_ = {} )
+      : flags( flags_ )
+      , logicOpEnable( logicOpEnable_ )
+      , logicOp( logicOp_ )
+      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
+      , pAttachments( attachments_.data() )
+      , blendConstants( blendConstants_ )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
+      operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
       return *this;
     }

-    PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendStateCreateInfo &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }

-    PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendStateCreateInfo &
+      setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       logicOpEnable = logicOpEnable_;
       return *this;
@@ -42650,66 +47244,71 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-    PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    PipelineColorBlendStateCreateInfo & setPAttachments(
+      const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
     {
       pAttachments = pAttachments_;
       return *this;
     }

-    PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineColorBlendStateCreateInfo & setAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
+        const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments    = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
     {
       blendConstants = blendConstants_;
       return *this;
     }

-
-    operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
+      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>( this );
     }

     operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
+      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineColorBlendStateCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
 #else
-    bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( logicOpEnable == rhs.logicOpEnable )
-          && ( logicOp == rhs.logicOp )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( blendConstants == rhs.blendConstants );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( logicOpEnable == rhs.logicOpEnable ) && ( logicOp == rhs.logicOp ) &&
+             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
+             ( blendConstants == rhs.blendConstants );
     }

-    bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+    const void * pNext = {};
     VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
-    VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
-    uint32_t attachmentCount = {};
-    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
-
+    VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+    VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
   };
-  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ),
                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
@@ -42719,41 +47318,54 @@ namespace VULKAN_HPP_NAMESPACE

   struct PipelineDynamicStateCreateInfo
   {
-    static const bool allowDuplicate = false;
+    static const bool allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;

-    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
-                                                         uint32_t dynamicStateCount_ = {},
-                                                         const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
+      uint32_t dynamicStateCount_ = {},
+      const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT
     : flags( flags_ )
     , dynamicStateCount( dynamicStateCount_ )
     , pDynamicStates( pDynamicStates_ )
     {}

-    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

     PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
+    {}

-    PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineDynamicStateCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
+      : flags( flags_ )
+      , dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) )
+      , pDynamicStates( dynamicStates_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

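// [Editor's illustration; not part of the patch.] The enhanced-mode
// constructors introduced above derive attachmentCount/dynamicStateCount from
// the ArrayProxyNoTemporaries arguments; a sketch assuming the patched header
// ('attachment' as in the earlier blend-attachment sketch):
//
//   std::array<vk::PipelineColorBlendAttachmentState, 1> attachments{ attachment };
//   vk::PipelineColorBlendStateCreateInfo blendState( {}, VK_FALSE, vk::LogicOp::eCopy, attachments );
//
//   std::array<vk::DynamicState, 2> dynamicStates{ vk::DynamicState::eViewport, vk::DynamicState::eScissor };
//   vk::PipelineDynamicStateCreateInfo dynamicState( {}, dynamicStates );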
VULKAN_HPP_NOEXCEPT = default; + + PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -42765,54 +47377,60 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & + setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT { pDynamicStates = pDynamicStates_; return *this; } - - operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo & setDynamicStates( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dynamicStateCount = static_cast( dynamicStates_.size() ); + pDynamicStates = dynamicStates_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineDynamicStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default; #else - bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( dynamicStateCount == rhs.dynamicStateCount ) - && ( pDynamicStates == rhs.pDynamicStates ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( dynamicStateCount == rhs.dynamicStateCount ) && ( pDynamicStates == rhs.pDynamicStates ); } - bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; - uint32_t dynamicStateCount = {}; - const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {}; - + uint32_t dynamicStateCount = {}; + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {}; }; - static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct 
@@ -42822,26 +47440,28 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct GraphicsPipelineCreateInfo
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
 
-    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
-                                                        uint32_t stageCount_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {},
-                                                        VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
-                                                        VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
-                                                        uint32_t subpass_ = {},
-                                                        VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
-                                                        int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineCreateFlags                          flags_               = {},
+      uint32_t                                                           stageCount_          = {},
+      const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *        pStages_             = {},
+      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *   pVertexInputState_   = {},
+      const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
+      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *  pTessellationState_  = {},
+      const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *      pViewportState_      = {},
+      const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
+      const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *   pMultisampleState_   = {},
+      const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *  pDepthStencilState_  = {},
+      const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *    pColorBlendState_    = {},
+      const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *       pDynamicState_       = {},
+      VULKAN_HPP_NAMESPACE::PipelineLayout                               layout_              = {},
+      VULKAN_HPP_NAMESPACE::RenderPass                                   renderPass_          = {},
+      uint32_t                                                           subpass_             = {},
+      VULKAN_HPP_NAMESPACE::Pipeline                                     basePipelineHandle_  = {},
+      int32_t                                                            basePipelineIndex_   = {} ) VULKAN_HPP_NOEXCEPT
     : flags( flags_ )
     , stageCount( stageCount_ )
     , pStages( pStages_ )
@@ -42861,24 +47481,63 @@ namespace VULKAN_HPP_NAMESPACE
     , basePipelineIndex( basePipelineIndex_ )
   {}
 
-  GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo
const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 + GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : GraphicsPipelineCreateInfo( *reinterpret_cast( &rhs ) ) + {} - GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -42896,61 +47555,82 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } - GraphicsPipelineCreateInfo & 
setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsPipelineCreateInfo & setPVertexInputState( + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return *this; } - GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPInputAssemblyState( + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT { pInputAssemblyState = pInputAssemblyState_; return *this; } - GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPTessellationState( + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return *this; } - GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPViewportState( + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT { pViewportState = pViewportState_; return *this; } - GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPRasterizationState( + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT { pRasterizationState = pRasterizationState_; return *this; } - GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPMultisampleState( + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT { pMultisampleState = pMultisampleState_; return *this; } - GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPDepthStencilState( + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT { pDepthStencilState = pDepthStencilState_; return *this; } - GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPColorBlendState( + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT { pColorBlendState = pColorBlendState_; return *this; } - GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & setPDynamicState( + const 
VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT { pDynamicState = pDynamicState_; return *this; @@ -42974,7 +47654,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; @@ -42986,76 +47667,63 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GraphicsPipelineCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default; #else - bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( pVertexInputState == rhs.pVertexInputState ) - && ( pInputAssemblyState == rhs.pInputAssemblyState ) - && ( pTessellationState == rhs.pTessellationState ) - && ( pViewportState == rhs.pViewportState ) - && ( pRasterizationState == rhs.pRasterizationState ) - && ( pMultisampleState == rhs.pMultisampleState ) - && ( pDepthStencilState == rhs.pDepthStencilState ) - && ( pColorBlendState == rhs.pColorBlendState ) - && ( pDynamicState == rhs.pDynamicState ) - && ( layout == rhs.layout ) - && ( renderPass == rhs.renderPass ) - && ( subpass == rhs.subpass ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) && + ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) && + ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) && + ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && + ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && + ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); } - bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - const 
VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {}; - const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {}; - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {}; - const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {}; - const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {}; - const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {}; - const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {}; - const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {}; - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t subpass = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {}; + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; - static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -43063,841 +47731,27 @@ namespace VULKAN_HPP_NAMESPACE using Type = GraphicsPipelineCreateInfo; }; - struct GraphicsShaderGroupCreateInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; - - VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT - : stageCount( stageCount_ ) - , pStages( pStages_ ) - , pVertexInputState( pVertexInputState_ ) - , pTessellationState( pTessellationState_ ) - {} - - GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( GraphicsShaderGroupCreateInfoNV ) - offsetof( GraphicsShaderGroupCreateInfoNV, pNext ) ); - return *this; - } - - GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - GraphicsShaderGroupCreateInfoNV& operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - GraphicsShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT - { - stageCount = stageCount_; - return *this; - } - - GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT - { - pStages = pStages_; - return *this; - } - - GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT - { - pVertexInputState = pVertexInputState_; - return *this; - } - - GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT - { - pTessellationState = pTessellationState_; - return *this; - } - - - operator VkGraphicsShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GraphicsShaderGroupCreateInfoNV const& ) const = default; -#else - bool operator==( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( pVertexInputState == rhs.pVertexInputState ) - && ( pTessellationState == rhs.pTessellationState ); - } - - bool operator!=( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; - const void* pNext = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {}; - const 
VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {}; - - }; - static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = GraphicsShaderGroupCreateInfoNV; - }; - - struct GraphicsPipelineShaderGroupsCreateInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; - - VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ = {}, - uint32_t pipelineCount_ = {}, - const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT - : groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , pipelineCount( pipelineCount_ ) - , pPipelines( pPipelines_ ) - {} - - GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) - offsetof( GraphicsPipelineShaderGroupsCreateInfoNV, pNext ) ); - return *this; - } - - GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - GraphicsPipelineShaderGroupsCreateInfoNV& operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT - { - groupCount = groupCount_; - return *this; - } - - GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT - { - pGroups = pGroups_; - return *this; - } - - GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT - { - pipelineCount = pipelineCount_; - return *this; - } - - GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ ) VULKAN_HPP_NOEXCEPT - { - pPipelines = pPipelines_; - return *this; - } - - - operator VkGraphicsPipelineShaderGroupsCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const& ) const = default; -#else - bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( pipelineCount == rhs.pipelineCount ) - && ( pPipelines == rhs.pPipelines ); - } - - bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; - 
const void* pNext = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups = {}; - uint32_t pipelineCount = {}; - const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines = {}; - - }; - static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = GraphicsPipelineShaderGroupsCreateInfoNV; - }; - - struct XYColorEXT - { - - - VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, - float y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - {} - - XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - XYColorEXT& operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT - { - x = x_; - return *this; - } - - XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT - { - y = y_; - return *this; - } - - - operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( XYColorEXT const& ) const = default; -#else - bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( x == rhs.x ) - && ( y == rhs.y ); - } - - bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - float x = {}; - float y = {}; - - }; - static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct HdrMetadataEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; - - VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, - float maxLuminance_ = {}, - float minLuminance_ = {}, - float maxContentLightLevel_ = {}, - float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT - : displayPrimaryRed( displayPrimaryRed_ ) - , displayPrimaryGreen( displayPrimaryGreen_ ) - , displayPrimaryBlue( displayPrimaryBlue_ ) - , whitePoint( whitePoint_ ) - , maxLuminance( maxLuminance_ ) - , minLuminance( minLuminance_ ) - , maxContentLightLevel( maxContentLightLevel_ ) - , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) - {} - - HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( HdrMetadataEXT ) - offsetof( HdrMetadataEXT, pNext ) ); - return *this; - } - - HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryRed = displayPrimaryRed_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryGreen = displayPrimaryGreen_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryBlue = displayPrimaryBlue_; - return *this; - } - - HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT - { - whitePoint = whitePoint_; - return *this; - } - - HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT - { - maxLuminance = maxLuminance_; - return *this; - } - - HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT - { - minLuminance = minLuminance_; - return *this; - } - - HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT - { - maxContentLightLevel = maxContentLightLevel_; - return *this; - } - - HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT - { - maxFrameAverageLightLevel = maxFrameAverageLightLevel_; - return *this; - } - - - operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( HdrMetadataEXT const& ) const = default; -#else - bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPrimaryRed == rhs.displayPrimaryRed ) - && ( displayPrimaryGreen == rhs.displayPrimaryGreen ) - && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) - && ( whitePoint == rhs.whitePoint ) - && ( 
maxLuminance == rhs.maxLuminance ) - && ( minLuminance == rhs.minLuminance ) - && ( maxContentLightLevel == rhs.maxContentLightLevel ) - && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); - } - - bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; - float maxLuminance = {}; - float minLuminance = {}; - float maxContentLightLevel = {}; - float maxFrameAverageLightLevel = {}; - - }; - static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = HdrMetadataEXT; - }; - - struct HeadlessSurfaceCreateInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; - - VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - {} - - HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) ); - return *this; - } - - HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - - operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( HeadlessSurfaceCreateInfoEXT const& ) const = default; -#else - bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); - } - - bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; - - }; - static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = HeadlessSurfaceCreateInfoEXT; - }; - -#ifdef VK_USE_PLATFORM_IOS_MVK - struct IOSSurfaceCreateInfoMVK - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; - - VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, - const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pView( pView_ ) - {} - - IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( IOSSurfaceCreateInfoMVK ) - offsetof( IOSSurfaceCreateInfoMVK, pNext ) ); - return *this; - } - - IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT - { - pView = pView_; - return *this; - } - - - operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IOSSurfaceCreateInfoMVK const& ) const = default; -#else - bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pView == rhs.pView ); - } - - bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; - const void* pView = {}; - - }; - static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = IOSSurfaceCreateInfoMVK; - }; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - - struct ImageBlit - { - - - VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - std::array const& srcOffsets_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - std::array const& dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffsets( srcOffsets_ ) - , dstSubresource( dstSubresource_ ) - , dstOffsets( dstOffsets_ ) - {} - - ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageBlit& operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } - - ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT - { - srcOffsets = srcOffsets_; - return *this; - } - - ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT - { - dstSubresource = dstSubresource_; - return *this; - } - - ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT - { - dstOffsets = dstOffsets_; - return *this; - } - - - operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageBlit &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageBlit const& ) const = default; -#else - bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffsets == rhs.srcOffsets ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffsets == rhs.dstOffsets ); - } - - bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; - - }; - static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageCopy - { - - - VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) - {} - - ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageCopy& operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } - - ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT - { - srcOffset = srcOffset_; - return *this; - } - - ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT - { - dstSubresource = dstSubresource_; - return *this; - } - - ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT - { - dstOffset = dstOffset_; - return *this; - } - - ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT - { - extent = extent_; - return *this; - } - - - operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageCopy &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageCopy const& ) const = default; -#else - bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffset == rhs.srcOffset ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffset == rhs.dstOffset ) - && ( extent == rhs.extent ); - } - - bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - - }; - static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct ImageCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; - VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - uint32_t mipLevels_ = {}, - uint32_t arrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCreateInfo( + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + uint32_t mipLevels_ = {}, + uint32_t arrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) , imageType( imageType_ ) , format( format_ ) @@ -43913,24 +47767,51 @@ namespace VULKAN_HPP_NAMESPACE , initialLayout( initialLayout_ ) {} - ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageCreateInfo ) - offsetof( ImageCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : ImageCreateInfo( *reinterpret_cast( &rhs ) ) + {} - ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageType imageType_, + VULKAN_HPP_NAMESPACE::Format format_, + VULKAN_HPP_NAMESPACE::Extent3D extent_, + uint32_t mipLevels_, + uint32_t arrayLayers_, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + : flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + 
, mipLevels( mipLevels_ )
+      , arrayLayers( arrayLayers_ )
+      , samples( samples_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
+      , pQueueFamilyIndices( queueFamilyIndices_.data() )
+      , initialLayout( initialLayout_ )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<ImageCreateInfo const*>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
      return *this;
     }
 
-    ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
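The hunk below gives ImageCreateInfo the same ArrayProxyNoTemporaries treatment for its queue family indices, which matters mainly for images shared across queues. A sketch assuming two family indices already queried elsewhere (makeSharedImageInfo and its parameter are illustrative, not part of the patch):

#include <array>
#include <vulkan/vulkan.hpp>

vk::ImageCreateInfo makeSharedImageInfo( std::array<uint32_t, 2> const & families )
{
    // setQueueFamilyIndices() fills queueFamilyIndexCount and pQueueFamilyIndices
    // together; `families` must outlive the returned create info, since only the
    // pointer is stored.
    return vk::ImageCreateInfo{}
        .setImageType( vk::ImageType::e2D )
        .setSharingMode( vk::SharingMode::eConcurrent )
        .setQueueFamilyIndices( families );
}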
@@ -44002,77 +47883,73 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
     {
       pQueueFamilyIndices = pQueueFamilyIndices_;
       return *this;
     }
 
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageCreateInfo & setQueueFamilyIndices(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices   = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
     ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
     {
       initialLayout = initialLayout_;
       return *this;
     }
 
-
-    operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkImageCreateInfo*>( this );
+      return *reinterpret_cast<const VkImageCreateInfo *>( this );
     }
 
     operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkImageCreateInfo*>( this );
+      return *reinterpret_cast<VkImageCreateInfo *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageCreateInfo const & ) const = default;
 #else
-    bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( imageType == rhs.imageType )
-          && ( format == rhs.format )
-          && ( extent == rhs.extent )
-          && ( mipLevels == rhs.mipLevels )
-          && ( arrayLayers == rhs.arrayLayers )
-          && ( samples == rhs.samples )
-          && ( tiling == rhs.tiling )
-          && ( usage == rhs.usage )
-          && ( sharingMode == rhs.sharingMode )
-          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
-          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
-          && ( initialLayout == rhs.initialLayout );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( imageType == rhs.imageType ) && ( format == rhs.format ) && ( extent == rhs.extent ) &&
+             ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) &&
+             ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
+             ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
+             ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( initialLayout == rhs.initialLayout );
     }
 
-    bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
-    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
-    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
-    uint32_t mipLevels = {};
-    uint32_t arrayLayers = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
-    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
-    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
-    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
-    uint32_t queueFamilyIndexCount = {};
-    const uint32_t* pQueueFamilyIndices = {};
-    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
-
+    VULKAN_HPP_NAMESPACE::StructureType       sType = StructureType::eImageCreateInfo;
+    const void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags    flags = {};
+    VULKAN_HPP_NAMESPACE::ImageType           imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::Format              format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent3D            extent = {};
+    uint32_t                                  mipLevels = {};
+    uint32_t                                  arrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageTiling         tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode         sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t                                  queueFamilyIndexCount = {};
+    const uint32_t *                          pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout         initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
   };
   static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!"
); @@ -44083,1539 +47960,19 @@ namespace VULKAN_HPP_NAMESPACE using Type = ImageCreateInfo; }; - struct SubresourceLayout - { - - - VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , size( size_ ) - , rowPitch( rowPitch_ ) - , arrayPitch( arrayPitch_ ) - , depthPitch( depthPitch_ ) - {} - - SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubresourceLayout& operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubresourceLayout const& ) const = default; -#else - bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( offset == rhs.offset ) - && ( size == rhs.size ) - && ( rowPitch == rhs.rowPitch ) - && ( arrayPitch == rhs.arrayPitch ) - && ( depthPitch == rhs.depthPitch ); - } - - bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; - VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; - VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; - - }; - static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageDrmFormatModifierExplicitCreateInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; - - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , pPlaneLayouts( pPlaneLayouts_ ) - {} - - ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) - offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) ); - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifier = drmFormatModifier_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT - { - pPlaneLayouts = pPlaneLayouts_; - return *this; - } - - - operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const& ) const = default; -#else - bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ) - && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) - && ( pPlaneLayouts == rhs.pPlaneLayouts ); - } - - bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; - const void* pNext = {}; - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; - const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {}; - - }; - static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; - }; - - struct ImageDrmFormatModifierListCreateInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; - - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, - const uint64_t* pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifiers( pDrmFormatModifiers_ ) - {} - - ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) - offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) ); - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifierCount = drmFormatModifierCount_; - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT - { - pDrmFormatModifiers = pDrmFormatModifiers_; - return *this; - } - - - operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const& ) const = default; -#else - bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) - && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); - } - - bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; - const void* pNext = {}; - uint32_t drmFormatModifierCount = {}; - const uint64_t* pDrmFormatModifiers = {}; - - }; - static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageDrmFormatModifierListCreateInfoEXT; - }; - - struct ImageDrmFormatModifierPropertiesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT; - - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - {} - - ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) ); - return *this; - } - - ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageDrmFormatModifierPropertiesEXT const& ) const = default; -#else - bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ); - } - - bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; - void* pNext = {}; - uint64_t drmFormatModifier = {}; - - }; - static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageDrmFormatModifierPropertiesEXT; - }; - - struct ImageFormatListCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; - - VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT - : viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) - {} - - ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageFormatListCreateInfo ) - offsetof( ImageFormatListCreateInfo, pNext ) ); - return *this; - } - - ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageFormatListCreateInfo& operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT - { - viewFormatCount = viewFormatCount_; - return *this; - } - - ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT - { - pViewFormats = pViewFormats_; - return *this; - } - - - operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageFormatListCreateInfo const& ) const = default; -#else - bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( viewFormatCount == rhs.viewFormatCount ) - && ( pViewFormats == rhs.pViewFormats ); - } - - bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; - const void* pNext = {}; - uint32_t viewFormatCount = {}; - const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {}; - - }; - static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  template <>
-  struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
-  {
-    using Type = ImageFormatListCreateInfo;
-  };
-
-  struct ImageFormatProperties2
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
-
-    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
-      : imageFormatProperties( imageFormatProperties_ )
-    {}
-
-    ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( ImageFormatProperties2 ) - offsetof( ImageFormatProperties2, pNext ) );
-      return *this;
-    }
-
-    ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkImageFormatProperties2*>( this );
-    }
-
-    operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkImageFormatProperties2*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageFormatProperties2 const& ) const = default;
-#else
-    bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( imageFormatProperties == rhs.imageFormatProperties );
-    }
-
-    bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
-
-  };
-  static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eImageFormatProperties2>
-  {
-    using Type = ImageFormatProperties2;
-  };
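The next two hunks remove ImageSubresourceRange and ImageMemoryBarrier, the wrappers client code touches most when recording image layout transitions. For orientation, a short sketch of the setter chains these hunks delete and the new header regenerates; this is not part of the patch, and `cmd` and `image` are assumed, already-created vk::CommandBuffer and vk::Image handles.

  // Sketch only: a typical undefined -> transfer-dst transition.
  vk::ImageSubresourceRange range = vk::ImageSubresourceRange{}
                                        .setAspectMask( vk::ImageAspectFlagBits::eColor )
                                        .setBaseMipLevel( 0 )
                                        .setLevelCount( 1 )
                                        .setBaseArrayLayer( 0 )
                                        .setLayerCount( 1 );

  vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier{}
                                       .setSrcAccessMask( {} )
                                       .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
                                       .setOldLayout( vk::ImageLayout::eUndefined )
                                       .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
                                       .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
                                       .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
                                       .setImage( image )
                                       .setSubresourceRange( range );

  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
                       {}, nullptr, nullptr, barrier );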
-
-  struct ImageSubresourceRange
-  {
-
-
-    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
-                                                uint32_t baseMipLevel_ = {},
-                                                uint32_t levelCount_ = {},
-                                                uint32_t baseArrayLayer_ = {},
-                                                uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
-      : aspectMask( aspectMask_ )
-      , baseMipLevel( baseMipLevel_ )
-      , levelCount( levelCount_ )
-      , baseArrayLayer( baseArrayLayer_ )
-      , layerCount( layerCount_ )
-    {}
-
-    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>(&rhs);
-      return *this;
-    }
-
-    ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      aspectMask = aspectMask_;
-      return *this;
-    }
-
-    ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
-    {
-      baseMipLevel = baseMipLevel_;
-      return *this;
-    }
-
-    ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      levelCount = levelCount_;
-      return *this;
-    }
-
-    ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
-    {
-      baseArrayLayer = baseArrayLayer_;
-      return *this;
-    }
-
-    ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      layerCount = layerCount_;
-      return *this;
-    }
-
-
-    operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkImageSubresourceRange*>( this );
-    }
-
-    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkImageSubresourceRange*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageSubresourceRange const& ) const = default;
-#else
-    bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( aspectMask == rhs.aspectMask )
-          && ( baseMipLevel == rhs.baseMipLevel )
-          && ( levelCount == rhs.levelCount )
-          && ( baseArrayLayer == rhs.baseArrayLayer )
-          && ( layerCount == rhs.layerCount );
-    }
-
-    bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
-    uint32_t baseMipLevel = {};
-    uint32_t levelCount = {};
-    uint32_t baseArrayLayer = {};
-    uint32_t layerCount = {};
-
-  };
-  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!"
); - - struct ImageMemoryBarrier - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; - - VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , oldLayout( oldLayout_ ) - , newLayout( newLayout_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , image( image_ ) - , subresourceRange( subresourceRange_ ) - {} - - ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageMemoryBarrier ) - offsetof( ImageMemoryBarrier, pNext ) ); - return *this; - } - - ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } - - ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - dstAccessMask = dstAccessMask_; - return *this; - } - - ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT - { - oldLayout = oldLayout_; - return *this; - } - - ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT - { - newLayout = newLayout_; - return *this; - } - - ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - srcQueueFamilyIndex = srcQueueFamilyIndex_; - return *this; - } - - ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - dstQueueFamilyIndex = dstQueueFamilyIndex_; - return *this; - } - - ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT - { - subresourceRange = subresourceRange_; - return *this; - } - - - operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageMemoryBarrier const& ) const = default; -#else - bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( oldLayout == 
rhs.oldLayout ) - && ( newLayout == rhs.newLayout ) - && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) - && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) - && ( image == rhs.image ) - && ( subresourceRange == rhs.subresourceRange ); - } - - bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t srcQueueFamilyIndex = {}; - uint32_t dstQueueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; - - }; - static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = ImageMemoryBarrier; - }; - - struct ImageMemoryRequirementsInfo2 - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; - - VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - {} - - ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) ); - return *this; - } - - ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - - operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageMemoryRequirementsInfo2 const& ) const = default; -#else - bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); - } - - bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - - }; - static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = ImageMemoryRequirementsInfo2; - }; - -#ifdef VK_USE_PLATFORM_FUCHSIA - struct ImagePipeSurfaceCreateInfoFUCHSIA - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; - - VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, - zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , imagePipeHandle( imagePipeHandle_ ) - {} - - ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) ); - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT - { - imagePipeHandle = imagePipeHandle_; - return *this; - } - - - operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const& ) const = default; -#else - bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); - } - - bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; - zx_handle_t imagePipeHandle = {}; - - }; - static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImagePipeSurfaceCreateInfoFUCHSIA; - }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - - struct ImagePlaneMemoryRequirementsInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; - - VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT - : planeAspect( planeAspect_ ) - {} - - ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) ); - return *this; - } - - ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT - { - planeAspect = planeAspect_; - return *this; - } - - - operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImagePlaneMemoryRequirementsInfo const& ) const = default; -#else - bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( planeAspect == rhs.planeAspect ); - } - - bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; - - }; - static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImagePlaneMemoryRequirementsInfo; - }; - - struct ImageResolve - { - - - VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) - {} - - ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageResolve& operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } - - ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT - { - srcOffset = srcOffset_; - return *this; - } - - ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT - { - dstSubresource = dstSubresource_; - return *this; - } - - ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT - { - dstOffset = dstOffset_; - return *this; - } - - ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT - { - extent = extent_; - return *this; - } - - - operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageResolve &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageResolve const& ) const = default; -#else - bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffset == rhs.srcOffset ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffset == rhs.dstOffset ) - && ( extent == rhs.extent ); - } - - bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - - }; - static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageSparseMemoryRequirementsInfo2 - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; - - VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - {} - - ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) ); - return *this; - } - - ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - - operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSparseMemoryRequirementsInfo2 const& ) const = default; -#else - bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); - } - - bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - - }; - static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageSparseMemoryRequirementsInfo2; - }; - - struct ImageStencilUsageCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; - - VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT - : stencilUsage( stencilUsage_ ) - {} - - ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageStencilUsageCreateInfo ) - offsetof( ImageStencilUsageCreateInfo, pNext ) ); - return *this; - } - - ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageStencilUsageCreateInfo& operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT - { - stencilUsage = stencilUsage_; - return *this; - } - - - operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageStencilUsageCreateInfo const& ) const = default; -#else - bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilUsage == rhs.stencilUsage ); - } - - bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; - - }; - static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageStencilUsageCreateInfo; - }; - - struct ImageSwapchainCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT - : swapchain( swapchain_ ) - {} - - ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) ); - return *this; - } - - ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT - { - swapchain = swapchain_; - return *this; - } - - - operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageSwapchainCreateInfoKHR const& ) const = default; -#else - bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ); - } - - bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; - - }; - static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageSwapchainCreateInfoKHR; - }; - - struct ImageViewASTCDecodeModeEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; - - VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT - : decodeMode( decodeMode_ ) - {} - - ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) ); - return *this; - } - - ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT - { - decodeMode = decodeMode_; - return *this; - } - - - operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewASTCDecodeModeEXT const& ) const = default; -#else - bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( decodeMode == rhs.decodeMode ); - } - - bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; - - }; - static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageViewASTCDecodeModeEXT; - }; - - struct ImageViewAddressPropertiesNVX - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; - - VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceAddress( deviceAddress_ ) - , size( size_ ) - {} - - ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageViewAddressPropertiesNVX ) - offsetof( ImageViewAddressPropertiesNVX, pNext ) ); - return *this; - } - - ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageViewAddressPropertiesNVX& operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkImageViewAddressPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewAddressPropertiesNVX const& ) const = default; -#else - bool operator==( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceAddress == rhs.deviceAddress ) - && ( size == rhs.size ); - } - - bool operator!=( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - - }; - static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  template <>
-  struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
-  {
-    using Type = ImageViewAddressPropertiesNVX;
-  };
 
   struct ImageViewCreateInfo
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
 
-    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
-                                              VULKAN_HPP_NAMESPACE::Image image_ = {},
-                                              VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
-                                              VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
-                                              VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
-                                              VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
+                           VULKAN_HPP_NAMESPACE::Image image_ = {},
+                           VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
+                           VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                           VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+                           VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
       : flags( flags_ )
       , image( image_ )
       , viewType( viewType_ )
@@ -45624,24 +47981,23 @@ namespace VULKAN_HPP_NAMESPACE
       , subresourceRange( subresourceRange_ )
     {}
 
-    ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( ImageViewCreateInfo ) - offsetof( ImageViewCreateInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &
+                            operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
       return *this;
     }
 
-    ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -45671,65 +48027,58 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    ImageViewCreateInfo &
+      setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
     {
      components = components_;
      return *this;
    }
 
-    ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    ImageViewCreateInfo &
+      setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }
 
-
-    operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
+      return *reinterpret_cast<const VkImageViewCreateInfo *>( this );
    }
 
     operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkImageViewCreateInfo*>( this );
+      return *reinterpret_cast<VkImageViewCreateInfo *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageViewCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageViewCreateInfo const & ) const = default;
 #else
-    bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( image == rhs.image )
-          && ( viewType == rhs.viewType )
-          && ( format == rhs.format )
-          && ( components == rhs.components )
-          && ( subresourceRange == rhs.subresourceRange );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) &&
+             ( viewType == rhs.viewType ) && ( format == rhs.format ) && ( components == rhs.components ) &&
+             ( subresourceRange == rhs.subresourceRange );
     }
 
-    bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
-    VULKAN_HPP_NAMESPACE::Image image = {};
-    VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
-    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
-    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::StructureType         sType      = StructureType::eImageViewCreateInfo;
+    const void *                                pNext      = {};
+    VULKAN_HPP_NAMESPACE::ImageViewCreateFlags  flags      = {};
+    VULKAN_HPP_NAMESPACE::Image                 image      = {};
+    VULKAN_HPP_NAMESPACE::ImageViewType         viewType   = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+    VULKAN_HPP_NAMESPACE::Format                format     = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ComponentMapping      components = {};
     VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
-
   };
-  static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
 
   template <>
@@ -45738,1157 +48087,27 @@ namespace VULKAN_HPP_NAMESPACE
   struct CppType<StructureType, StructureType::eImageViewCreateInfo>
   {
     using Type = ImageViewCreateInfo;
   };
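The ImageViewCreateInfo hunk above shows the shape of the whole header bump for wrapper structs: the memcpy-based operator= (which had to skip the const sType member) gives way to defaulted, VULKAN_HPP_CONSTEXPR_14 copy operations, sType loses its const, and the value constructors move behind the VULKAN_HPP_NO_STRUCT_CONSTRUCTORS guard. Call sites are unchanged; a minimal sketch, assuming `device` and `image` are existing vk::Device and vk::Image handles and the default exception-enabled configuration:

  // Sketch only: create a 2D color view over mip 0 / layer 0.
  vk::ImageViewCreateInfo viewInfo = vk::ImageViewCreateInfo{}
                                         .setImage( image )
                                         .setViewType( vk::ImageViewType::e2D )
                                         .setFormat( vk::Format::eR8G8B8A8Unorm )
                                         .setSubresourceRange( vk::ImageSubresourceRange{
                                             vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );

  vk::ImageView view = device.createImageView( viewInfo );  // throws vk::SystemError on failure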
-
-  struct ImageViewHandleInfoNVX
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
-
-    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
-                                                 VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
-                                                 VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT
-      : imageView( imageView_ )
-      , descriptorType( descriptorType_ )
-      , sampler( sampler_ )
-    {}
-
-    ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( ImageViewHandleInfoNVX ) - offsetof( ImageViewHandleInfoNVX, pNext ) );
-      return *this;
-    }
-
-    ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>(&rhs);
-      return *this;
-    }
-
-    ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
-    {
-      imageView = imageView_;
-      return *this;
-    }
-
-    ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorType = descriptorType_;
-      return *this;
-    }
-
-    ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
-    {
-      sampler = sampler_;
-      return *this;
-    }
-
-
-    operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
-    }
-
-    operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ImageViewHandleInfoNVX const& ) const = default;
-#else
-    bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( imageView == rhs.imageView )
-          && ( descriptorType == rhs.descriptorType )
-          && ( sampler == rhs.sampler );
-    }
-
-    bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
-    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
-    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
-
-  };
-  static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!"
); - - template <> - struct CppType - { - using Type = ImageViewHandleInfoNVX; - }; - - struct ImageViewUsageCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; - - VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT - : usage( usage_ ) - {} - - ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImageViewUsageCreateInfo ) - offsetof( ImageViewUsageCreateInfo, pNext ) ); - return *this; - } - - ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - - - operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImageViewUsageCreateInfo const& ) const = default; -#else - bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( usage == rhs.usage ); - } - - bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - - }; - static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImageViewUsageCreateInfo; - }; - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ImportAndroidHardwareBufferInfoANDROID - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID; - - VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - {} - - ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportAndroidHardwareBufferInfoANDROID ) - offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) ); - return *this; - } - - ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - - operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const& ) const = default; -#else - bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; - const void* pNext = {}; - struct AHardwareBuffer* buffer = {}; - - }; - static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImportAndroidHardwareBufferInfoANDROID; - }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - struct ImportFenceFdInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; - - VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - int fd_ = {} ) VULKAN_HPP_NOEXCEPT - : fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) - {} - - ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportFenceFdInfoKHR ) - offsetof( ImportFenceFdInfoKHR, pNext ) ); - return *this; - } - - ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT - { - fence = fence_; - return *this; - } - - ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT - { - fd = fd_; - return *this; - } - - - operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportFenceFdInfoKHR const& ) const = default; -#else - bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); - } - - bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - int fd = {}; - - }; - static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImportFenceFdInfoKHR; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportFenceWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) - {} - - ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT - { - fence = fence_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT - { - handle = handle_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - - operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportFenceWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); - } - - bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; - - }; - static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), 
"struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = ImportFenceWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ImportMemoryFdInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; - - VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - int fd_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , fd( fd_ ) - {} - - ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryFdInfoKHR ) - offsetof( ImportMemoryFdInfoKHR, pNext ) ); - return *this; - } - - ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT - { - fd = fd_; - return *this; - } - - - operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryFdInfoKHR const& ) const = default; -#else - bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); - } - - bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - int fd = {}; - - }; - static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImportMemoryFdInfoKHR; - }; - - struct ImportMemoryHostPointerInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; - - VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - void* pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , pHostPointer( pHostPointer_ ) - {} - - ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) ); - return *this; - } - - ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT - { - pHostPointer = pHostPointer_; - return *this; - } - - - operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryHostPointerInfoEXT const& ) const = default; -#else - bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( pHostPointer == rhs.pHostPointer ); - } - - bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - void* pHostPointer = {}; - - }; - static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
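// --------------------------------------------------------------------------
// [editorial example, not part of the generated header] Host-pointer import
// (VK_EXT_external_memory_host) follows the same pNext pattern; the pointer
// and size must be multiples of minImportedHostPointerAlignment, and the
// memory type should come from getMemoryHostPointerPropertiesEXT. All
// parameters below are placeholders:
vk::DeviceMemory importHostAllocation( vk::Device device, void * hostPtr, vk::DeviceSize size, uint32_t typeIndex )
{
  vk::ImportMemoryHostPointerInfoEXT importInfo{};
  importInfo.setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT )
            .setPHostPointer( hostPtr );
  vk::MemoryAllocateInfo alloc{};
  alloc.setPNext( &importInfo ).setAllocationSize( size ).setMemoryTypeIndex( typeIndex );
  return device.allocateMemory( alloc );
}
// --------------------------------------------------------------------------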
); - - template <> - struct CppType - { - using Type = ImportMemoryHostPointerInfoEXT; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) - {} - - ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT - { - handle = handle_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - - operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); - } - - bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; - - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
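// --------------------------------------------------------------------------
// [editorial example, not part of the generated header] The same pNext pattern
// works for Win32 handles; note that, unlike the fd path, importing an NT
// handle does not transfer ownership, so the application still closes it.
// Sketch with placeholder parameters:
vk::DeviceMemory importMemoryWin32( vk::Device device, HANDLE handle, vk::DeviceSize size, uint32_t typeIndex )
{
  vk::ImportMemoryWin32HandleInfoKHR importInfo{};
  importInfo.setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ).setHandle( handle );
  vk::MemoryAllocateInfo alloc{};
  alloc.setPNext( &importInfo ).setAllocationSize( size ).setMemoryTypeIndex( typeIndex );
  return device.allocateMemory( alloc );
}
// --------------------------------------------------------------------------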
); - - template <> - struct CppType - { - using Type = ImportMemoryWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; - - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, - HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) - , handle( handle_ ) - {} - - ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) ); - return *this; - } - - ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT - { - handle = handle_; - return *this; - } - - - operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportMemoryWin32HandleInfoNV const& ) const = default; -#else - bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ); - } - - bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; - HANDLE handle = {}; - - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = ImportMemoryWin32HandleInfoNV; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ImportSemaphoreFdInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; - - VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - int fd_ = {} ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) - {} - - ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportSemaphoreFdInfoKHR ) - offsetof( ImportSemaphoreFdInfoKHR, pNext ) ); - return *this; - } - - ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT - { - fd = fd_; - return *this; - } - - - operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportSemaphoreFdInfoKHR const& ) const = default; -#else - bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); - } - - bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - int fd = {}; - - }; - static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
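// --------------------------------------------------------------------------
// [editorial example, not part of the generated header] Semaphore imports
// mirror the fence path; per the external-semaphore rules, sync-fd payloads
// must be imported with the temporary flag. Placeholders throughout:
void importSemaphoreSyncFd( vk::Device device, vk::Semaphore semaphore, int syncFd )
{
  vk::ImportSemaphoreFdInfoKHR info{};
  info.setSemaphore( semaphore )
      .setFlags( vk::SemaphoreImportFlagBits::eTemporary )
      .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
      .setFd( syncFd );
  device.importSemaphoreFdKHR( info );
}
// --------------------------------------------------------------------------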
); - - template <> - struct CppType - { - using Type = ImportSemaphoreFdInfoKHR; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportSemaphoreWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) - {} - - ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) ); - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT - { - handle = handle_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } - - - operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); - } - - bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; - - }; - static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = ImportSemaphoreWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct IndirectCommandsLayoutTokenNV { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, - uint32_t stream_ = {}, - uint32_t offset_ = {}, - uint32_t vertexBindingUnit_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, - uint32_t pushconstantOffset_ = {}, - uint32_t pushconstantSize_ = {}, - VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, - uint32_t indexTypeCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ = {}, - const uint32_t* pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, + const uint32_t * pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT : tokenType( tokenType_ ) , stream( stream_ ) , offset( offset_ ) @@ -46904,30 +48123,72 @@ namespace VULKAN_HPP_NAMESPACE , pIndexTypeValues( pIndexTypeValues_ ) {} - IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutTokenNV ) - offsetof( IndirectCommandsLayoutTokenNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) + {} - IndirectCommandsLayoutTokenNV& operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, + uint32_t stream_, + uint32_t offset_, + uint32_t vertexBindingUnit_, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, + 
VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, + uint32_t pushconstantOffset_, + uint32_t pushconstantSize_, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {} ) + : tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( static_cast( indexTypes_.size() ) ) + , pIndexTypes( indexTypes_.data() ) + , pIndexTypeValues( indexTypeValues_.data() ) { - *this = *reinterpret_cast(&rhs); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); +# else + if ( indexTypes_.size() != indexTypeValues_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - IndirectCommandsLayoutTokenNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT { tokenType = tokenType_; return *this; @@ -46951,19 +48212,22 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT { vertexDynamicStride = vertexDynamicStride_; return *this; } - IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT { pushconstantPipelineLayout = pushconstantPipelineLayout_; return *this; } - IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT { pushconstantShaderStageFlags = pushconstantShaderStageFlags_; return *this; @@ -46981,7 +48245,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutTokenNV & 
setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT { indirectStateFlags = indirectStateFlags_; return *this; @@ -46993,80 +48258,93 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & + setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT { pIndexTypes = pIndexTypes_; return *this; } - IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t* pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & setIndexTypes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) + VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypes_.size() ); + pIndexTypes = indexTypes_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT { pIndexTypeValues = pIndexTypeValues_; return *this; } - - operator VkIndirectCommandsLayoutTokenNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & setIndexTypeValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + indexTypeCount = static_cast( indexTypeValues_.size() ); + pIndexTypeValues = indexTypeValues_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutTokenNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; #else - bool operator==( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( tokenType == rhs.tokenType ) - && ( stream == rhs.stream ) - && ( offset == rhs.offset ) - && ( vertexBindingUnit == rhs.vertexBindingUnit ) - && ( vertexDynamicStride == rhs.vertexDynamicStride ) - && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) - && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) - && ( pushconstantOffset == rhs.pushconstantOffset ) - && ( pushconstantSize == rhs.pushconstantSize ) - && ( indirectStateFlags == rhs.indirectStateFlags ) - && ( indexTypeCount == rhs.indexTypeCount ) - && ( pIndexTypes == rhs.pIndexTypes ) - && ( pIndexTypeValues == rhs.pIndexTypeValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && + ( stream == rhs.stream ) && ( offset == rhs.offset ) && ( vertexBindingUnit == rhs.vertexBindingUnit ) && + ( vertexDynamicStride == rhs.vertexDynamicStride ) && + ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && + ( 
pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) &&
+             ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) &&
+             ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) &&
+             ( pIndexTypes == rhs.pIndexTypes ) && ( pIndexTypeValues == rhs.pIndexTypeValues );
     }

-    bool operator!=( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
#endif

-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
-    uint32_t stream = {};
-    uint32_t offset = {};
-    uint32_t vertexBindingUnit = {};
-    VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
-    VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
-    VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
-    uint32_t pushconstantOffset = {};
-    uint32_t pushconstantSize = {};
-    VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
-    uint32_t indexTypeCount = {};
-    const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes = {};
-    const uint32_t* pIndexTypeValues = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eIndirectCommandsLayoutTokenNV;
+    const void *                                      pNext = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType =
+      VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
+    uint32_t                                          stream = {};
+    uint32_t                                          offset = {};
+    uint32_t                                          vertexBindingUnit = {};
+    VULKAN_HPP_NAMESPACE::Bool32                      vertexDynamicStride = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout              pushconstantPipelineLayout = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags            pushconstantShaderStageFlags = {};
+    uint32_t                                          pushconstantOffset = {};
+    uint32_t                                          pushconstantSize = {};
+    VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV        indirectStateFlags = {};
+    uint32_t                                          indexTypeCount = {};
+    const VULKAN_HPP_NAMESPACE::IndexType *           pIndexTypes = {};
+    const uint32_t *                                  pIndexTypeValues = {};
   };
-  static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNV>::value,
+                 "struct wrapper is not a standard layout!"
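// --------------------------------------------------------------------------
// [editorial example, not part of the generated header] The enhanced-mode
// constructor and setters added above keep indexTypeCount and the two arrays
// in sync, and the constructor rejects spans of different length (throwing
// LogicError, or asserting under VULKAN_HPP_NO_EXCEPTIONS). Sketch of an
// index-buffer token; values are illustrative and <array> is assumed. The
// arrays are static because the token only stores pointers into them
// (hence ArrayProxyNoTemporaries):
vk::IndirectCommandsLayoutTokenNV makeIndexBufferToken()
{
  static std::array<vk::IndexType, 2> types{ vk::IndexType::eUint16, vk::IndexType::eUint32 };
  static std::array<uint32_t, 2>      rawValues{ 17u, 34u };
  vk::IndirectCommandsLayoutTokenNV token{};
  token.setTokenType( vk::IndirectCommandsTokenTypeNV::eIndexBuffer )
       .setStream( 0 )
       .setOffset( 0 )
       .setIndexTypes( types )            // also writes indexTypeCount
       .setIndexTypeValues( rawValues );  // overwrites the count too, so use equal-length spans
  return token;
}
// --------------------------------------------------------------------------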
); template <> struct CppType @@ -47076,15 +48354,18 @@ namespace VULKAN_HPP_NAMESPACE struct IndirectCommandsLayoutCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eIndirectCommandsLayoutCreateInfoNV; - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t tokenCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ = {}, - uint32_t streamCount_ = {}, - const uint32_t* pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t * pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , pipelineBindPoint( pipelineBindPoint_ ) , tokenCount( tokenCount_ ) @@ -47093,36 +48374,55 @@ namespace VULKAN_HPP_NAMESPACE , pStreamStrides( pStreamStrides_ ) {} - IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutCreateInfoNV ) - offsetof( IndirectCommandsLayoutCreateInfoNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} - IndirectCommandsLayoutCreateInfoNV& operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + tokens_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) + , streamCount( static_cast( streamStrides_.size() ) ) + , pStreamStrides( streamStrides_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV & + operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - IndirectCommandsLayoutCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setPNext( const void * 
pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; @@ -47134,72 +48434,87 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & setTokens( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + tokens_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT { streamCount = streamCount_; return *this; } - IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t* pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT { pStreamStrides = pStreamStrides_; return *this; } - - operator VkIndirectCommandsLayoutCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & setStreamStrides( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + streamCount = static_cast( streamStrides_.size() ); + pStreamStrides = streamStrides_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( IndirectCommandsLayoutCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; #else - bool operator==( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( tokenCount == rhs.tokenCount ) - && ( pTokens == rhs.pTokens ) - && ( streamCount == rhs.streamCount ) - && ( pStreamStrides == rhs.pStreamStrides ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) && + ( 
pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && + ( pStreamStrides == rhs.pStreamStrides ); } - bool operator!=( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t tokenCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens = {}; - uint32_t streamCount = {}; - const uint32_t* pStreamStrides = {}; - + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {}; + uint32_t streamCount = {}; + const uint32_t * pStreamStrides = {}; }; - static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -47207,494 +48522,4439 @@ namespace VULKAN_HPP_NAMESPACE using Type = IndirectCommandsLayoutCreateInfoNV; }; - struct InitializePerformanceApiInfoINTEL + struct PipelineCacheCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; - VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT - : pUserData( pUserData_ ) - {} - - InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) ); - return *this; - } - - InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT - { - pUserData = pUserData_; - return *this; - } - - - operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( InitializePerformanceApiInfoINTEL const& ) const = default; -#else - bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pUserData == rhs.pUserData ); - } - - bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; - const void* pNext = {}; - void* pUserData = {}; - - }; - static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = InitializePerformanceApiInfoINTEL; - }; - - struct InputAttachmentAspectReference - { - - - VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {}, - uint32_t inputAttachmentIndex_ = {}, - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT - : subpass( subpass_ ) - , inputAttachmentIndex( inputAttachmentIndex_ ) - , aspectMask( aspectMask_ ) - {} - - InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT - { - subpass = subpass_; - return *this; - } - - InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT - { - inputAttachmentIndex = inputAttachmentIndex_; - return *this; - } - - InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT - { - aspectMask = aspectMask_; - return *this; - } - - - operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( InputAttachmentAspectReference const& ) const = default; -#else - bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( subpass == rhs.subpass ) - && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) - && ( aspectMask == rhs.aspectMask ); - } - - bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - uint32_t subpass = {}; - uint32_t inputAttachmentIndex = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - - }; - static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct InstanceCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; - - VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, - uint32_t enabledLayerCount_ = {}, - const char* const* ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char* const* ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) - , pApplicationInfo( pApplicationInfo_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) {} - InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) + : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & + operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( InstanceCreateInfo ) - offsetof( InstanceCreateInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT { - pApplicationInfo = pApplicationInfo_; + initialDataSize = initialDataSize_; return *this; } - InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT { - enabledLayerCount = enabledLayerCount_; 
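// --------------------------------------------------------------------------
// [editorial example, not part of the generated header] Typical use of the
// InstanceCreateInfo wrapper being deleted above (it is re-emitted with the
// new formatting elsewhere in this diff): the count/pointer pairs are usually
// filled from arrays with static storage. Layer name and API version are
// illustrative only:
vk::Instance makeInstance()
{
  static vk::ApplicationInfo app =
    vk::ApplicationInfo().setPApplicationName( "demo" ).setApiVersion( VK_API_VERSION_1_2 );
  static const char * layers[] = { "VK_LAYER_KHRONOS_validation" };
  vk::InstanceCreateInfo ci{};
  ci.setPApplicationInfo( &app )
    .setEnabledLayerCount( 1 )
    .setPpEnabledLayerNames( layers );
  return vk::createInstance( ci );
}
// --------------------------------------------------------------------------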
+ pInitialData = pInitialData_; return *this; } - InstanceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo & + setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT { - ppEnabledLayerNames = ppEnabledLayerNames_; + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); return *this; } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - enabledExtensionCount = enabledExtensionCount_; - return *this; + return *reinterpret_cast( this ); } - InstanceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT { - ppEnabledExtensionNames = ppEnabledExtensionNames_; - return *this; + return *reinterpret_cast( this ); } - - operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( InstanceCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCacheCreateInfo const & ) const = default; #else - bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pApplicationInfo == rhs.pApplicationInfo ) - && ( enabledLayerCount == rhs.enabledLayerCount ) - && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) - && ( enabledExtensionCount == rhs.enabledExtensionCount ) - && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData ); } - bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; - const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {}; - uint32_t enabledLayerCount = {}; - const char* const* ppEnabledLayerNames = {}; - uint32_t enabledExtensionCount = {}; - const char* const* ppEnabledExtensionNames = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; }; - static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), + "struct and wrapper have different size!" 
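// --------------------------------------------------------------------------
// [editorial example, not part of the generated header] Seeding a pipeline
// cache from a blob saved by an earlier run (e.g. via getPipelineCacheData);
// implementations validate the header and simply ignore unusable data. The
// plain size/pointer setters are used here; the templated initialData_
// constructor above instead computes initialDataSize as count * sizeof( T ).
// <vector> is assumed:
vk::PipelineCache restoreCache( vk::Device device, std::vector<uint8_t> const & blob )
{
  vk::PipelineCacheCreateInfo ci{};
  ci.setInitialDataSize( blob.size() ).setPInitialData( blob.data() );
  return device.createPipelineCache( ci );
}
// --------------------------------------------------------------------------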
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = InstanceCreateInfo; + using Type = PipelineCacheCreateInfo; }; - struct LayerProperties + struct PushConstantRange { - - - VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array const& layerName_ = {}, - uint32_t specVersion_ = {}, - uint32_t implementationVersion_ = {}, - std::array const& description_ = {} ) VULKAN_HPP_NOEXCEPT - : layerName( layerName_ ) - , specVersion( specVersion_ ) - , implementationVersion( implementationVersion_ ) - , description( description_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + uint32_t offset_ = {}, + uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : stageFlags( stageFlags_ ) + , offset( offset_ ) + , size( size_ ) {} - LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - LayerProperties& operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT + : PushConstantRange( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & + operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT + PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stageFlags = stageFlags_; + return *this; } - operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT + PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + offset = offset_; + return *this; } + PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( LayerProperties const& ) const = default; + operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushConstantRange const & ) const = default; #else - bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( layerName == rhs.layerName ) - && ( specVersion == rhs.specVersion ) - && ( implementationVersion == rhs.implementationVersion ) - && ( description == rhs.description ); + return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); } - bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; - uint32_t specVersion = {}; - uint32_t implementationVersion = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - + 
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + uint32_t offset = {}; + uint32_t size = {}; }; - static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#ifdef VK_USE_PLATFORM_MACOS_MVK - struct MacOSSurfaceCreateInfoMVK + struct PipelineLayoutCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo; - VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, - const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) - , pView( pView_ ) + , setLayoutCount( setLayoutCount_ ) + , pSetLayouts( pSetLayouts_ ) + , pushConstantRangeCount( pushConstantRangeCount_ ) + , pPushConstantRanges( pPushConstantRanges_ ) {} - MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pushConstantRanges_ = {} ) + : flags( flags_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MacOSSurfaceCreateInfoMVK ) - offsetof( MacOSSurfaceCreateInfoMVK, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & setPNext( 
const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT { - pView = pView_; + setLayoutCount = setLayoutCount_; return *this; } - - operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSetLayouts = pSetLayouts_; + return *this; } - operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setSetLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; } + PipelineLayoutCreateInfo & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MacOSSurfaceCreateInfoMVK const& ) const = default; +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayoutCreateInfo const & ) const = default; #else - bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pView == rhs.pView ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ) && + ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ); } - bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; - const void* pNext = {}; - 
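// Editor's note: an illustrative sketch, not part of the generated header or
// this patch. It shows the PushConstantRange and PipelineLayoutCreateInfo
// wrappers above being filled in through their fluent setters, assuming the
// default "vk" alias for VULKAN_HPP_NAMESPACE; the function name is invented,
// and the set layout and range array are created elsewhere and must outlive
// the create info, which only stores pointers to them.
#include <vulkan/vulkan.hpp>

#include <array>

vk::PipelineLayoutCreateInfo
  makePipelineLayoutInfo( vk::DescriptorSetLayout const &              setLayout,
                          std::array<vk::PushConstantRange, 1> const & ranges )
{
  // Each setter returns *this, so the struct builds in one expression.
  return vk::PipelineLayoutCreateInfo{}
    .setSetLayouts( setLayout )        // enhanced mode: fills setLayoutCount and pSetLayouts together
    .setPushConstantRanges( ranges );  // likewise pushConstantRangeCount and pPushConstantRanges
}

// A range such as
//   vk::PushConstantRange{}.setStageFlags( vk::ShaderStageFlagBits::eVertex ).setOffset( 0 ).setSize( 64 )
// would populate the array; the result is typically handed to
// vk::Device::createPipelineLayout.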
VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; - const void* pView = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; }; - static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = MacOSSurfaceCreateInfoMVK; + using Type = PipelineLayoutCreateInfo; + }; + + struct PrivateDataSlotCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {} ) + VULKAN_HPP_NOEXCEPT : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + PrivateDataSlotCreateInfoEXT( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PrivateDataSlotCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfoEXT & + operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfoEXT & operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PrivateDataSlotCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PrivateDataSlotCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkPrivateDataSlotCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlotCreateInfoEXT const & ) const = default; +#else + bool operator==( PrivateDataSlotCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + } + + bool operator!=( PrivateDataSlotCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {}; + }; + static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PrivateDataSlotCreateInfoEXT; + }; + + class PrivateDataSlotEXT + { + public: + using CType = VkPrivateDataSlotEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT() = default; + VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT + : m_privateDataSlotEXT( privateDataSlotEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PrivateDataSlotEXT & operator=( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlotEXT = privateDataSlotEXT; + return *this; + } +#endif + + PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlotEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlotEXT const & ) const = default; +#else + bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT; + } + + bool operator!=( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT; + } + + bool operator<( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlotEXT == VK_NULL_HANDLE; + } + + private: + VkPrivateDataSlotEXT m_privateDataSlotEXT = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
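// Editor's note: an illustrative sketch, not part of the generated header or
// this patch, showing the VK_EXT_private_data wrappers of this section. The
// create info carries only a flags field; the PrivateDataSlotEXT handle
// defined nearby behaves like the other handle wrappers (nullptr-assignable
// and explicitly convertible to bool). The function name is invented, and
// creating the slot itself is assumed to happen elsewhere through the
// extension's entry points.
#include <vulkan/vulkan.hpp>

bool haveValidSlot( vk::PrivateDataSlotEXT slot )
{
  vk::PrivateDataSlotCreateInfoEXT createInfo{};  // flags left empty; the struct holds no other state
  static_cast<void>( createInfo );                // would be passed to slot creation
  return static_cast<bool>( slot );               // false while slot == VK_NULL_HANDLE
}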
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct QueryPoolCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, + uint32_t queryCount_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queryType( queryType_ ) + , queryCount( queryCount_ ) + , pipelineStatistics( pipelineStatistics_ ) + {} + + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT + { + queryType = queryType_; + return *this; + } + + QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } + + QueryPoolCreateInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolCreateInfo const & ) const = default; +#else + bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && + ( pipelineStatistics == rhs.pipelineStatistics ); + } + + bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; + static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo 
), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = QueryPoolCreateInfo; + }; + + struct RayTracingShaderGroupCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingShaderGroupCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {}, + const void * pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR & + operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoKHR & + setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + { + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + return *this; + } + + operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) && + ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + const void * pShaderGroupCaptureReplayHandle = {}; + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoKHR; + }; + + struct PipelineLibraryCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT + : libraryCount( libraryCount_ ) + , pLibraries( pLibraries_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) + : libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & + operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + PipelineLibraryCreateInfoKHR & + setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR & setLibraries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) + VULKAN_HPP_NOEXCEPT + { + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); + return *this; + } +#endif 
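// Editor's note: an illustrative sketch, not part of the generated header or
// this patch. It fills in two RayTracingShaderGroupCreateInfoKHR instances as
// defined above; the function name is invented, the shader indices refer to
// entries of the pipeline's pStages array, and every unused slot takes
// VK_SHADER_UNUSED_KHR.
#include <vulkan/vulkan.hpp>

#include <array>

std::array<vk::RayTracingShaderGroupCreateInfoKHR, 2> makeShaderGroups()
{
  // Group 0: the raygen shader, a "general" group pointing at pStages[0].
  vk::RayTracingShaderGroupCreateInfoKHR raygen{};
  raygen.setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
    .setGeneralShader( 0 )
    .setClosestHitShader( VK_SHADER_UNUSED_KHR )
    .setAnyHitShader( VK_SHADER_UNUSED_KHR )
    .setIntersectionShader( VK_SHADER_UNUSED_KHR );

  // Group 1: a triangles hit group whose closest-hit shader sits at pStages[1].
  vk::RayTracingShaderGroupCreateInfoKHR hitGroup{};
  hitGroup.setType( vk::RayTracingShaderGroupTypeKHR::eTrianglesHitGroup )
    .setGeneralShader( VK_SHADER_UNUSED_KHR )
    .setClosestHitShader( 1 )
    .setAnyHitShader( VK_SHADER_UNUSED_KHR )
    .setIntersectionShader( VK_SHADER_UNUSED_KHR );

  return { raygen, hitGroup };
}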
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && + ( pLibraries == rhs.pLibraries ); + } + + bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void * pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; + }; + static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PipelineLibraryCreateInfoKHR; + }; + + struct RayTracingPipelineInterfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, + uint32_t maxPipelineRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ) + , maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( + RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : RayTracingPipelineInterfaceCreateInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR & + operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; + return *this; + } + + RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; + return *this; + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) && + ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize ); + } + + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void * pNext = {}; + uint32_t maxPipelineRayPayloadSize = {}; + uint32_t maxPipelineRayHitAttributeSize = {}; + }; + static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == + sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineInterfaceCreateInfoKHR; + }; + + struct RayTracingPipelineCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & + setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoKHR & + setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPLibraryInterface( + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = 
pLibraryInterface_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setPDynamicState( + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && + ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; + uint32_t maxPipelineRayRecursionDepth = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
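// Editor's note: an illustrative sketch, not part of the generated header or
// this patch. It assembles the RayTracingPipelineCreateInfoKHR defined above
// with the enhanced-mode setters, which derive each count/pointer pair from
// an lvalue container; the function name is invented, and the stages, groups,
// and layout are created elsewhere and must outlive the create info.
#include <vulkan/vulkan.hpp>

#include <vector>

vk::RayTracingPipelineCreateInfoKHR
  makeRayTracingPipelineInfo( std::vector<vk::PipelineShaderStageCreateInfo> const &      stages,
                              std::vector<vk::RayTracingShaderGroupCreateInfoKHR> const & groups,
                              vk::PipelineLayout                                          layout )
{
  return vk::RayTracingPipelineCreateInfoKHR{}
    .setStages( stages )                   // stageCount + pStages
    .setGroups( groups )                   // groupCount + pGroups
    .setMaxPipelineRayRecursionDepth( 1 )  // 1: raygen may trace, but hit shaders may not recurse
    .setLayout( layout );
}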
); + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoKHR; + }; + + struct RayTracingShaderGroupCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & + operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); + } + + bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eRayTracingShaderGroupCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + }; + static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoNV; + }; + + struct RayTracingPipelineCreateInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setFlags( 
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + RayTracingPipelineCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & + setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + RayTracingPipelineCreateInfoNV & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = 
{}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoNV; + }; + + struct SubpassDescription + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescription( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + 
VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubpassDescription & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & setPResolveAttachments( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & setPDepthStencilAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ 
) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setPreserveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription const & ) const = default; +#else + bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct SubpassDependency
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SubpassDependency( uint32_t                                 srcSubpass_      = {},
+                         uint32_t                                 dstSubpass_      = {},
+                         VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_    = {},
+                         VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_    = {},
+                         VULKAN_HPP_NAMESPACE::AccessFlags        srcAccessMask_   = {},
+                         VULKAN_HPP_NAMESPACE::AccessFlags        dstAccessMask_   = {},
+                         VULKAN_HPP_NAMESPACE::DependencyFlags    dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcSubpass( srcSubpass_ )
+      , dstSubpass( dstSubpass_ )
+      , srcStageMask( srcStageMask_ )
+      , dstStageMask( dstStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , dependencyFlags( dependencyFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency &
+      operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency *>( this );
+    }
+
+    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassDependency const & ) const = default;
+#else
+    bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) &&
+             ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) &&
+             ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
+             ( dependencyFlags == rhs.dependencyFlags );
+    }
+
+    bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                                 srcSubpass      = {};
+    uint32_t                                 dstSubpass      = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask    = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask    = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags        srcAccessMask   = {};
+
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + }; + static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct RenderPassCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo( + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ = {} ) + : flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & + operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
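+
+  // Illustrative sketch only: chaining the setters to express a color-write -> color-write
+  // dependency between the implicit external subpass (VK_SUBPASS_EXTERNAL) and subpass 0:
+  //
+  //   auto dependency = vk::SubpassDependency{}
+  //                       .setSrcSubpass( VK_SUBPASS_EXTERNAL )
+  //                       .setDstSubpass( 0 )
+  //                       .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
+  //                       .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
+  //                       .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );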
+
+  struct RenderPassCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
+                            uint32_t attachmentCount_ = {},
+                            const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {},
+                            uint32_t subpassCount_ = {},
+                            const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {},
+                            uint32_t dependencyCount_ = {},
+                            const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , subpassCount( subpassCount_ )
+      , pSubpasses( pSubpasses_ )
+      , dependencyCount( dependencyCount_ )
+      , pDependencies( pDependencies_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo(
+      VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const &
+        attachments_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const &
+        subpasses_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const &
+        dependencies_ = {} )
+      : flags( flags_ )
+      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
+      , pAttachments( attachments_.data() )
+      , subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
+      , pSubpasses( subpasses_.data() )
+      , dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
+      , pDependencies( dependencies_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &
+      operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo &
+      setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo & setAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const &
+        attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments    = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo &
+      setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo & setSubpasses(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( subpasses_.size() );
+      pSubpasses   = subpasses_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo &
+      setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo & setDependencies(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const &
+        dependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+      pDependencies   = dependencies_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo *>( this );
+    }
+
+    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassCreateInfo const & ) const = default;
+#else
+    bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
+             ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
+             ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies );
+    }
+
+    bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {};
+    uint32_t subpassCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {};
+    uint32_t dependencyCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {};
+  };
+  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
+  {
+    using Type = RenderPassCreateInfo;
+  };
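+
+  // Illustrative sketch only: in enhanced mode the ArrayProxy constructor derives all
+  // three count fields from the containers, assuming caller-owned vectors `attachments`,
+  // `subpasses` and `dependencies` and a valid vk::Device named device:
+  //
+  //   vk::RenderPassCreateInfo createInfo( {}, attachments, subpasses, dependencies );
+  //   vk::RenderPass renderPass = device.createRenderPass( createInfo );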
+
+  struct SubpassDescription2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassDescription2(
+      VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
+      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+      uint32_t viewMask_ = {},
+      uint32_t inputAttachmentCount_ = {},
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {},
+      uint32_t colorAttachmentCount_ = {},
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {},
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {},
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
+      uint32_t preserveAttachmentCount_ = {},
+      const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , viewMask( viewMask_ )
+      , inputAttachmentCount( inputAttachmentCount_ )
+      , pInputAttachments( pInputAttachments_ )
+      , colorAttachmentCount( colorAttachmentCount_ )
+      , pColorAttachments( pColorAttachments_ )
+      , pResolveAttachments( pResolveAttachments_ )
+      , pDepthStencilAttachment( pDepthStencilAttachment_ )
+      , preserveAttachmentCount( preserveAttachmentCount_ )
+      , pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2(
+      VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
+      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
+      uint32_t viewMask_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
+        inputAttachments_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
+        colorAttachments_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
+        resolveAttachments_ = {},
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
+      : flags( flags_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , viewMask( viewMask_ )
+      , inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
+      , pInputAttachments( inputAttachments_.data() )
+      , colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
+      , pColorAttachments( colorAttachments_.data() )
+      , pResolveAttachments( resolveAttachments_.data() )
+      , pDepthStencilAttachment( pDepthStencilAttachment_ )
+      , preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) )
+      , pPreserveAttachments( preserveAttachments_.data() )
+    {
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
+#  else
+      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
+      {
+        throw LogicError(
+          VULKAN_HPP_NAMESPACE_STRING
+          "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
+      }
+#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
+      operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubpassDescription2 &
+      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2 &
+      setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2 & setInputAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
+        inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
+      pInputAttachments    = inputAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2 &
+      setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2 & setColorAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
+        colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+      pColorAttachments    = colorAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    SubpassDescription2 & setPResolveAttachments(
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2 & setResolveAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
+        resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
+      pResolveAttachments  = resolveAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    SubpassDescription2 & setPDepthStencilAttachment(
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SubpassDescription2 & setPreserveAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
+      pPreserveAttachments    = preserveAttachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription2 *>( this );
+    }
+
+    operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassDescription2 const & ) const = default;
+#else
+    bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( viewMask == rhs.viewMask ) &&
+             ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
+             ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
+             ( pResolveAttachments == rhs.pResolveAttachments ) &&
+             ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
+             ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
+             ( pPreserveAttachments == rhs.pPreserveAttachments );
+    }
+
+    bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t viewMask = {};
+    uint32_t inputAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {};
+    uint32_t preserveAttachmentCount = {};
+    const uint32_t * pPreserveAttachments = {};
+  };
+  static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDescription2>
+  {
+    using Type = SubpassDescription2;
+  };
+  using SubpassDescription2KHR = SubpassDescription2;
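+
+  // Note on the enhanced-mode constructor above: pResolveAttachments must be either empty
+  // or exactly colorAttachmentCount entries long, which is why the constructor asserts
+  // (or, with exceptions enabled, throws LogicError) on a size mismatch. Illustrative
+  // sketch, assuming caller-owned vectors colorRefs2 and resolveRefs2 of equal size:
+  //
+  //   vk::SubpassDescription2 subpass( {}, vk::PipelineBindPoint::eGraphics,
+  //                                    /*viewMask=*/0, {}, colorRefs2, resolveRefs2 );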
+
+  struct SubpassDependency2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {},
+                                             uint32_t dstSubpass_ = {},
+                                             VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
+                                             VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
+                                             VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+                                             VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+                                             VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
+                                             int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcSubpass( srcSubpass_ )
+      , dstSubpass( dstSubpass_ )
+      , srcStageMask( srcStageMask_ )
+      , dstStageMask( dstStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , dependencyFlags( dependencyFlags_ )
+      , viewOffset( viewOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &
+      operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
+      return *this;
+    }
+
+    SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency2 &
+      setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewOffset = viewOffset_;
+      return *this;
+    }
+
+    operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency2 *>( this );
+    }
+
+    operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassDependency2 const & ) const = default;
+#else
+    bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) &&
+             ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) &&
+             ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+             ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) &&
+             ( viewOffset == rhs.viewOffset );
+    }
+
+    bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
+    const void * pNext = {};
+    uint32_t srcSubpass = {};
+    uint32_t dstSubpass = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+    int32_t viewOffset = {};
+  };
+  static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSubpassDependency2>
+  {
+    using Type = SubpassDependency2;
+  };
+  using SubpassDependency2KHR = SubpassDependency2;
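+
+  // SubpassDependency2 extends SubpassDependency with pNext chaining and a viewOffset
+  // used by multiview: for a view-local dependency, viewOffset shifts which source views
+  // the destination views depend on. Illustrative sketch only:
+  //
+  //   auto dep2 = vk::SubpassDependency2{}
+  //                 .setSrcSubpass( 0 )
+  //                 .setDstSubpass( 1 )
+  //                 .setDependencyFlags( vk::DependencyFlagBits::eViewLocal )
+  //                 .setViewOffset( -1 );  // each view depends on the previous source view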
+
+  struct RenderPassCreateInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
+                                                uint32_t attachmentCount_ = {},
+                                                const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {},
+                                                uint32_t subpassCount_ = {},
+                                                const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {},
+                                                uint32_t dependencyCount_ = {},
+                                                const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {},
+                                                uint32_t correlatedViewMaskCount_ = {},
+                                                const uint32_t * pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , subpassCount( subpassCount_ )
+      , pSubpasses( pSubpasses_ )
+      , dependencyCount( dependencyCount_ )
+      , pDependencies( pDependencies_ )
+      , correlatedViewMaskCount( correlatedViewMaskCount_ )
+      , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2(
+      VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const &
+        attachments_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const &
+        subpasses_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const &
+        dependencies_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {} )
+      : flags( flags_ )
+      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
+      , pAttachments( attachments_.data() )
+      , subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
+      , pSubpasses( subpasses_.data() )
+      , dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
+      , pDependencies( dependencies_.data() )
+      , correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) )
+      , pCorrelatedViewMasks( correlatedViewMasks_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
+      operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 &
+      setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const &
+        attachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments    = attachments_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 &
+      setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setSubpasses(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const &
+        subpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( subpasses_.size() );
+      pSubpasses   = subpasses_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 &
+      setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setDependencies(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const &
+        dependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
+      pDependencies   = dependencies_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlatedViewMaskCount = correlatedViewMaskCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelatedViewMasks = pCorrelatedViewMasks_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassCreateInfo2 & setCorrelatedViewMasks(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
+      pCorrelatedViewMasks    = correlatedViewMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo2 *>( this );
+    }
+
+    operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassCreateInfo2 const & ) const = default;
+#else
+    bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
+             ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
+             ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) &&
+             ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) &&
+             ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
+    }
+
+    bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {};
+    uint32_t subpassCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {};
+    uint32_t dependencyCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {};
+    uint32_t correlatedViewMaskCount = {};
+    const uint32_t * pCorrelatedViewMasks = {};
+  };
+  static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
+  {
+    using Type = RenderPassCreateInfo2;
+  };
+  using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
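+
+  // Illustrative sketch only: RenderPassCreateInfo2 mirrors RenderPassCreateInfo but takes
+  // the *2 attachment/subpass/dependency types plus correlated view masks. Assuming
+  // caller-owned vectors of the *2 structs and a valid vk::Device named device:
+  //
+  //   vk::RenderPassCreateInfo2 createInfo2( {}, attachments2, subpasses2, dependencies2 );
+  //   vk::RenderPass renderPass = device.createRenderPass2( createInfo2 );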
+
+  struct SamplerCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo(
+      VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
+      VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+      VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+      VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
+      VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+      VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+      VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+      float mipLodBias_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
+      float maxAnisotropy_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
+      VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+      float minLod_ = {},
+      float maxLod_ = {},
+      VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
+      VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , magFilter( magFilter_ )
+      , minFilter( minFilter_ )
+      , mipmapMode( mipmapMode_ )
+      , addressModeU( addressModeU_ )
+      , addressModeV( addressModeV_ )
+      , addressModeW( addressModeW_ )
+      , mipLodBias( mipLodBias_ )
+      , anisotropyEnable( anisotropyEnable_ )
+      , maxAnisotropy( maxAnisotropy_ )
+      , compareEnable( compareEnable_ )
+      , compareOp( compareOp_ )
+      , minLod( minLod_ )
+      , maxLod( maxLod_ )
+      , borderColor( borderColor_ )
+      , unnormalizedCoordinates( unnormalizedCoordinates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
+      operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      magFilter = magFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minFilter = minFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipmapMode = mipmapMode_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeU = addressModeU_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeV = addressModeV_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeW = addressModeW_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLodBias = mipLodBias_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anisotropyEnable = anisotropyEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxAnisotropy = maxAnisotropy_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareEnable = compareEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLod = maxLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColor = borderColor_;
+      return *this;
+    }
+
+    SamplerCreateInfo &
+      setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      unnormalizedCoordinates = unnormalizedCoordinates_;
+      return *this;
+    }
+
+    operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCreateInfo *>( this );
+    }
+
+    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerCreateInfo const & ) const = default;
+#else
+    bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && ( mipmapMode == rhs.mipmapMode ) &&
+             ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) &&
+             ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) &&
+             ( anisotropyEnable == rhs.anisotropyEnable ) && ( maxAnisotropy == rhs.maxAnisotropy ) &&
+             ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) &&
+             ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) &&
+             ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+    }
+
+    bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    float mipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+    float maxAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    float minLod = {};
+    float maxLod = {};
+    VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+    VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
+  };
+  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCreateInfo>
+  {
+    using Type = SamplerCreateInfo;
+  };
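+
+  // Illustrative sketch only: a trilinear sampler with repeat addressing. Anisotropic
+  // filtering additionally requires the samplerAnisotropy device feature:
+  //
+  //   auto samplerInfo = vk::SamplerCreateInfo{}
+  //                        .setMagFilter( vk::Filter::eLinear )
+  //                        .setMinFilter( vk::Filter::eLinear )
+  //                        .setMipmapMode( vk::SamplerMipmapMode::eLinear )
+  //                        .setMaxLod( VK_LOD_CLAMP_NONE );
+  //   vk::Sampler sampler = device.createSampler( samplerInfo );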
+
+  struct SamplerYcbcrConversionCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eSamplerYcbcrConversionCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(
+      VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ =
+        VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+      VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+      VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+      VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+      VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+      VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , ycbcrModel( ycbcrModel_ )
+      , ycbcrRange( ycbcrRange_ )
+      , components( components_ )
+      , xChromaOffset( xChromaOffset_ )
+      , yChromaOffset( yChromaOffset_ )
+      , chromaFilter( chromaFilter_ )
+      , forceExplicitReconstruction( forceExplicitReconstruction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
+      operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo &
+      setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrModel = ycbcrModel_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo &
+      setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrRange = ycbcrRange_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo &
+      setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo &
+      setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xChromaOffset = xChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo &
+      setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      yChromaOffset = yChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaFilter = chromaFilter_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo &
+      setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+    {
+      forceExplicitReconstruction = forceExplicitReconstruction_;
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( this );
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
+             ( ycbcrModel == rhs.ycbcrModel ) && ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) &&
+             ( xChromaOffset == rhs.xChromaOffset ) && ( yChromaOffset == rhs.yChromaOffset ) &&
+             ( chromaFilter == rhs.chromaFilter ) && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+    }
+
+    bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel =
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+  };
+  static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
+  {
+    using Type = SamplerYcbcrConversionCreateInfo;
+  };
+  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
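+
+  // Illustrative sketch only: a conversion for a 2-plane 4:2:0 format; the format must be
+  // one the implementation supports with the samplerYcbcrConversion feature enabled:
+  //
+  //   vk::SamplerYcbcrConversionCreateInfo conversionInfo(
+  //     vk::Format::eG8B8R82Plane420Unorm,
+  //     vk::SamplerYcbcrModelConversion::eYcbcr709,
+  //     vk::SamplerYcbcrRange::eItuNarrow );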
+
+  class SamplerYcbcrConversion
+  {
+  public:
+    using CType = VkSamplerYcbcrConversion;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default;
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT
+      SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion( samplerYcbcrConversion )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = samplerYcbcrConversion;
+      return *this;
+    }
+#endif
+
+    SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversion const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSamplerYcbcrConversion m_samplerYcbcrConversion = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ),
+                 "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSamplerYcbcrConversion>
+  {
+    using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
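+
+  // The handle wrapper is the size of the underlying VkSamplerYcbcrConversion and converts
+  // contextually to bool for null checks; the CppType/isVulkanHandleType specializations
+  // above let generic code map ObjectType values back to the C++ type. Illustrative sketch:
+  //
+  //   static_assert( vk::isVulkanHandleType<vk::SamplerYcbcrConversion>::value, "" );
+  //   vk::SamplerYcbcrConversion conversion;  // null handle
+  //   if ( !conversion ) { /* not yet created */ }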
+
+  struct SemaphoreCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo &
+      operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
+    }
+
+    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
+#else
+    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
+  };
+  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
+  {
+    using Type = SemaphoreCreateInfo;
+  };
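+
+  // Illustrative sketch only: flags is reserved, so a binary semaphore needs no setup
+  // beyond default construction (timeline semaphores chain a SemaphoreTypeCreateInfo
+  // through pNext instead):
+  //
+  //   vk::Semaphore semaphore = device.createSemaphore( vk::SemaphoreCreateInfo{} );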
+
+  struct ShaderModuleCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {},
+                                                 size_t codeSize_ = {},
+                                                 const uint32_t * pCode_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , codeSize( codeSize_ )
+      , pCode( pCode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_,
+                            VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ )
+      : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo &
+      operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = codeSize_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCode = pCode_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ShaderModuleCreateInfo &
+      setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = code_.size() * 4;
+      pCode    = code_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleCreateInfo *>( this );
+    }
+
+    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ShaderModuleCreateInfo const & ) const = default;
+#else
+    bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode );
+    }
+
+    bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
+    size_t codeSize = {};
+    const uint32_t * pCode = {};
+  };
+  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
+  {
+    using Type = ShaderModuleCreateInfo;
+  };
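+
+  // Note that codeSize is a byte count while the enhanced-mode constructor and setCode
+  // take SPIR-V words, hence the `code_.size() * 4` above. Illustrative sketch, assuming
+  // a caller-owned std::vector<uint32_t> named spirv:
+  //
+  //   vk::ShaderModuleCreateInfo moduleInfo( {}, spirv );  // codeSize becomes spirv.size() * 4
+  //   vk::ShaderModule shaderModule = device.createShaderModule( moduleInfo );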
+
+  class SurfaceKHR
+  {
+  public:
+    using CType = VkSurfaceKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SurfaceKHR() = default;
+    VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = surfaceKHR;
+      return *this;
+    }
+#endif
+
+    SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceKHR const & ) const = default;
+#else
+    bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == rhs.m_surfaceKHR;
+    }
+
+    bool operator!=( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != rhs.m_surfaceKHR;
+    }
+
+    bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR < rhs.m_surfaceKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSurfaceKHR m_surfaceKHR = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ),
+                 "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSurfaceKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct SwapchainCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(
+      VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
+      VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
+      uint32_t minImageCount_ = {},
+      VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
+      VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
+      uint32_t imageArrayLayers_ = {},
+      VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
+      VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+      uint32_t queueFamilyIndexCount_ = {},
+      const uint32_t * pQueueFamilyIndices_ = {},
+      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ =
+        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+      VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ =
+        VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
+      VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+      VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
+      VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , surface( surface_ )
+      , minImageCount( minImageCount_ )
+      , imageFormat( imageFormat_ )
+      , imageColorSpace( imageColorSpace_ )
+      , imageExtent( imageExtent_ )
+      , imageArrayLayers( imageArrayLayers_ )
+      , imageUsage( imageUsage_ )
+      , imageSharingMode( imageSharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , preTransform( preTransform_ )
+      , compositeAlpha( compositeAlpha_ )
+      , presentMode( presentMode_ )
+      , clipped( clipped_ )
+      , oldSwapchain( oldSwapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SwapchainCreateInfoKHR(
+      VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_,
+      VULKAN_HPP_NAMESPACE::SurfaceKHR surface_,
+      uint32_t minImageCount_,
+      VULKAN_HPP_NAMESPACE::Format imageFormat_,
+      VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_,
+      VULKAN_HPP_NAMESPACE::Extent2D imageExtent_,
+      uint32_t imageArrayLayers_,
+      VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_,
+      VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
+      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ =
+        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+      VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ =
+        VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
+      VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+      VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
+      VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} )
+      : flags( flags_ )
+      , surface( surface_ )
+      , minImageCount( minImageCount_ )
+      , imageFormat( imageFormat_ )
+      , imageColorSpace( imageColorSpace_ )
+      , imageExtent( imageExtent_ )
+      , imageArrayLayers( imageArrayLayers_ )
+      , imageUsage( imageUsage_ )
+      , imageSharingMode( imageSharingMode_ )
+      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
+      , pQueueFamilyIndices( queueFamilyIndices_.data() )
+      , preTransform( preTransform_ )
+      , compositeAlpha( compositeAlpha_ )
+      , presentMode( presentMode_ )
+      , clipped( clipped_ )
+      , oldSwapchain( oldSwapchain_ )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
+      operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR &
+      setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR &
+      setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SwapchainCreateInfoKHR & setQueueFamilyIndices(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices   = queueFamilyIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    SwapchainCreateInfoKHR &
+      setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR &
+      setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+
+    operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR *>( this );
+    }
+
+    operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SwapchainCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) &&
+             ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) &&
+             ( imageColorSpace == rhs.imageColorSpace ) && ( imageExtent == rhs.imageExtent ) &&
+             ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) &&
+             ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
+             ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) &&
+             ( compositeAlpha == rhs.compositeAlpha ) && ( presentMode == rhs.presentMode ) &&
+             ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain );
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+    uint32_t minImageCount = {};
+    VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+    uint32_t imageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t * pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform =
+      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha =
+      VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
+    VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+    VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
+  };
+  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ),
+                 "struct and wrapper have different size!"
);
+  static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
+  {
+    using Type = SwapchainCreateInfoKHR;
+  };
+
+  struct ValidationCacheCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
+                                                       size_t initialDataSize_ = {},
+                                                       const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , initialDataSize( initialDataSize_ )
+      , pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_,
+                                  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & initialData_ )
+      : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &
+      operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ValidationCacheCreateInfoEXT &
+      setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof( T );
+      pInitialData    = initialData_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( this );
+    }
+
+    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationCacheCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
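+    // Editor's note: the templated initialData constructor and setInitialData overload
+    // above derive both fields from a single range: initialDataSize becomes
+    // initialData_.size() * sizeof( T ) and pInitialData becomes initialData_.data().
+    // A hedged sketch -- loadCacheFromDisk() is a hypothetical helper returning a blob
+    // previously retrieved with getValidationCacheDataEXT:
+    //
+    //   std::vector<uint8_t> blob = loadCacheFromDisk();  // hypothetical helper
+    //   vk::ValidationCacheCreateInfoEXT info;
+    //   info.setInitialData<const uint8_t>( blob );  // sets size and pointer together
+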
public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
+    size_t initialDataSize = {};
+    const void * pInitialData = {};
+  };
+  static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
+  {
+    using Type = ValidationCacheCreateInfoEXT;
+  };
+
+  class ValidationCacheEXT
+  {
+  public:
+    using CType = VkValidationCacheEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default;
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT( validationCacheEXT )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = validationCacheEXT;
+      return *this;
+    }
+#endif
+
+    ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ValidationCacheEXT const & ) const = default;
+#else
+    bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == rhs.m_validationCacheEXT;
+    }
+
+    bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != rhs.m_validationCacheEXT;
+    }
+
+    bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT < rhs.m_validationCacheEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkValidationCacheEXT m_validationCacheEXT = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ),
+                 "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead."
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoProfileKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoProfileKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid, + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : videoCodecOperation( videoCodecOperation_ ) + , chromaSubsampling( chromaSubsampling_ ) + , lumaBitDepth( lumaBitDepth_ ) + , chromaBitDepth( chromaBitDepth_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoProfileKHR( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileKHR( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoProfileKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & operator=( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileKHR & operator=( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoProfileKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoProfileKHR & setVideoCodecOperation( + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT + { + videoCodecOperation = videoCodecOperation_; + return *this; + } + + VideoProfileKHR & setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) + VULKAN_HPP_NOEXCEPT + { + chromaSubsampling = chromaSubsampling_; + return *this; + } + + VideoProfileKHR & + setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + lumaBitDepth = lumaBitDepth_; + return *this; + } + + VideoProfileKHR & + setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + chromaBitDepth = chromaBitDepth_; + return *this; + } + + operator VkVideoProfileKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfileKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfileKHR const & ) const = default; +# else + bool operator==( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) && + ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && + ( chromaBitDepth == rhs.chromaBitDepth ); + } + + bool operator!=( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + 
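+    // Editor's note: a VideoProfileKHR pins down a single codec operation together with
+    // the chroma subsampling and per-component bit depths it applies to. A hedged sketch
+    // for an 8-bit 4:2:0 decode profile -- the enum member names below are as of this
+    // provisional beta header and may change:
+    //
+    //   vk::VideoProfileKHR profile;
+    //   profile.setVideoCodecOperation( vk::VideoCodecOperationFlagBitsKHR::eDecodeH264EXT )
+    //     .setChromaSubsampling( vk::VideoChromaSubsamplingFlagBitsKHR::e420 )
+    //     .setLumaBitDepth( vk::VideoComponentBitDepthFlagBitsKHR::e8 )
+    //     .setChromaBitDepth( vk::VideoComponentBitDepthFlagBitsKHR::e8 );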
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid; + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {}; + }; + static_assert( sizeof( VideoProfileKHR ) == sizeof( VkVideoProfileKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoProfileKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoSessionCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionCreateInfoKHR( + uint32_t queueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ = {}, + VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t maxReferencePicturesSlotsCount_ = {}, + uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFamilyIndex( queueFamilyIndex_ ) + , flags( flags_ ) + , pVideoProfile( pVideoProfile_ ) + , pictureFormat( pictureFormat_ ) + , maxCodedExtent( maxCodedExtent_ ) + , referencePicturesFormat( referencePicturesFormat_ ) + , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) + , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VideoSessionCreateInfoKHR & + setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT + { + pVideoProfile = pVideoProfile_; + return *this; + } + + VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT + { + pictureFormat = pictureFormat_; + return *this; + } + + VideoSessionCreateInfoKHR & + 
setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT + { + maxCodedExtent = maxCodedExtent_; + return *this; + } + + VideoSessionCreateInfoKHR & + setReferencePicturesFormat( VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ ) VULKAN_HPP_NOEXCEPT + { + referencePicturesFormat = referencePicturesFormat_; + return *this; + } + + VideoSessionCreateInfoKHR & + setMaxReferencePicturesSlotsCount( uint32_t maxReferencePicturesSlotsCount_ ) VULKAN_HPP_NOEXCEPT + { + maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_; + return *this; + } + + VideoSessionCreateInfoKHR & + setMaxReferencePicturesActiveCount( uint32_t maxReferencePicturesActiveCount_ ) VULKAN_HPP_NOEXCEPT + { + maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_; + return *this; + } + + operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default; +# else + bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( flags == rhs.flags ) && ( pVideoProfile == rhs.pVideoProfile ) && + ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) && + ( referencePicturesFormat == rhs.referencePicturesFormat ) && + ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && + ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount ); + } + + bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile = {}; + VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Format referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t maxReferencePicturesSlotsCount = {}; + uint32_t maxReferencePicturesActiveCount = {}; + }; + static_assert( sizeof( VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoSessionCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoSessionParametersCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoSessionParametersCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {} ) VULKAN_HPP_NOEXCEPT + : videoSessionParametersTemplate( videoSessionParametersTemplate_ ) + , videoSession( videoSession_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & + operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersCreateInfoKHR & + operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParametersTemplate = videoSessionParametersTemplate_; + return *this; + } + + VideoSessionParametersCreateInfoKHR & + setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } + + operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default; +# else + bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && + ( videoSession == rhs.videoSession ); + } + + bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + }; + static_assert( sizeof( VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoSessionParametersCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct DisplayPowerInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT + : powerState( powerState_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & + operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT + { + powerState = powerState_; + return *this; + } + + operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPowerInfoEXT const & ) const = default; +#else + bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); + } + + bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + }; + static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayPowerInfoEXT; }; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ struct MappedMemoryRange { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : memory( memory_ ) , offset( offset_ ) , size( size_ ) {} - MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MappedMemoryRange ) - offsetof( MappedMemoryRange, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : MappedMemoryRange( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & + operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -47718,47 +52978,40 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT + operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MappedMemoryRange const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MappedMemoryRange const & ) const = default; #else - bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && + ( size == rhs.size ); } - bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = 
{}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" ); + static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -47767,601 +53020,2466 @@ namespace VULKAN_HPP_NAMESPACE using Type = MappedMemoryRange; }; - struct MemoryAllocateFlagsInfo + struct MemoryRequirements { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; - - VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, - uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , deviceMask( deviceMask_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : size( size_ ) + , alignment( alignment_ ) + , memoryTypeBits( memoryTypeBits_ ) {} - MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryRequirements( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryRequirements & + operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryAllocateFlagsInfo ) - offsetof( MemoryAllocateFlagsInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryRequirements const & ) const = default; +#else + bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; + uint32_t memoryTypeBits = {}; + }; + static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct MemoryRequirements2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryRequirements2 &
+      operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements2 const & ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+  };
+  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
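+
+  // Editor's note: MemoryRequirements2 is the extensible, pNext-chainable variant of
+  // MemoryRequirements, filled in by the *2 query entry points. A minimal sketch,
+  // assuming `device` (vk::Device) and `buffer` (vk::Buffer) already exist, that chains
+  // a MemoryDedicatedRequirements (defined elsewhere in this header) onto the query:
+  //
+  //   vk::MemoryDedicatedRequirements dedicated;
+  //   vk::MemoryRequirements2 req;
+  //   req.pNext = &dedicated;                        // extend the output chain
+  //   vk::BufferMemoryRequirementsInfo2 info( buffer );
+  //   device.getBufferMemoryRequirements2( &info, &req );
+  //   // req.memoryRequirements.size / .alignment behave like the Vulkan 1.0 query;
+  //   // dedicated.prefersDedicatedAllocation now reports the driver's preference.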
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryRequirements2>
+  {
+    using Type = MemoryRequirements2;
+  };
+  using MemoryRequirements2KHR = MemoryRequirements2;
+
+  struct DeviceGroupPresentCapabilitiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eDeviceGroupPresentCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(
+      std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : presentMask( presentMask_ )
+      , modes( modes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR &
+      operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR *>( this );
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) &&
+             ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+  };
+  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = DeviceGroupPresentCapabilitiesKHR; + }; + + struct PhysicalDeviceSurfaceInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT + : surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & + operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + surface = surface_; return *this; } - MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { - deviceMask = deviceMask_; - return *this; + return *reinterpret_cast( this ); } - - operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryAllocateFlagsInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; #else - bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); } - bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; - uint32_t deviceMask = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; }; - static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = MemoryAllocateFlagsInfo; + using Type = PhysicalDeviceSurfaceInfo2KHR; }; - struct MemoryAllocateInfo + struct DeviceMemoryOpaqueCaptureAddressInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; - VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, - uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : allocationSize( allocationSize_ ) - , memoryTypeIndex( memoryTypeIndex_ ) - {} - - MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryAllocateInfo ) - offsetof( MemoryAllocateInfo, pNext ) ); - return *this; - } - - MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT - { - allocationSize = allocationSize_; - return *this; - } - - MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT - { - memoryTypeIndex = memoryTypeIndex_; - return *this; - } - - - operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryAllocateInfo const& ) const = default; -#else - bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allocationSize == rhs.allocationSize ) - && ( memoryTypeIndex == rhs.memoryTypeIndex ); - } - - bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; - uint32_t memoryTypeIndex = {}; - - }; - static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = MemoryAllocateInfo; - }; - - struct MemoryBarrier - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; - - VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - {} - - MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryBarrier ) - offsetof( MemoryBarrier, pNext ) ); - return *this; - } - - MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } - - MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - dstAccessMask = dstAccessMask_; - return *this; - } - - - operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryBarrier const& ) const = default; -#else - bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ); - } - - bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - - }; - static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = MemoryBarrier; - }; - - struct MemoryDedicatedAllocateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; - - VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , buffer( buffer_ ) - {} - - MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) ); - return *this; - } - - MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - - operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryDedicatedAllocateInfo const& ) const = default; -#else - bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - - }; - static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = MemoryDedicatedAllocateInfo; - }; - - struct MemoryDedicatedRequirements - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; - - VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : prefersDedicatedAllocation( prefersDedicatedAllocation_ ) - , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) - {} - - MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) ); - return *this; - } - - MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryDedicatedRequirements const& ) const = default; -#else - bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) - && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); - } - - bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; - VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; - - }; - static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = MemoryDedicatedRequirements; - }; - - struct MemoryFdPropertiesKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; - - VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeBits( memoryTypeBits_ ) - {} - - MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryFdPropertiesKHR ) - offsetof( MemoryFdPropertiesKHR, pNext ) ); - return *this; - } - - MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryFdPropertiesKHR const& ) const = default; -#else - bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; - void* pNext = {}; - uint32_t memoryTypeBits = {}; - - }; - static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = MemoryFdPropertiesKHR; - }; - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct MemoryGetAndroidHardwareBufferInfoANDROID - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; - - VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT : memory( memory_ ) {} - MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & + operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo & + operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) - offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } - - operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; #else - bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == 
rhs.pNext ) - && ( memory == rhs.memory ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); } - bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = DeviceMemoryOpaqueCaptureAddressInfo; + }; + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + struct PresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, + const uint32_t * pImageIndices_ = {}, + VULKAN_HPP_NAMESPACE::Result * pResults_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , swapchainCount( swapchainCount_ ) + , pSwapchains( pSwapchains_ ) + , pImageIndices( pImageIndices_ ) + , pResults( pResults_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , swapchainCount( static_cast( swapchains_.size() ) ) + , pSwapchains( swapchains_.data() ) + , pImageIndices( imageIndices_.data() ) + , pResults( results_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() ); + VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) ); + VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) ); +# else + if ( swapchains_.size() != imageIndices_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" ); + } + if ( !results_.empty() && ( swapchains_.size() != results_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" ); + } + if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::PresentInfoKHR::PresentInfoKHR: 
!results_.empty() && ( imageIndices_.size() != results_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT + { + pSwapchains = pSwapchains_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setSwapchains( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) + VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( swapchains_.size() ); + pSwapchains = swapchains_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setImageIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT + { + pResults = pResults_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setResults( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( results_.size() ); + pResults = results_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentInfoKHR const & ) const = default; +#else + bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && + ( pSwapchains == rhs.pSwapchains ) && 
( pImageIndices == rhs.pImageIndices ) && + ( pResults == rhs.pResults ); + } + + bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; + const uint32_t * pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result * pResults = {}; }; - static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PresentInfoKHR; + }; + + struct SubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , pWaitDstStageMask( pWaitDstStageMask_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBuffers( pCommandBuffers_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitDstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBuffers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , pWaitDstStageMask( waitDstStageMask_.data() ) + , commandBufferCount( static_cast( commandBuffers_.size() ) ) + , pCommandBuffers( commandBuffers_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); +# else + if ( waitSemaphores_.size() != waitDstStageMask_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + 
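// Editor's note: the ArrayProxyNoTemporaries constructor of PresentInfoKHR
// (completed above) enforces matching sizes for swapchains_, imageIndices_,
// and the optional results_, via VULKAN_HPP_ASSERT or by throwing
// vk::LogicError. A minimal usage sketch with hypothetical handles (not part
// of this patch):
//
//   vk::Semaphore renderFinished = /* ... */;
//   vk::SwapchainKHR swapchain = /* ... */;
//   uint32_t imageIndex = /* acquired image index */;
//   vk::PresentInfoKHR presentInfo( renderFinished, swapchain, imageIndex );
//   // single lvalues bind as one-element ArrayProxyNoTemporaries spans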
"::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo & + setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitDstStageMask( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitDstStageMask_.size() ); + pWaitDstStageMask = waitDstStageMask_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setCommandBuffers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) + VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBuffers_.size() ); + pCommandBuffers = commandBuffers_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setSignalSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo const & ) const = 
default; +#else + bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && + ( commandBufferCount == rhs.commandBufferCount ) && ( pCommandBuffers == rhs.pCommandBuffers ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubmitInfo; + }; + + struct SemaphoreSubmitInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + uint64_t value_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ = {}, + uint32_t deviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , value( value_ ) + , stageMask( stageMask_ ) + , deviceIndex( deviceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSubmitInfoKHR( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfoKHR & + operator=( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSubmitInfoKHR & operator=( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + SemaphoreSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreSubmitInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreSubmitInfoKHR & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } + + SemaphoreSubmitInfoKHR & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ ) VULKAN_HPP_NOEXCEPT + { + stageMask = stageMask_; + return *this; + } + + SemaphoreSubmitInfoKHR & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndex = deviceIndex_; + return *this; + } + + operator VkSemaphoreSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSubmitInfoKHR const & ) const = default; +#else + bool operator==( SemaphoreSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && ( deviceIndex == rhs.deviceIndex ); + } + + bool operator!=( SemaphoreSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask = {}; + uint32_t deviceIndex = {}; + }; + static_assert( sizeof( SemaphoreSubmitInfoKHR ) == sizeof( VkSemaphoreSubmitInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SemaphoreSubmitInfoKHR; + }; + + struct SubmitInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo2KHR( + VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ = {}, + uint32_t waitSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pWaitSemaphoreInfos_ = {}, + uint32_t commandBufferInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos_ = {}, + uint32_t signalSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pSignalSemaphoreInfos_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ) + , pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ) + , commandBufferInfoCount( commandBufferInfoCount_ ) + , pCommandBufferInfos( pCommandBufferInfos_ ) + , signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ) + , pSignalSemaphoreInfos( pSignalSemaphoreInfos_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo2KHR( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2KHR( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SubmitInfo2KHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR( + VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitSemaphoreInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBufferInfos_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphoreInfos_ = {} ) + : flags( flags_ ) + , waitSemaphoreInfoCount( static_cast( waitSemaphoreInfos_.size() ) ) + , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ) + , commandBufferInfoCount( static_cast( commandBufferInfos_.size() ) ) + , pCommandBufferInfos( commandBufferInfos_.data() ) + , signalSemaphoreInfoCount( static_cast( signalSemaphoreInfos_.size() ) ) + , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2KHR & operator=( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2KHR & operator=( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + 
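// Editor's note: SemaphoreSubmitInfoKHR (defined above) and this SubmitInfo2KHR
// come from VK_KHR_synchronization2: each wait/signal entry carries its own
// semaphore, timeline value, stage mask, and device index, replacing the
// parallel pWaitSemaphores/pWaitDstStageMask arrays of the classic SubmitInfo.
// Illustrative call with hypothetical handles, using the submit2KHR entry
// point declared further down in this hunk:
//   vk::SemaphoreSubmitInfoKHR wait( acquired, 0,
//       vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput );
//   vk::CommandBufferSubmitInfoKHR cmd( commandBuffer );
//   queue.submit2KHR( vk::SubmitInfo2KHR( {}, wait, cmd ) );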
SubmitInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubmitInfo2KHR & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubmitInfo2KHR & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = waitSemaphoreInfoCount_; + return *this; + } + + SubmitInfo2KHR & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pWaitSemaphoreInfos_ ) + VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreInfos = pWaitSemaphoreInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR & setWaitSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = static_cast( waitSemaphoreInfos_.size() ); + pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo2KHR & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = commandBufferInfoCount_; + return *this; + } + + SubmitInfo2KHR & setPCommandBufferInfos( + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferInfos = pCommandBufferInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR & setCommandBufferInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = static_cast( commandBufferInfos_.size() ); + pCommandBufferInfos = commandBufferInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo2KHR & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = signalSemaphoreInfoCount_; + return *this; + } + + SubmitInfo2KHR & setPSignalSemaphoreInfos( + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreInfos = pSignalSemaphoreInfos_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2KHR & setSignalSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = static_cast( signalSemaphoreInfos_.size() ); + pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSubmitInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo2KHR const & ) const = default; +#else + bool operator==( SubmitInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) && + ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && + ( commandBufferInfoCount == rhs.commandBufferInfoCount ) && + ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && + ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) && + ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos ); + } + + bool operator!=( SubmitInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags = {}; + uint32_t waitSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pWaitSemaphoreInfos = {}; + uint32_t commandBufferInfoCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos = {}; + uint32_t signalSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pSignalSemaphoreInfos = {}; + }; + static_assert( sizeof( SubmitInfo2KHR ) == sizeof( VkSubmitInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SubmitInfo2KHR; + }; + + class Queue + { + public: + using CType = VkQueue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + public: + VULKAN_HPP_CONSTEXPR Queue() = default; + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT + { + m_queue = queue; + return *this; + } +#endif + + Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queue = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Queue const & ) const = default; +#else + bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue == rhs.m_queue; + } + + bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue != rhs.m_queue; + } + + bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue < rhs.m_queue; + } +#endif + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result + submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType::type + bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result + presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_utils === + + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_device_diagnostic_checkpoints === + + template + void getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CheckpointDataNVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_INTEL_performance_query === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_synchronization2 === + + template + VULKAN_HPP_NODISCARD Result + submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits, + 
VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CheckpointData2NVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queue != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queue == VK_NULL_HANDLE; + } + + private: + VkQueue m_queue = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DeviceQueueInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueIndex( queueIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueIndex = queueIndex_; + return *this; + } + + operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueInfo2 const & ) const = default; +#else + bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueIndex == rhs.queueIndex ); + } + + bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueIndex = {}; + }; + static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
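// Editor's note: the Queue handle wrapper and DeviceQueueInfo2 above pair with
// Device::getQueue2 from Vulkan 1.1. Minimal sketch with hypothetical values:
//
//   vk::DeviceQueueInfo2 queueInfo( vk::DeviceQueueCreateFlagBits::eProtected,
//                                   /*queueFamilyIndex=*/0, /*queueIndex=*/0 );
//   vk::Queue queue = device.getQueue2( queueInfo );
//   queue.waitIdle();  // submit/presentKHR/waitIdle are declared above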
); + + template <> + struct CppType + { + using Type = DeviceQueueInfo2; + }; + + struct FenceGetFdInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & + operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + FenceGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetFdInfoKHR const & ) const = default; +#else + bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
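// Editor's note: FenceGetFdInfoKHR (above) is the input to
// Device::getFenceFdKHR from VK_KHR_external_fence_fd. Sketch, assuming the
// fence was created with an exportable handle type (names illustrative):
//
//   vk::FenceGetFdInfoKHR getFdInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
//   int fd = device.getFenceFdKHR( getFdInfo );  // caller owns the returned fd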
); + + template <> + struct CppType + { + using Type = FenceGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct FenceGetWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & + operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + FenceGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
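// Editor's note: the Win32 variant above mirrors the fd path. Note that the
// generator defaults handleType to eOpaqueFd even here, so callers should set
// eOpaqueWin32 (or eOpaqueWin32Kmt) explicitly, e.g. (illustrative):
//
//   vk::FenceGetWin32HandleInfoKHR getHandleInfo(
//       fence, vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32 );
//   HANDLE handle = device.getFenceWin32HandleKHR( getHandleInfo );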
); + + template <> + struct CppType + { + using Type = FenceGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct GeneratedCommandsMemoryRequirementsInfoNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , maxSequencesCount( maxSequencesCount_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV & + operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & + setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequencesCount = maxSequencesCount_; + return *this; + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( maxSequencesCount == rhs.maxSequencesCount ); + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t maxSequencesCount = {}; + }; + static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == + sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoNV; + }; + + struct ImageDrmFormatModifierPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierPropertiesEXT & + operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT & + operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ); + } + + bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + void * pNext = {}; + uint64_t drmFormatModifier = {}; + }; + static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
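// Editor's note: ImageDrmFormatModifierPropertiesEXT (above) is a pure output
// struct -- non-const pNext and no setters -- filled by
// Device::getImageDrmFormatModifierPropertiesEXT. Sketch, assuming `image`
// was created with VK_EXT_image_drm_format_modifier tiling:
//
//   vk::ImageDrmFormatModifierPropertiesEXT drmProps =
//       device.getImageDrmFormatModifierPropertiesEXT( image );
//   uint64_t modifier = drmProps.drmFormatModifier;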
); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierPropertiesEXT; + }; + + struct ImageMemoryRequirementsInfo2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & + operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); + } + + bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
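// Editor's note: ImageMemoryRequirementsInfo2 (above) is the input half of the
// Vulkan 1.1 memory-requirements query; chaining MemoryDedicatedRequirements
// (whose legacy wrapper this hunk deletes at its start) onto the output
// reports dedicated-allocation preferences. Sketch:
//
//   vk::ImageMemoryRequirementsInfo2 info( image );
//   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2,
//                                                   vk::MemoryDedicatedRequirements>( info );
//   bool preferDedicated =
//       chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;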
); + + template <> + struct CppType + { + using Type = ImageMemoryRequirementsInfo2; + }; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + + struct SparseImageFormatProperties + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , imageGranularity( imageGranularity_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties & + operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties const & ) const = default; +#else + bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + }; + static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
+
+  struct SparseImageMemoryRequirements
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {},
+                                     uint32_t imageMipTailFirstLod_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT
+      : formatProperties( formatProperties_ )
+      , imageMipTailFirstLod( imageMipTailFirstLod_ )
+      , imageMipTailSize( imageMipTailSize_ )
+      , imageMipTailOffset( imageMipTailOffset_ )
+      , imageMipTailStride( imageMipTailStride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements &
+      operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements *>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SparseImageMemoryRequirements const & ) const = default;
+#else
+    bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) &&
+             ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) &&
+             ( imageMipTailStride == rhs.imageMipTailStride );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
+    uint32_t imageMipTailFirstLod = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  struct ImageSparseMemoryRequirementsInfo2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImageSparseMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 &
+      operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSparseMemoryRequirementsInfo2 &
+      operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( this );
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default;
+#else
+    bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+  };
+  static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
+  {
+    using Type = ImageSparseMemoryRequirementsInfo2;
+  };
+  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+
+  struct SparseImageMemoryRequirements2
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(
+      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements2 &
+      operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements2 *>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default;
+#else
+    bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value,
+                 "struct wrapper is not a standard layout!" );
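// [Editor's note] Sketch only, assuming a sparse-resident vk::Image `sparseImage` and a
// vk::Device `device` exist: getImageSparseMemoryRequirements2 consumes
// ImageSparseMemoryRequirementsInfo2 and returns one SparseImageMemoryRequirements2 per aspect:
//
//   vk::ImageSparseMemoryRequirementsInfo2 info{ sparseImage };
//   std::vector<vk::SparseImageMemoryRequirements2> reqs =
//       device.getImageSparseMemoryRequirements2( info );
//   for ( auto const & r : reqs )
//     vk::Extent3D granularity = r.memoryRequirements.formatProperties.imageGranularity;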
+
+  template <>
+  struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
+  {
+    using Type = SparseImageMemoryRequirements2;
+  };
+  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
+
+  struct SubresourceLayout
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT
+      : offset( offset_ )
+      , size( size_ )
+      , rowPitch( rowPitch_ )
+      , arrayPitch( arrayPitch_ )
+      , depthPitch( depthPitch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout &
+      operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubresourceLayout *>( this );
+    }
+
+    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubresourceLayout *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubresourceLayout const & ) const = default;
+#else
+    bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) &&
+             ( arrayPitch == rhs.arrayPitch ) && ( depthPitch == rhs.depthPitch );
+    }
+
+    bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
+  };
+  static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
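// [Editor's note] Hedged sketch: SubresourceLayout is what getImageSubresourceLayout returns,
// and that query is only valid for linearly tiled images. `linearImage` and `device` are
// assumed to exist, with the image created using vk::ImageTiling::eLinear:
//
//   vk::ImageSubresource subresource{ vk::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*arrayLayer*/ };
//   vk::SubresourceLayout layout = device.getImageSubresourceLayout( linearImage, subresource );
//   // layout.offset and layout.rowPitch let CPU code address texels in the mapped allocation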
+
+  struct ImageViewAddressPropertiesNVX
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : deviceAddress( deviceAddress_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewAddressPropertiesNVX &
+      operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
+      return *this;
+    }
+
+    operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewAddressPropertiesNVX *>( this );
+    }
+
+    operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewAddressPropertiesNVX *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default;
+#else
+    bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) &&
+             ( size == rhs.size );
+    }
+
+    bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+  };
+  static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewAddressPropertiesNVX>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
+  {
+    using Type = ImageViewAddressPropertiesNVX;
+  };
+
+  struct ImageViewHandleInfoNVX
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(
+      VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+      VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+      VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT
+      : imageView( imageView_ )
+      , descriptorType( descriptorType_ )
+      , sampler( sampler_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &
+      operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX &
+      setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewHandleInfoNVX *>( this );
+    }
+
+    operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewHandleInfoNVX *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageViewHandleInfoNVX const & ) const = default;
+#else
+    bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) &&
+             ( descriptorType == rhs.descriptorType ) && ( sampler == rhs.sampler );
+    }
+
+    bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+  };
+  static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
+  {
+    using Type = ImageViewHandleInfoNVX;
+  };
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct MemoryGetAndroidHardwareBufferInfoANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memory( memory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(
+      MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : MemoryGetAndroidHardwareBufferInfoANDROID(
+          *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID &
+      operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetAndroidHardwareBufferInfoANDROID &
+      operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID &
+      setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
+    }
+
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default;
+# else
+    bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
+    }
+
+    bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+  };
+  static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) ==
+                   sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value,
+                 "struct wrapper is not a standard layout!" );
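// [Editor's note] Illustrative only, and platform-guarded in real code (VK_USE_PLATFORM_ANDROID_KHR
// plus the VK_ANDROID_external_memory_android_hardware_buffer device extension). `exportableMemory`
// is an assumed vk::DeviceMemory that was allocated with the matching export info:
//
//   vk::MemoryGetAndroidHardwareBufferInfoANDROID getInfo{ exportableMemory };
//   AHardwareBuffer * buffer = device.getMemoryAndroidHardwareBufferANDROID( getInfo );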
 
   template <>
   struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
   {
     using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
   };
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
@@ -48372,33 +55490,35 @@ namespace VULKAN_HPP_NAMESPACE
   struct MemoryGetFdInfoKHR
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
 
-    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
-                                             VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+                          VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
+                            VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
       : memory( memory_ )
       , handleType( handleType_ )
     {}
 
-    MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetFdInfoKHR ) - offsetof( MemoryGetFdInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR &
+      operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
       return *this;
     }
 
-    MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -48410,51 +55530,47 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    MemoryGetFdInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
       handleType = handleType_;
       return *this;
     }
 
-
-    operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
+      return *reinterpret_cast<const VkMemoryGetFdInfoKHR *>( this );
     }
 
     operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
+      return *reinterpret_cast<VkMemoryGetFdInfoKHR *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryGetFdInfoKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryGetFdInfoKHR const & ) const = default;
 #else
-    bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory )
-          && ( handleType == rhs.handleType );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
+             ( handleType == rhs.handleType );
     }
 
-    bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
-    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
-
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
   };
-  static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
@@ -48463,36 +55579,171 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = MemoryGetFdInfoKHR;
   };
 
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryFdPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryFdPropertiesKHR &
+      operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
+    }
+
+    operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
+#else
+    bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
+  };
+  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
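// [Editor's note] A sketch of both directions of VK_KHR_external_memory_fd, assuming the
// extension is enabled and `exportableMemory` was allocated with an ExportMemoryAllocateInfo
// chained in; `importedFd` is an assumed dma-buf file descriptor received from elsewhere:
//
//   vk::MemoryGetFdInfoKHR getInfo{ exportableMemory,
//                                   vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
//   int fd = device.getMemoryFdKHR( getInfo );  // the caller owns the returned fd
//
//   vk::MemoryFdPropertiesKHR props = device.getMemoryFdPropertiesKHR(
//       vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, importedFd );
//   // props.memoryTypeBits restricts which memory types may import importedFd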
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
+  {
+    using Type = MemoryFdPropertiesKHR;
+  };
+
+  struct MemoryHostPointerPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryHostPointerPropertiesEXT &
+      operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
+#else
+    bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
+  };
+  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
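// [Editor's note] Sketch for VK_EXT_external_memory_host (assumed enabled): before importing a
// host allocation, query which memory types can back it. `hostPtr` is an assumed pointer that
// must be aligned to the device's minImportedHostPointerAlignment:
//
//   vk::MemoryHostPointerPropertiesEXT props = device.getMemoryHostPointerPropertiesEXT(
//       vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
//   // props.memoryTypeBits then constrains the memoryTypeIndex used for the import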
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
+  {
+    using Type = MemoryHostPointerPropertiesEXT;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
   struct MemoryGetWin32HandleInfoKHR
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;
 
-    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
-                                                      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(
+      VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
+        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
       : memory( memory_ )
       , handleType( handleType_ )
     {}
 
-    MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR &
+      operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }
 
-    MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -48504,52 +55755,49 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    MemoryGetWin32HandleInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
       handleType = handleType_;
       return *this;
     }
 
-
-    operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
+      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( this );
     }
 
     operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
+      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryGetWin32HandleInfoKHR const& ) const = default;
-#else
-    bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory )
-          && ( handleType == rhs.handleType );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
+             ( handleType == rhs.handleType );
     }
 
-    bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
 
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
-    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
   };
-  static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
@@ -48558,557 +55806,67 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  struct MemoryHeap
-  {
-
-
-    VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
-                                     VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
-      : size( size_ )
-      , flags( flags_ )
-    {}
-
-    MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MemoryHeap& operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMemoryHeap*>( this );
-    }
-
-    operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMemoryHeap*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryHeap const& ) const = default;
-#else
-    bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( size == rhs.size )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
-    VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
-
-  };
-  static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
-
-  struct MemoryHostPointerPropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
-      : memoryTypeBits( memoryTypeBits_ )
-    {}
-
-    MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) );
-      return *this;
-    }
-
-    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
-    }
-
-    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryHostPointerPropertiesEXT const& ) const = default;
-#else
-    bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
-    void* pNext = {};
-    uint32_t memoryTypeBits = {};
-
-  };
-  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
-  {
-    using Type = MemoryHostPointerPropertiesEXT;
-  };
-
-  struct MemoryOpaqueCaptureAddressAllocateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
-
-    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
-      : opaqueCaptureAddress( opaqueCaptureAddress_ )
-    {}
-
-    MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) - offsetof( MemoryOpaqueCaptureAddressAllocateInfo, pNext ) );
-      return *this;
-    }
-
-    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MemoryOpaqueCaptureAddressAllocateInfo& operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>(&rhs);
-      return *this;
-    }
-
-    MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
-    {
-      opaqueCaptureAddress = opaqueCaptureAddress_;
-      return *this;
-    }
-
-
-    operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
-    }
-
-    operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const& ) const = default;
-#else
-    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
-    }
-
-    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
-    const void* pNext = {};
-    uint64_t opaqueCaptureAddress = {};
-
-  };
-  static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
-  {
-    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
-  };
-
-  struct MemoryPriorityAllocateInfoEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
-
-    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
-      : priority( priority_ )
-    {}
-
-    MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) );
-      return *this;
-    }
-
-    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>(&rhs);
-      return *this;
-    }
-
-    MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
-    {
-      priority = priority_;
-      return *this;
-    }
-
-
-    operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
-    }
-
-    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryPriorityAllocateInfoEXT const& ) const = default;
-#else
-    bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( priority == rhs.priority );
-    }
-
-    bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
-    const void* pNext = {};
-    float priority = {};
-
-  };
-  static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
-  {
-    using Type = MemoryPriorityAllocateInfoEXT;
-  };
-
-  struct MemoryRequirements
-  {
-
-
-    VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
-                                             VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {},
-                                             uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
-      : size( size_ )
-      , alignment( alignment_ )
-      , memoryTypeBits( memoryTypeBits_ )
-    {}
-
-    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMemoryRequirements*>( this );
-    }
-
-    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMemoryRequirements*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryRequirements const& ) const = default;
-#else
-    bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( size == rhs.size )
-          && ( alignment == rhs.alignment )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
-    uint32_t memoryTypeBits = {};
-
-  };
-  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
-
-  struct MemoryRequirements2
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
-
-    VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
-      : memoryRequirements( memoryRequirements_ )
-    {}
-
-    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) );
-      return *this;
-    }
-
-    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMemoryRequirements2*>( this );
-    }
-
-    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMemoryRequirements2*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryRequirements2 const& ) const = default;
-#else
-    bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryRequirements == rhs.memoryRequirements );
-    }
-
-    bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
-
-  };
-  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eMemoryRequirements2>
-  {
-    using Type = MemoryRequirements2;
-  };
-
-  struct MemoryType
-  {
-
-
-    VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {},
-                                     uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
-      : propertyFlags( propertyFlags_ )
-      , heapIndex( heapIndex_ )
-    {}
-
-    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkMemoryType*>( this );
-    }
-
-    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMemoryType*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryType const& ) const = default;
-#else
-    bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( propertyFlags == rhs.propertyFlags )
-          && ( heapIndex == rhs.heapIndex );
-    }
-
-    bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
-    uint32_t heapIndex = {};
-
-  };
-  static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
   struct MemoryWin32HandlePropertiesKHR
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;
 
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
     VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
       : memoryTypeBits( memoryTypeBits_ )
     {}
 
-    MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 MemoryWin32HandlePropertiesKHR &
+      operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
       return *this;
     }
 
-
-    operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
+      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR *>( this );
     }
 
     operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
+      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default;
-#else
-    bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default;
+# else
+    bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
     }
 
-    bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
 
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
-    void* pNext = {};
-    uint32_t memoryTypeBits = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+    void * pNext = {};
+    uint32_t memoryTypeBits = {};
   };
-  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
@@ -49117,181 +55875,180 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-#ifdef VK_USE_PLATFORM_METAL_EXT
-  struct MetalSurfaceCreateInfoEXT
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct MemoryGetZirconHandleInfoFUCHSIA
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
 
-    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
-                                                    const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-      , pLayer( pLayer_ )
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA(
+      VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
+        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : memory( memory_ )
+      , handleType( handleType_ )
     {}
 
+    VULKAN_HPP_CONSTEXPR
+      MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA &
+      operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs );
       return *this;
     }
 
-    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>(&rhs);
-      return *this;
-    }
-
-    MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
     {
-      flags = flags_;
+      memory = memory_;
       return *this;
     }
 
-    MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT
+    MemoryGetZirconHandleInfoFUCHSIA &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      pLayer = pLayer_;
+      handleType = handleType_;
       return *this;
     }
 
-
-    operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
+      return *reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
     }
 
-    operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
+      return *reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default;
-#else
-    bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default;
+# else
+    bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pLayer == rhs.pLayer );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
+             ( handleType == rhs.handleType );
     }
 
-    bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
 
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
-    const CAMetalLayer* pLayer = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
   };
-  static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetZirconHandleInfoFUCHSIA>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
+  struct CppType<StructureType, StructureType::eMemoryGetZirconHandleInfoFUCHSIA>
   {
-    using Type = MetalSurfaceCreateInfoEXT;
+    using Type = MemoryGetZirconHandleInfoFUCHSIA;
   };
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
 
-  struct MultisamplePropertiesEXT
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct MemoryZirconHandlePropertiesFUCHSIA
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
 
-    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
     {}
 
-    MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryZirconHandlePropertiesFUCHSIA &
+      operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryZirconHandlePropertiesFUCHSIA &
+      operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
       return *this;
     }
 
-    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
     {
-      *this = rhs;
+      return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
     }
 
-    MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>(&rhs);
-      return *this;
+      return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
     }
 
-
-    operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default;
+# else
+    bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
     }
 
-    operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( MultisamplePropertiesEXT const& ) const = default;
-#else
-    bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && 
( pNext == rhs.pNext ) - && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); - } - - bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; }; - static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = MultisamplePropertiesEXT; + using Type = MemoryZirconHandlePropertiesFUCHSIA; }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ struct PastPresentationTimingGOOGLE { - - - VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, - uint64_t desiredPresentTime_ = {}, - uint64_t actualPresentTime_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {}, + uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, - uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT + uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT : presentID( presentID_ ) , desiredPresentTime( desiredPresentTime_ ) , actualPresentTime( actualPresentTime_ ) @@ -49299,774 +56056,75 @@ namespace VULKAN_HPP_NAMESPACE , presentMargin( presentMargin_ ) {} - PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PastPresentationTimingGOOGLE( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingGOOGLE & + operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT + operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PastPresentationTimingGOOGLE const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; #else - bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( presentID == rhs.presentID ) - && ( desiredPresentTime == rhs.desiredPresentTime ) - && ( actualPresentTime == rhs.actualPresentTime ) - && ( earliestPresentTime == rhs.earliestPresentTime ) - && ( presentMargin == rhs.presentMargin ); + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && + ( actualPresentTime == rhs.actualPresentTime ) && ( earliestPresentTime == rhs.earliestPresentTime ) && + ( presentMargin == rhs.presentMargin ); } - bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t presentID = {}; - uint64_t desiredPresentTime = {}; - uint64_t actualPresentTime = {}; + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; uint64_t earliestPresentTime = {}; - uint64_t presentMargin = {}; - - }; - static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct PerformanceConfigurationAcquireInfoINTEL - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; - - VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - {} - - PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceConfigurationAcquireInfoINTEL ) - offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) ); - return *this; - } - - PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - - operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default; -#else - bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ); - } - - bool 
operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; - - }; - static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PerformanceConfigurationAcquireInfoINTEL; - }; - - struct PerformanceCounterDescriptionKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; - - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, - std::array const& name_ = {}, - std::array const& category_ = {}, - std::array const& description_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , name( name_ ) - , category( category_ ) - , description( description_ ) - {} - - PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceCounterDescriptionKHR ) - offsetof( PerformanceCounterDescriptionKHR, pNext ) ); - return *this; - } - - PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceCounterDescriptionKHR& operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceCounterDescriptionKHR const& ) const = default; -#else - bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( name == rhs.name ) - && ( category == rhs.category ) - && ( description == rhs.description ); - } - - bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - - }; - static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PerformanceCounterDescriptionKHR; - }; - - struct PerformanceCounterKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; - - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, - std::array const& uuid_ = {} ) VULKAN_HPP_NOEXCEPT - : unit( unit_ ) - , scope( scope_ ) - , storage( storage_ ) - , uuid( uuid_ ) - {} - - PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceCounterKHR ) - offsetof( PerformanceCounterKHR, pNext ) ); - return *this; - } - - PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceCounterKHR& operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceCounterKHR const& ) const = default; -#else - bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( unit == rhs.unit ) - && ( scope == rhs.scope ) - && ( storage == rhs.storage ) - && ( uuid == rhs.uuid ); - } - - bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; - - }; - static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PerformanceCounterKHR; - }; - - union PerformanceCounterResultKHR - { - PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const& rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); - } - - PerformanceCounterResultKHR( int32_t int32_ = {} ) - : int32( int32_ ) - {} - - PerformanceCounterResultKHR( int64_t int64_ ) - : int64( int64_ ) - {} - - PerformanceCounterResultKHR( uint32_t uint32_ ) - : uint32( uint32_ ) - {} - - PerformanceCounterResultKHR( uint64_t uint64_ ) - : uint64( uint64_ ) - {} - - PerformanceCounterResultKHR( float float32_ ) - : float32( float32_ ) - {} - - PerformanceCounterResultKHR( double float64_ ) - : float64( float64_ ) - {} - - PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT - { - int32 = int32_; - return *this; - } - - PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT - { - int64 = int64_; - return *this; - } - - PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT - { - uint32 = uint32_; - return *this; - } - - PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT - { - uint64 = uint64_; - return *this; - } - - PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT - { - float32 = float32_; - return *this; - } - - PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT - { - float64 = float64_; - return *this; - } - - VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) ); - return *this; - } - - operator VkPerformanceCounterResultKHR const&() const - { - return *reinterpret_cast(this); - } - - operator VkPerformanceCounterResultKHR &() - { - return *reinterpret_cast(this); - } - - int32_t int32; - int64_t int64; - uint32_t uint32; - uint64_t uint64; - float float32; - double float64; - }; - - struct PerformanceMarkerInfoINTEL - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL; - - VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT - : marker( marker_ ) - {} - - PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) ); - return *this; - } - - PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT - { - marker = marker_; - return *this; - } - - - operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto 
operator<=>( PerformanceMarkerInfoINTEL const& ) const = default; -#else - bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); - } - - bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; - const void* pNext = {}; - uint64_t marker = {}; - - }; - static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PerformanceMarkerInfoINTEL; - }; - - struct PerformanceOverrideInfoINTEL - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; - - VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, - VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, - uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , enable( enable_ ) - , parameter( parameter_ ) - {} - - PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) ); - return *this; - } - - PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT - { - enable = enable_; - return *this; - } - - PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT - { - parameter = parameter_; - return *this; - } - - - operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default; -#else - bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( enable == rhs.enable ) - && ( parameter == rhs.parameter ); - } - - bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = 
VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; - VULKAN_HPP_NAMESPACE::Bool32 enable = {}; - uint64_t parameter = {}; - - }; - static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PerformanceOverrideInfoINTEL; - }; - - struct PerformanceQuerySubmitInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; - - VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : counterPassIndex( counterPassIndex_ ) - {} - - PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceQuerySubmitInfoKHR ) - offsetof( PerformanceQuerySubmitInfoKHR, pNext ) ); - return *this; - } - - PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceQuerySubmitInfoKHR& operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT - { - counterPassIndex = counterPassIndex_; - return *this; - } - - - operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceQuerySubmitInfoKHR const& ) const = default; -#else - bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( counterPassIndex == rhs.counterPassIndex ); - } - - bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; - const void* pNext = {}; - uint32_t counterPassIndex = {}; - - }; - static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PerformanceQuerySubmitInfoKHR; - }; - - struct PerformanceStreamMarkerInfoINTEL - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; - - VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT - : marker( marker_ ) - {} - - PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) ); - return *this; - } - - PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT - { - marker = marker_; - return *this; - } - - - operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default; -#else - bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); - } - - bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; - const void* pNext = {}; - uint32_t marker = {}; - - }; - static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PerformanceStreamMarkerInfoINTEL; + uint64_t presentMargin = {}; }; + static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
 union PerformanceValueDataINTEL
 {
- PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
 {
- memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
 }
- PerformanceValueDataINTEL( uint32_t value32_ = {} )
- : value32( value32_ )
- {}
+ PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {}
- PerformanceValueDataINTEL( uint64_t value64_ )
- : value64( value64_ )
- {}
+ PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {}
- PerformanceValueDataINTEL( float valueFloat_ )
- : valueFloat( valueFloat_ )
- {}
+ PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {}
- PerformanceValueDataINTEL( const char* valueString_ )
- : valueString( valueString_ )
- {}
+ PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {}
 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
 {
@@ -50092,61 +56150,66 @@ namespace VULKAN_HPP_NAMESPACE
 return *this;
 }
- PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT
 {
 valueString = valueString_;
 return *this;
 }
- VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL &
+ operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
 {
- memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
 return *this;
 }
- operator VkPerformanceValueDataINTEL const&() const
+ operator VkPerformanceValueDataINTEL const &() const
 {
- return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
+ return *reinterpret_cast<const VkPerformanceValueDataINTEL *>( this );
 }
 operator VkPerformanceValueDataINTEL &()
 {
- return *reinterpret_cast<VkPerformanceValueDataINTEL*>(this);
+ return *reinterpret_cast<VkPerformanceValueDataINTEL *>( this );
 }
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
- uint32_t value32;
- uint64_t value64;
- float valueFloat;
+ uint32_t value32;
+ uint64_t value64;
+ float valueFloat;
 VULKAN_HPP_NAMESPACE::Bool32 valueBool;
- const char* valueString;
+ const char * valueString;
#else
- uint32_t value32;
- uint64_t value64;
- float valueFloat;
- VkBool32 valueBool;
- const char* valueString;
-#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+ uint32_t value32;
+ uint64_t value64;
+ float valueFloat;
+ VkBool32 valueBool;
+ const char * valueString;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
 };
 struct PerformanceValueINTEL
 {
-
-
- PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
- VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ PerformanceValueINTEL(
+ VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
 : type( type_ )
 , data( data_ )
 {}
- PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = rhs;
- } + PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceValueINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -50162,2874 +56225,8001 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; - }; - static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" ); + static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - struct PhysicalDevice16BitStorageFeatures + struct PipelineExecutableInfoKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; - VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT - : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : pipeline( pipeline_ ) + , executableIndex( executableIndex_ ) {} - PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & + operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR 
const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - storageBuffer16BitAccess = storageBuffer16BitAccess_; + pipeline = pipeline_; return *this; } - PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT { - uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + executableIndex = executableIndex_; return *this; } - PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - storagePushConstant16 = storagePushConstant16_; - return *this; + return *reinterpret_cast( this ); } - PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT { - storageInputOutput16 = storageInputOutput16_; - return *this; + return *reinterpret_cast( this ); } - - operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInfoKHR const & ) const = default; #else - bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) - && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) - && ( storagePushConstant16 == rhs.storagePushConstant16 ) - && ( storageInputOutput16 == rhs.storageInputOutput16 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && + ( executableIndex == rhs.executableIndex ); } - bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDevice16BitStorageFeatures;
- void* pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
-
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ uint32_t executableIndex = {};
 };
- static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
 template <>
- struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
+ struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
 {
- using Type = PhysicalDevice16BitStorageFeatures;
+ using Type = PipelineExecutableInfoKHR;
 };
- struct PhysicalDevice8BitStorageFeatures
+ struct PipelineExecutableInternalRepresentationKHR
 {
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+ StructureType::ePipelineExecutableInternalRepresentationKHR;
- VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
- , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
- , storagePushConstant8( storagePushConstant8_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14
+ PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 isText_ = {},
+ size_t dataSize_ = {},
+ void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : name( name_ )
+ , description( description_ )
+ , isText( isText_ )
+ , dataSize( dataSize_ )
+ , pData( pData_ )
 {}
- PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(
+ PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs )
+ VULKAN_HPP_NOEXCEPT
+ : PipelineExecutableInternalRepresentationKHR(
+ *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
+ {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ template <typename T>
+ PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_,
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_,
+ VULKAN_HPP_NAMESPACE::Bool32 isText_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
+ : name( name_ )
+ , description( description_ )
+ , isText( isText_ )
+ , dataSize( data_.size() * sizeof( T ) )
+ , pData( data_.data() )
+ {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR &
+ operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutableInternalRepresentationKHR &
+ operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
 {
- memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice8BitStorageFeatures ) - offsetof( PhysicalDevice8BitStorageFeatures, pNext ) );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
 return *this;
 }
- PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
 {
- *this = rhs;
+ return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR *>( this );
 }
- PhysicalDevice8BitStorageFeatures& operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
 {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>(&rhs);
+ return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default;
+#else
+ bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) &&
+ ( description == rhs.description ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) &&
+ ( pData == rhs.pData );
+ }
+
+ bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+ size_t dataSize = {};
+ void * pData = {};
+ };
+ static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) ==
+ sizeof( VkPipelineExecutableInternalRepresentationKHR ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value,
+ "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PipelineExecutableInternalRepresentationKHR; + }; + + struct PipelineInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT + : pipeline( pipeline_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDevice8BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - storageBuffer8BitAccess = storageBuffer8BitAccess_; + pipeline = pipeline_; return *this; } - PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; - return *this; + return *reinterpret_cast( this ); } - PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT { - storagePushConstant8 = storagePushConstant8_; - return *this; + return *reinterpret_cast( this ); } - - operator VkPhysicalDevice8BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDevice8BitStorageFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInfoKHR const & ) const = default; #else - bool operator==( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) - && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) - && ( storagePushConstant8 == rhs.storagePushConstant8 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); } - bool operator!=( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; - void* pNext = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; }; - static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = PhysicalDevice8BitStorageFeatures; + using Type = PipelineInfoKHR; }; - struct PhysicalDeviceASTCDecodeFeaturesEXT + struct PipelineExecutablePropertiesKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; - VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT - : decodeModeSharedExponent( decodeModeSharedExponent_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, + std::array const & name_ = {}, + std::array const & description_ = {}, + uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : stages( stages_ ) + , name( name_ ) + , description( description_ ) + , subgroupSize( subgroupSize_ ) {} - PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutablePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR & + operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default; +#else 
+ bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) &&
+ ( description == rhs.description ) && ( subgroupSize == rhs.subgroupSize );
+ }
+
+ bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ uint32_t subgroupSize = {};
+ };
+ static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
+ {
+ using Type = PipelineExecutablePropertiesKHR;
+ };
+
+ union PipelineExecutableStatisticValueKHR
+ {
+ PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs )
+ VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+ }
+
+ PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {}
+
+ PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {}
+
+ PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {}
+
+ PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {}
+
+ PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ b32 = b32_;
+ return *this;
+ }
+
+ PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ i64 = i64_;
+ return *this;
+ }
+
+ PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ u64 = u64_;
+ return *this;
+ }
+
+ PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ f64 = f64_;
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR &
+ operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+ return *this;
+ }
+
+ operator VkPipelineExecutableStatisticValueKHR const &() const
+ {
+ return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR *>( this );
+ }
+
+ operator VkPipelineExecutableStatisticValueKHR &()
+ {
+ return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR *>( this );
+ }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+ VULKAN_HPP_NAMESPACE::Bool32 b32;
+ int64_t i64;
+ uint64_t u64;
+ double f64;
+#else
+ VkBool32 b32;
+ int64_t i64;
+ uint64_t u64;
+ double f64;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+ };
+
+ struct PipelineExecutableStatisticKHR
+ {
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ PipelineExecutableStatisticKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ =
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} )
+ VULKAN_HPP_NOEXCEPT
+ : name( name_ )
+ , description( description_ )
+ , format( format_ )
+ , value( value_ )
+ {}
+
+ PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
+ {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelineExecutableStatisticKHR &
+ operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineExecutableStatisticKHR *>( this );
+ }
+
+ operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineExecutableStatisticKHR *>( this );
+ }
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format =
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+ };
+ static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value,
+ "struct wrapper is not a standard layout!"
);
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
+ {
+ using Type = PipelineExecutableStatisticKHR;
+ };
+
+ struct RefreshCycleDurationGOOGLE
+ {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
+ : refreshDuration( refreshDuration_ )
+ {}
+
+ VULKAN_HPP_CONSTEXPR
+ RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
+ {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VULKAN_HPP_CONSTEXPR_14 RefreshCycleDurationGOOGLE &
+ operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
+ return *this;
+ }
+
+ operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
+ }
+
+ operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
+#else
+ bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( refreshDuration == rhs.refreshDuration );
+ }
+
+ bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint64_t refreshDuration = {};
+ };
+ static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ),
+ "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value,
+ "struct wrapper is not a standard layout!"
+
+  struct SemaphoreGetFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                                VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
+                                                  VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
+      VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR &
+      operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
     {
-      decodeModeSharedExponent = decodeModeSharedExponent_;
+      semaphore = semaphore_;
       return *this;
     }
-
-    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    SemaphoreGetFdInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+      handleType = handleType_;
+      return *this;
     }

-    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( this );
     }

+    operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetFdInfoKHR *>( this );
+    }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
+             ( handleType == rhs.handleType );
     }

-    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
   };
-  static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
+  struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
   {
-    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
+    using Type = SemaphoreGetFdInfoKHR;
   };
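  // Usage sketch (illustrative only; assumes a `device` and a `semaphore` that
  // was created exportable with VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT):
  //   vk::SemaphoreGetFdInfoKHR getFdInfo{ semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
  //   int fd = device.getSemaphoreFdKHR( getFdInfo );  // the caller owns the returned fd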
-  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct SemaphoreGetWin32HandleInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
-      : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(
+      VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
+        VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , handleType( handleType_ )
     {}

-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR &
+      operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) );
+      *this = *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
     {
-      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+      semaphore = semaphore_;
       return *this;
     }
-
-    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    SemaphoreGetWin32HandleInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+      handleType = handleType_;
+      return *this;
     }

-    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+      return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>( this );
     }

-    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
+             ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
   };
-  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
+  struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
   {
-    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    using Type = SemaphoreGetWin32HandleInfoKHR;
   };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
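  // Usage sketch (illustrative only, Win32 builds; assumes `device` and an
  // exportable `semaphore`; the returned handle should be closed with CloseHandle):
  //   vk::SemaphoreGetWin32HandleInfoKHR getInfo{ semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 };
  //   HANDLE handle = device.getSemaphoreWin32HandleKHR( getInfo );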
-  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct SemaphoreGetZirconHandleInfoFUCHSIA
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
-      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
-      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
-      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
-      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
-      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
-      , advancedBlendAllOperations( advancedBlendAllOperations_ )
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(
+      VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
+        VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , handleType( handleType_ )
     {}

-    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;

-    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(&rhs);
-      return *this;
-    }
-
-    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
-          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
-          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
-          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
-          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
-          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
-    }
-
-    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
-    void* pNext = {};
-    uint32_t advancedBlendMaxColorAttachments = {};
-    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
-    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
-    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
-    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
-    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
-
-  };
-  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
-  {
-    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
-  };
-
-  struct PhysicalDeviceBufferDeviceAddressFeatures
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
-      : bufferDeviceAddress( bufferDeviceAddress_ )
-      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
-      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
     {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA &
+      operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetZirconHandleInfoFUCHSIA &
+      operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) - offsetof( PhysicalDeviceBufferDeviceAddressFeatures, pNext ) );
+      *this = *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceBufferDeviceAddressFeatures& operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
     {
-      bufferDeviceAddress = bufferDeviceAddress_;
+      semaphore = semaphore_;
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreGetZirconHandleInfoFUCHSIA &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      handleType = handleType_;
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
     {
-      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
-      return *this;
+      return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
     }

-    operator VkPhysicalDeviceBufferDeviceAddressFeatures const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+      return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
     }

-    operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default;
+# else
+    bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
+             ( handleType == rhs.handleType );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
-          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
-          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
-    }
-
-    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
-    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
-    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
   };
-  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreGetZirconHandleInfoFUCHSIA>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
+  struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
   {
-    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
+    using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
   };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
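  // Usage sketch (illustrative only, Fuchsia builds; assumes `device` and an
  // exportable `semaphore`):
  //   vk::SemaphoreGetZirconHandleInfoFUCHSIA getInfo{ semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA };
  //   zx_handle_t zirconHandle = device.getSemaphoreZirconHandleFUCHSIA( getInfo );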
-  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoGetMemoryPropertiesKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoGetMemoryPropertiesKHR;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
-      : bufferDeviceAddress( bufferDeviceAddress_ )
-      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
-      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR(
+      uint32_t memoryBindIndex_ = {},
+      VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryBindIndex( memoryBindIndex_ )
+      , pMemoryRequirements( pMemoryRequirements_ )
     {}

-    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      VideoGetMemoryPropertiesKHR( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoGetMemoryPropertiesKHR( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoGetMemoryPropertiesKHR( *reinterpret_cast<VideoGetMemoryPropertiesKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR &
+      operator=( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoGetMemoryPropertiesKHR & operator=( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) );
+      *this = *reinterpret_cast<VideoGetMemoryPropertiesKHR const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    VideoGetMemoryPropertiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    VideoGetMemoryPropertiesKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      bufferDeviceAddress = bufferDeviceAddress_;
+      memoryBindIndex = memoryBindIndex_;
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    VideoGetMemoryPropertiesKHR &
+      setPMemoryRequirements( VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ ) VULKAN_HPP_NOEXCEPT
     {
-      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      pMemoryRequirements = pMemoryRequirements_;
       return *this;
     }

-    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    operator VkVideoGetMemoryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
-      return *this;
+      return *reinterpret_cast<const VkVideoGetMemoryPropertiesKHR *>( this );
     }

-    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkVideoGetMemoryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+      return *reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( this );
     }

-    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoGetMemoryPropertiesKHR const & ) const = default;
+# else
+    bool operator==( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) &&
+             ( pMemoryRequirements == rhs.pMemoryRequirements );
     }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
-          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
-          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
-    }
-
-    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
-    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
-    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoGetMemoryPropertiesKHR;
+    const void * pNext = {};
+    uint32_t memoryBindIndex = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements = {};
   };
-  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( VideoGetMemoryPropertiesKHR ) == sizeof( VkVideoGetMemoryPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoGetMemoryPropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
+  struct CppType<StructureType, StructureType::eVideoGetMemoryPropertiesKHR>
   {
-    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+    using Type = VideoGetMemoryPropertiesKHR;
   };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
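  // Usage sketch (illustrative only; the video-queue structs are provisional
  // beta API, so names may still change between header revisions). The struct
  // points at a caller-owned MemoryRequirements2 that the query fills in:
  //   vk::MemoryRequirements2 memoryRequirements{};
  //   vk::VideoGetMemoryPropertiesKHR videoMemProps{ /*memoryBindIndex*/ 0, &memoryRequirements };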
-  struct PhysicalDeviceCoherentMemoryFeaturesAMD
+  struct ImportFenceFdInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
-      : deviceCoherentMemory( deviceCoherentMemory_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+                                               VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
+                                               VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
+                                                 VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+                                               int fd_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
     {}

-    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &
+      operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
+      *this = *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+    ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
     {
-      deviceCoherentMemory = deviceCoherentMemory_;
+      fence = fence_;
       return *this;
     }
-
-    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
+    ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+      flags = flags_;
+      return *this;
     }

-    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    ImportFenceFdInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+      handleType = handleType_;
+      return *this;
     }

+    ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default;
+    operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceFdInfoKHR *>( this );
+    }
+
+    operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceFdInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportFenceFdInfoKHR const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) &&
+             ( handleType == rhs.handleType ) && ( fd == rhs.fd );
     }

-    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
   };
-  static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
+  struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
   {
-    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
+    using Type = ImportFenceFdInfoKHR;
   };
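  // Usage sketch (illustrative only; assumes `device`, a `fence`, and an `fd`
  // obtained from a sync-fd producer; sync-fd imports must be temporary):
  //   vk::ImportFenceFdInfoKHR importInfo{ fence, vk::FenceImportFlagBits::eTemporary,
  //                                        vk::ExternalFenceHandleTypeFlagBits::eSyncFd, fd };
  //   device.importFenceFdKHR( importInfo );  // ownership of fd passes to the implementation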
-  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ImportFenceWin32HandleInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT
-      : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
-      , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+                                     VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
+                                     VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
+                                       VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+                                     HANDLE handle_ = {},
+                                     LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
     {}

-    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &
+      operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) );
+      *this = *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+    ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
     {
-      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
+      fence = fence_;
       return *this;
     }

-    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+    ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
+      flags = flags_;
       return *this;
     }
-
-    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    ImportFenceWin32HandleInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+      handleType = handleType_;
+      return *this;
     }

-    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+      handle = handle_;
+      return *this;
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
-          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
+      name = name_;
+      return *this;
     }

-    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( this );
+    }
+
+    operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) &&
+             ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
-    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
   };
-  static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
+  struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
   {
-    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+    using Type = ImportFenceWin32HandleInfoKHR;
   };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/

-  struct PhysicalDeviceConditionalRenderingFeaturesEXT
+  struct ImportSemaphoreFdInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT
-      : conditionalRendering( conditionalRendering_ )
-      , inheritedConditionalRendering( inheritedConditionalRendering_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+                                VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
+                                  VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+                                int fd_ = {} ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
     {}

-    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &
+      operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) );
+      *this = *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
     {
-      conditionalRendering = conditionalRendering_;
+      semaphore = semaphore_;
       return *this;
     }

-    PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      inheritedConditionalRendering = inheritedConditionalRendering_;
+      flags = flags_;
       return *this;
     }
-
-    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreFdInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+      handleType = handleType_;
+      return *this;
     }

-    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+      fd = fd_;
+      return *this;
     }

+    operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( this );
+    }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default;
+    operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportSemaphoreFdInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( conditionalRendering == rhs.conditionalRendering )
-          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
+             ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
     }

-    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
-    VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
   };
-  static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
+  struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
   {
-    using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
+    using Type = ImportSemaphoreFdInfoKHR;
   };
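  // Usage sketch (illustrative only; assumes `device`, a `semaphore`, and an
  // `fd` previously exported as an opaque fd):
  //   vk::ImportSemaphoreFdInfoKHR importInfo{ semaphore, {}, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, fd };
  //   device.importSemaphoreFdKHR( importInfo );  // the fd is consumed on success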
); template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + using Type = ImportSemaphoreFdInfoKHR; }; - struct PhysicalDeviceConservativeRasterizationPropertiesEXT +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportSemaphoreWin32HandleInfoKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportSemaphoreWin32HandleInfoKHR; - VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, - float maxExtraPrimitiveOverestimationSize_ = {}, - float extraPrimitiveOverestimationSizeGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT - : primitiveOverestimationSize( primitiveOverestimationSize_ ) - , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) - , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) - , primitiveUnderestimation( primitiveUnderestimation_ ) - , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) - , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) - , degenerateLinesRasterized( degenerateLinesRasterized_ ) - , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) - , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) {} - PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const 
VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default; -#else - bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) - && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) - && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) - && ( primitiveUnderestimation == rhs.primitiveUnderestimation ) - && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) - && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) - && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) - && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) - && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); - } - - bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; - void* pNext = {}; - float primitiveOverestimationSize = {}; - float maxExtraPrimitiveOverestimationSize = {}; - float extraPrimitiveOverestimationSizeGranularity = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; - VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; - VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; - VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; - VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; - - }; - static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; - }; - - struct PhysicalDeviceCooperativeMatrixFeaturesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT - : cooperativeMatrix( cooperativeMatrix_ ) - , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - cooperativeMatrix = cooperativeMatrix_; + semaphore = semaphore_; return *this; } - PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT { - cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + flags = flags_; return *this; } - - operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; } - operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handle = handle_; + return *this; } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default; -#else - bool 
operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cooperativeMatrix == rhs.cooperativeMatrix ) - && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); + name = name_; + return *this; } - bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && + ( name == rhs.name ); + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; }; - static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + using Type = ImportSemaphoreWin32HandleInfoKHR; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct PhysicalDeviceCooperativeMatrixPropertiesNV +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportSemaphoreZirconHandleInfoFUCHSIA { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT - : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + zx_handle_t zirconHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , zirconHandle( zirconHandle_ ) {} - PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default; -#else - bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); - } - - bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; - - }; - static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" 
-  struct PhysicalDeviceCooperativeMatrixPropertiesNV
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct ImportSemaphoreZirconHandleInfoFUCHSIA
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT
-      : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImportSemaphoreZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                              VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+                                              VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
+                                                VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+                                              zx_handle_t zirconHandle_ = {} ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , zirconHandle( zirconHandle_ )
     {}

-    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;

-    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
-    }
-
-    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
-    }
-
-    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
-
-  };
-  static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
-  {
-    using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
-  };
-
-  struct PhysicalDeviceCornerSampledImageFeaturesNV
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT
-      : cornerSampledImage( cornerSampledImage_ )
+    ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportSemaphoreZirconHandleInfoFUCHSIA(
+          *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
     {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &
+      operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportSemaphoreZirconHandleInfoFUCHSIA &
+      operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreZirconHandleInfoFUCHSIA &
+      setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
     {
-      cornerSampledImage = cornerSampledImage_;
+      semaphore = semaphore_;
       return *this;
     }

-
-    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreZirconHandleInfoFUCHSIA &
+      setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+      flags = flags_;
+      return *this;
     }

-    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreZirconHandleInfoFUCHSIA &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+      handleType = handleType_;
+      return *this;
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( cornerSampledImage == rhs.cornerSampledImage );
+      zirconHandle = zirconHandle_;
+      return *this;
     }

-    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
+    }
+
+    operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & ) const = default;
+#  else
+    bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
+             ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
+             ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 );
+    }
+
+    bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+#  endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore     semaphore = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    zx_handle_t zirconHandle = {};
   };
-  static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
+  struct CppType<StructureType, StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA>
   {
-    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
+    using Type = ImportSemaphoreZirconHandleInfoFUCHSIA;
   };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
-  struct PhysicalDeviceCoverageReductionModeFeaturesNV
+  struct InitializePerformanceApiInfoINTEL
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eInitializePerformanceApiInfoINTEL;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT
-      : coverageReductionMode( coverageReductionMode_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pUserData( pUserData_ )
     {}

-    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL &
+      operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
     {
-      coverageReductionMode = coverageReductionMode_;
+      pUserData = pUserData_;
       return *this;
     }

-
-    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
     }

-    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( coverageReductionMode == rhs.coverageReductionMode );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData );
     }

-    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
+    const void *                        pNext = {};
+    void *                              pUserData = {};
   };
-  static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
+  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
   {
-    using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
+    using Type = InitializePerformanceApiInfoINTEL;
   };
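// Usage sketch (illustrative): InitializePerformanceApiInfoINTEL carries only an
// opaque pUserData pointer. Assuming a vk::Device `device` with
// VK_INTEL_performance_query enabled:
//
//   vk::InitializePerformanceApiInfoINTEL initInfo{};   // pUserData left null
//   device.initializePerformanceApiINTEL( initInfo );
//   // ... record and collect INTEL performance queries ...
//   device.uninitializePerformanceApiINTEL();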
-  struct PhysicalDeviceCustomBorderColorFeaturesEXT
+  struct DisplayEventInfoEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {},
-                                                                     VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {} ) VULKAN_HPP_NOEXCEPT
-      : customBorderColors( customBorderColors_ )
-      , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ =
+                             VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
+      : displayEvent( displayEvent_ )
     {}

-    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT &
+      operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) - offsetof( PhysicalDeviceCustomBorderColorFeaturesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceCustomBorderColorFeaturesEXT& operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
+    DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
     {
-      customBorderColors = customBorderColors_;
+      displayEvent = displayEvent_;
       return *this;
     }

-    PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
-      return *this;
+      return *reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
     }

-
-    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
+      return *reinterpret_cast<VkDisplayEventInfoEXT *>( this );
     }

-    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayEventInfoEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( customBorderColors == rhs.customBorderColors )
-          && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent );
     }

-    bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
-    VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
   };
-  static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
+  struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
   {
-    using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
+    using Type = DisplayEventInfoEXT;
   };
-  struct PhysicalDeviceCustomBorderColorPropertiesEXT
+  struct XYColorEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
     {}

-    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) - offsetof( PhysicalDeviceCustomBorderColorPropertiesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
     {
-      *this = rhs;
-    }
-
-    PhysicalDeviceCustomBorderColorPropertiesEXT& operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>(&rhs);
+      x = x_;
       return *this;
     }

-
-    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+      y = y_;
+      return *this;
     }

-    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
+      return *reinterpret_cast<const VkXYColorEXT *>( this );
     }

+    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT *>( this );
+    }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( XYColorEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
+      return ( x == rhs.x ) && ( y == rhs.y );
     }

-    bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
-    void* pNext = {};
-    uint32_t maxCustomBorderColorSamplers = {};
-
+    float x = {};
+    float y = {};
   };
-  static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );

-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
-  {
-    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
-  };
-  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
+  struct HdrMetadataEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT
-      : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
+                                         float maxLuminance_ = {},
+                                         float minLuminance_ = {},
+                                         float maxContentLightLevel_ = {},
+                                         float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayPrimaryRed( displayPrimaryRed_ )
+      , displayPrimaryGreen( displayPrimaryGreen_ )
+      , displayPrimaryBlue( displayPrimaryBlue_ )
+      , whitePoint( whitePoint_ )
+      , maxLuminance( maxLuminance_ )
+      , minLuminance( minLuminance_ )
+      , maxContentLightLevel( maxContentLightLevel_ )
+      , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
     {}

-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
+    HdrMetadataEXT &
+      setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
     {
-      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
+      displayPrimaryRed = displayPrimaryRed_;
       return *this;
     }

-
-    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    HdrMetadataEXT &
+      setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+      displayPrimaryGreen = displayPrimaryGreen_;
+      return *this;
     }

-    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    HdrMetadataEXT &
+      setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+      displayPrimaryBlue = displayPrimaryBlue_;
+      return *this;
     }

+    HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      whitePoint = whitePoint_;
+      return *this;
+    }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default;
+    HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLuminance = maxLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLuminance = minLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxContentLightLevel = maxContentLightLevel_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
+      return *this;
+    }
+
+    operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHdrMetadataEXT *>( this );
+    }
+
+    operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHdrMetadataEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( HdrMetadataEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) &&
+             ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) &&
+             ( whitePoint == rhs.whitePoint ) && ( maxLuminance == rhs.maxLuminance ) &&
+             ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) &&
+             ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
     }

-    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT    displayPrimaryRed = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT    displayPrimaryGreen = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT    displayPrimaryBlue = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT    whitePoint = {};
+    float                               maxLuminance = {};
+    float                               minLuminance = {};
+    float                               maxContentLightLevel = {};
+    float                               maxFrameAverageLightLevel = {};
   };
-  static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
+  struct CppType<StructureType, StructureType::eHdrMetadataEXT>
   {
-    using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+    using Type = HdrMetadataEXT;
   };
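// Usage sketch (illustrative): XYColorEXT holds CIE 1931 chromaticity coordinates,
// and HdrMetadataEXT bundles the SMPTE ST 2086 mastering-display values for
// VK_EXT_hdr_metadata. Assuming a vk::Device `device` and a vk::SwapchainKHR
// `swapchain` created on an HDR surface (primaries below are the BT.2020 red
// point and the D65 white point, chosen here only as an example):
//
//   auto metadata = vk::HdrMetadataEXT{}
//                     .setDisplayPrimaryRed( { 0.680f, 0.320f } )
//                     .setWhitePoint( { 0.3127f, 0.3290f } )
//                     .setMaxLuminance( 1000.0f )
//                     .setMinLuminance( 0.001f );
//   device.setHdrMetadataEXT( swapchain, metadata );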
-  struct PhysicalDeviceDepthClipEnableFeaturesEXT
+  struct SemaphoreSignalInfo
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
-      : depthClipEnable( depthClipEnable_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                              uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , value( value_ )
     {}

-    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo &
+      operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
     {
-      depthClipEnable = depthClipEnable_;
+      semaphore = semaphore_;
       return *this;
     }

-
-    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+      value = value_;
+      return *this;
     }

-    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+      return *reinterpret_cast<const VkSemaphoreSignalInfo *>( this );
     }

+    operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreSignalInfo *>( this );
+    }

-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreSignalInfo const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( depthClipEnable == rhs.depthClipEnable );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
+             ( value == rhs.value );
     }

-    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore     semaphore = {};
+    uint64_t                            value = {};
   };
-  static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
+  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
   {
-    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
+    using Type = SemaphoreSignalInfo;
   };
+  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
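// Usage sketch (illustrative): SemaphoreSignalInfo is the Vulkan 1.2 /
// VK_KHR_timeline_semaphore signal descriptor, with SemaphoreSignalInfoKHR kept
// as an alias. Assuming a vk::Device `device` and a timeline vk::Semaphore
// `timeline`, signalling a counter value from the host looks like:
//
//   device.signalSemaphore( vk::SemaphoreSignalInfo{}
//                             .setSemaphore( timeline )
//                             .setValue( 42 ) );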
-  struct PhysicalDeviceDepthStencilResolveProperties
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoSessionParametersUpdateInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoSessionParametersUpdateInfoKHR;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
-                                                                      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
-                                                                      VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
-                                                                      VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT
-      : supportedDepthResolveModes( supportedDepthResolveModes_ )
-      , supportedStencilResolveModes( supportedStencilResolveModes_ )
-      , independentResolveNone( independentResolveNone_ )
-      , independentResolve( independentResolve_ )
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : updateSequenceCount( updateSequenceCount_ )
     {}

-    PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthStencilResolveProperties ) - offsetof( PhysicalDeviceDepthStencilResolveProperties, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;

-    PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceDepthStencilResolveProperties& operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceDepthStencilResolveProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
-    }
-
-    operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
-          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
-          && ( independentResolveNone == rhs.independentResolveNone )
-          && ( independentResolve == rhs.independentResolve );
-    }
-
-    bool operator!=( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
-    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
-    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
-    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
-
-  };
-  static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
-  {
-    using Type = PhysicalDeviceDepthStencilResolveProperties;
-  };
-
-  struct PhysicalDeviceDescriptorIndexingFeatures
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
-      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
-      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
-      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
-      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
-      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
-      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
-      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
-      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
-      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
-      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
-      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
-      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
-      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
-      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
-      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
-      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
-      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
-      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
-      , runtimeDescriptorArray( runtimeDescriptorArray_ )
+    VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
     {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR &
+      operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionParametersUpdateInfoKHR &
+      operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingFeatures ) - offsetof( PhysicalDeviceDescriptorIndexingFeatures, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures& operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      updateSequenceCount = updateSequenceCount_;
       return *this;
     }

-    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
-      return *this;
+      return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( this );
     }

-    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
-      return *this;
+      return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR *>( this );
     }

-    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
+#  else
+    bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
-      return *this;
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount );
     }

-    PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
-      return *this;
-    }
-
-    PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
-    {
-      runtimeDescriptorArray = runtimeDescriptorArray_;
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceDescriptorIndexingFeatures const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
-    }
-
-    operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
-          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
-          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
-          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
-          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
-          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
-          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
-          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
-          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
-          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
-          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
-          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
-          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
-          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
-          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
-          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
-          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
-          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
-          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
-          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
-    }
-
-    bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+#  endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
-    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
-    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR;
+    const void *                        pNext = {};
+    uint32_t                            updateSequenceCount = {};
   };
-  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoSessionParametersUpdateInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
+  struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
   {
-    using Type = PhysicalDeviceDescriptorIndexingFeatures;
+    using Type = VideoSessionParametersUpdateInfoKHR;
   };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
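// Usage sketch (illustrative): under VK_ENABLE_BETA_EXTENSIONS, each update of a
// video session's parameter object must carry a monotonically increasing sequence
// number. Assuming a vk::Device `device` and a vk::VideoSessionParametersKHR
// `params` from the provisional VK_KHR_video_queue extension:
//
//   auto updateInfo = vk::VideoSessionParametersUpdateInfoKHR{}.setUpdateSequenceCount( 1 );
//   device.updateVideoSessionParametersKHR( params, updateInfo );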
maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, + uint32_t semaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, + const uint64_t * pValues_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , semaphoreCount( semaphoreCount_ ) + , pSemaphores( pSemaphores_ ) + , pValues( pValues_ ) {} - PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingProperties ) - offsetof( PhysicalDeviceDescriptorIndexingProperties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceDescriptorIndexingProperties& operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceDescriptorIndexingProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const& ) const = default; -#else - bool operator==( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) - && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) - && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) - && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) - && ( 
shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) - && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) - && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) - && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) - && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) - && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) - && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) - && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) - && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) - && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) - && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) - && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); - } - - bool operator!=( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; - void* pNext = {}; - uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; - uint32_t maxPerStageUpdateAfterBindResources = {}; - uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; - uint32_t 
maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; - - }; - static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PhysicalDeviceDescriptorIndexingProperties; - }; - - struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceGeneratedCommands( deviceGeneratedCommands_ ) + SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreWaitInfo( *reinterpret_cast( &rhs ) ) {} - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo( + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ = {} ) + : flags( flags_ ) + , semaphoreCount( static_cast( semaphores_.size() ) ) + , pSemaphores( semaphores_.data() ) + , pValues( values_.data() ) { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) - offsetof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, pNext ) ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() ); +# else + if ( semaphores_.size() != values_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & + operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV& operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ 
) VULKAN_HPP_NOEXCEPT { - deviceGeneratedCommands = deviceGeneratedCommands_; + flags = flags_; return *this; } - - operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + semaphoreCount = semaphoreCount_; + return *this; } - operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSemaphores = pSemaphores_; + return *this; } +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & setSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_ ) + VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( semaphores_.size() ); + pSemaphores = semaphores_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& ) const = default; + SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & + setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreWaitInfo const & ) const = default; #else - bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( semaphoreCount == rhs.semaphoreCount ) && ( pSemaphores == rhs.pSemaphores ) && + ( pValues == rhs.pValues ); } - bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; + uint32_t semaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {}; + const uint64_t * pValues = {}; }; - static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + using Type = SemaphoreWaitInfo; + }; + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Device; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueAccelerationStructureKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueAccelerationStructureNV = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueBuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueBufferView = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + using UniqueCommandBuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCommandPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCuFunctionNVX = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCuModuleNVX = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDeferredOperationKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + using UniqueDescriptorSet = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorSetLayout = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorUpdateTemplate = UniqueHandle; + using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectFree; + }; + using UniqueDeviceMemory = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueEvent = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueFence = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueFramebuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueImage = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueImageView = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueIndirectCommandsLayoutNV = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePipeline = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePipelineCache = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = 
ObjectDestroy; + }; + using UniquePipelineLayout = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePrivateDataSlotEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueQueryPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueRenderPass = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSampler = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSamplerYcbcrConversion = UniqueHandle; + using UniqueSamplerYcbcrConversionKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSemaphore = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueShaderModule = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSwapchainKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueValidationCacheEXT = UniqueHandle; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueVideoSessionKHR = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueVideoSessionParametersKHR = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Device + { + public: + using CType = VkDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: + VULKAN_HPP_CONSTEXPR Device() = default; + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT + { + m_device = device; + return *this; + } +#endif + + Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_device = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Device const & ) const = default; +#else + bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device == rhs.m_device; + } + + bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device != rhs.m_device; + } + + bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device < rhs.m_device; + } +#endif + + //=== VK_VERSION_1_0 === + + template + PFN_vkVoidFunction + getProcAddr( const char * pName, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction + getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
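The SemaphoreWaitInfo struct introduced in this hunk wraps VkSemaphoreWaitInfo, the Vulkan 1.2 timeline-semaphore wait descriptor, and its new ArrayProxyNoTemporaries constructor enforces that the semaphore and value spans have the same length (it asserts under VULKAN_HPP_NO_EXCEPTIONS, otherwise throws vk::LogicError). A minimal calling-side sketch, not part of the patch: the helper name, `device`, and `timeline` are hypothetical, and it assumes default vulkan.hpp configuration (struct constructors, enhanced mode, and exceptions enabled) plus a semaphore created with vk::SemaphoreType::eTimeline.

    #include <vulkan/vulkan.hpp>

    // Hypothetical helper: blocks until 'timeline' reaches 'value' (or times out).
    vk::Result waitForTimelineValue( vk::Device device, vk::Semaphore timeline, uint64_t value )
    {
        const vk::Semaphore semaphores[] = { timeline };
        const uint64_t      values[]     = { value };

        // Uses the ArrayProxyNoTemporaries constructor added above; passing spans
        // of different sizes would assert or throw vk::LogicError.
        vk::SemaphoreWaitInfo waitInfo{ vk::SemaphoreWaitFlags{}, semaphores, values };

        // Enhanced-mode wrapper around vkWaitSemaphores; returns eSuccess or eTimeout.
        return device.waitSemaphores( waitInfo, UINT64_MAX );
    }

Keeping the semaphore and value arrays side by side, as here, is what the constructor's size check is designed to catch: the two parallel arrays must describe the same set of waits.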
template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue + getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + allocateMemory( const MemoryAllocateInfo & allocateInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result + flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + flushMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( + uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + invalidateMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + 
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( + VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirementsAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createFence( const FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createFenceUnique( const FenceCreateInfo & createInfo, + Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + resetFences( ArrayProxy const & fences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSemaphore( const SemaphoreCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 
+ typename ResultValueType>::type + createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createEvent( const EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createEventUnique( const EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createQueryPool( const QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + ArrayProxy const & data, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createBuffer( const BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createBufferUnique( const BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createBufferView( const BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createBufferViewUnique( const BufferViewCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImage( const ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImageUnique( const ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# 
endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( + VULKAN_HPP_NAMESPACE::Image image, + const ImageSubresource & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImageView( const ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImageViewUnique( const ImageViewCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( 
VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createShaderModule( const ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createPipelineCache( const PipelineCacheCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue createGraphicsPipeline( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelineUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + 
ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue createComputePipeline( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createComputePipelineUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE 
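/* Editorial sketch, not part of the patch: the enhanced-mode wrappers declared
   above collapse the C-style create/check pattern into one call. A minimal
   example, assuming a valid vk::Device `device`, a vk::PipelineLayout `layout`,
   and a vk::ShaderModule `shader` (all names illustrative):

       #include <stdexcept>
       #include <utility>
       #include <vulkan/vulkan.hpp>

       vk::UniquePipeline makeComputePipeline( vk::Device device,
                                               vk::PipelineLayout layout,
                                               vk::ShaderModule shader )
       {
         const vk::PipelineShaderStageCreateInfo stage{
           {}, vk::ShaderStageFlagBits::eCompute, shader, "main" };
         const vk::ComputePipelineCreateInfo info{ {}, stage, layout };

         // Unlike most create*Unique wrappers, pipeline creation returns a
         // ResultValue instead of throwing on every non-success code, so
         // .result must be checked before consuming .value.
         auto rv = device.createComputePipelineUnique( nullptr, info );  // no pipeline cache
         if ( rv.result != vk::Result::eSuccess )
           throw std::runtime_error( "compute pipeline creation failed" );
         return std::move( rv.value );
       }
*/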
+ template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSampler( const SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSamplerUnique( const SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( 
VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorSetLayout( + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, 
+ Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorPool( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DescriptorSetAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename B = 
DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSets( ArrayProxy const & descriptorWrites, + ArrayProxy const & descriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createFramebuffer( const FramebufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createFramebufferUnique( const FramebufferCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass( const RenderPassCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPassUnique( const RenderPassCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createCommandPool( const CommandPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
+      allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
+              typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+              typename B                      = CommandBufferAllocator,
+              typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
+      allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
+                              CommandBufferAllocator &          commandBufferAllocator,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+              typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+      allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <
+      typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+      typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>,
+      typename B                      = CommandBufferAllocator,
+      typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
+                              int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+      allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
+                                    CommandBufferAllocator &          commandBufferAllocator,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
+                             uint32_t                                    commandBufferCount,
+                             const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+                             ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
+               uint32_t                                    commandBufferCount,
+               const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+               ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_VERSION_1_1 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      bindBufferMemory2( uint32_t                                           bindInfoCount,
+                         const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      bindImageMemory2( uint32_t                                          bindInfoCount,
+                        const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( + const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements2( + const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( + const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements2( + const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirements2Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( + const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & 
sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue + getQueue2( const DeviceQueueInfo2 & queueInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( + const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversion( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupport( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_2 === + + template + VULKAN_HPP_NODISCARD Result + createRenderPass2( const 
VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass2( const RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result + getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphore( const SemaphoreSignalInfo & signalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress + getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress + getBufferAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t 
getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result + createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename ImageAllocator = std::allocator<Image>,
+              typename Dispatch       = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+              typename B              = ImageAllocator,
+              typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
+      getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                             ImageAllocator &                   imageAllocator,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                           uint64_t                           timeout,
+                           VULKAN_HPP_NAMESPACE::Semaphore    semaphore,
+                           VULKAN_HPP_NAMESPACE::Fence        fence,
+                           uint32_t *                         pImageIndex,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<uint32_t>
+      acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                           uint64_t                           timeout,
+                           VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                           VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR(
+      VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
+      getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR(
+      VULKAN_HPP_NAMESPACE::SurfaceKHR                       surface,
+      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+      getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
+                            uint32_t *                                            pImageIndex,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<uint32_t>
+      acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_display_swapchain ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR(
+      uint32_t                                             swapchainCount,
+      const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
+      VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchains,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
+              typename Dispatch              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
+
createSharedSwapchainsKHR( ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SwapchainKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + createSharedSwapchainsKHR( ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSharedSwapchainKHR( + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = SwapchainKHRAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_marker === + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + template + VULKAN_HPP_NODISCARD Result + createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyVideoSessionKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pVideoSessionMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename VideoGetMemoryPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = VideoGetMemoryPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t 
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t videoSessionBindMemoryCount, const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, ArrayProxy<const VideoBindMemoryKHR> const & videoSessionBindMemories, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VideoSessionParametersKHR>::type createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VideoSessionParametersKHR, Dispatch>>::type createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VideoSessionParametersUpdateInfoKHR & updateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
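// Lifecycle sketch for the provisional video-queue API above (beta: compiled
// only when VK_ENABLE_BETA_EXTENSIONS is defined, and subject to change). The
// createInfo is assumed to be filled out for a supported codec profile;
// per-requirement memory allocation is elided.
#include <vulkan/vulkan.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
void videoSessionLifecycle( vk::Device device, vk::VideoSessionCreateInfoKHR const & createInfo )
{
  vk::VideoSessionKHR session = device.createVideoSessionKHR( createInfo );
  // One entry per memory binding the implementation requires for this session.
  std::vector<vk::VideoGetMemoryPropertiesKHR> requirements =
    device.getVideoSessionMemoryRequirementsKHR( session );
  // ... allocate vk::DeviceMemory for each requirement, then call bindVideoSessionMemoryKHR ...
  (void)requirements;
  device.destroyVideoSessionKHR( session );
}
#endif
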
+    //=== VK_NVX_binary_import ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<CuModuleNVX>::type createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CuModuleNVX, Dispatch>>::type createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<CuFunctionNVX>::type createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CuFunctionNVX, Dispatch>>::type createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_NVX_image_view_handle ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_AMD_shader_info ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
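// Usage sketch for VK_AMD_shader_info above: dumping the disassembly of the
// fragment stage of a pipeline. Assumes the device enabled the extension and
// that the pipeline actually contains a fragment shader.
#include <iostream>
#include <string>
#include <vulkan/vulkan.hpp>

void printFragmentDisassembly( vk::Device device, vk::Pipeline pipeline )
{
  // The enhanced overload sizes the buffer for us and returns a byte vector.
  std::vector<uint8_t> info = device.getShaderInfoAMD(
    pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eDisassembly );
  std::cout << std::string( info.begin(), info.end() ) << '\n';
}
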
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_NV_external_memory_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_device_group ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_maintenance1 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_memory_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
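// Sketch for the VK_KHR_device_group query declared above: checking whether
// heap 0 of a two-GPU device group supports peer copies. The heap and device
// indices are illustrative assumptions.
#include <vulkan/vulkan.hpp>

bool peerCopySupported( vk::Device device )
{
  vk::PeerMemoryFeatureFlags features = device.getGroupPeerMemoryFeaturesKHR(
    /*heapIndex=*/0, /*localDeviceIndex=*/0, /*remoteDeviceIndex=*/1 );
  // eCopyDst means a cross-GPU copy may target this heap.
  return static_cast<bool>( features & vk::PeerMemoryFeatureFlagBits::eCopyDst );
}
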
+    //=== VK_KHR_external_memory_fd ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_semaphore_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_external_semaphore_fd ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
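// Sketch for VK_KHR_external_memory_fd above: exporting an opaque POSIX file
// descriptor for a device-memory allocation. Assumes the allocation was made
// exportable with VkExportMemoryAllocateInfo; the caller owns the returned fd
// and must close() it when done.
#include <vulkan/vulkan.hpp>

int exportMemoryFd( vk::Device device, vk::DeviceMemory memory )
{
  vk::MemoryGetFdInfoKHR getFdInfo{};
  getFdInfo.memory     = memory;
  getFdInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  return device.getMemoryFdKHR( getFdInfo );  // enhanced mode: throws vk::SystemError on failure
}
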
+    //=== VK_KHR_descriptor_update_template ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
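// Sketch for VK_KHR_descriptor_update_template above: updating a descriptor
// set from a plain struct in one call. The single-binding layout and the
// DescriptorData struct are illustrative assumptions.
#include <cstddef>
#include <vulkan/vulkan.hpp>

struct DescriptorData
{
  VkDescriptorBufferInfo uniform;  // binding 0: one uniform buffer
};

void updateWithTemplate( vk::Device device, vk::DescriptorSet set, vk::DescriptorSetLayout layout,
                         DescriptorData const & data )
{
  // Entry: dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride.
  vk::DescriptorUpdateTemplateEntry entry{ 0, 0, 1, vk::DescriptorType::eUniformBuffer,
                                           offsetof( DescriptorData, uniform ), sizeof( DescriptorData ) };
  vk::DescriptorUpdateTemplateCreateInfo createInfo{};
  createInfo.descriptorUpdateEntryCount = 1;
  createInfo.pDescriptorUpdateEntries   = &entry;
  createInfo.templateType               = vk::DescriptorUpdateTemplateType::eDescriptorSet;
  createInfo.descriptorSetLayout        = layout;
  vk::DescriptorUpdateTemplate tmpl = device.createDescriptorUpdateTemplateKHR( createInfo );
  device.updateDescriptorSetWithTemplateKHR( set, tmpl, &data );
  device.destroyDescriptorUpdateTemplateKHR( tmpl );
}
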
+    //=== VK_EXT_display_control ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence, Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence, Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_GOOGLE_display_timing ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PastPresentationTimingGOOGLEAllocator, typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
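// Sketch for VK_GOOGLE_display_timing above: reading back per-frame
// presentation statistics. Only frames presented with VkPresentTimesInfoGOOGLE
// produce records, so the returned vector may be empty.
#include <vulkan/vulkan.hpp>

void inspectPresentTimes( vk::Device device, vk::SwapchainKHR swapchain )
{
  std::vector<vk::PastPresentationTimingGOOGLE> timings =
    device.getPastPresentationTimingGOOGLE( swapchain );
  for ( vk::PastPresentationTimingGOOGLE const & t : timings )
  {
    // Compare t.actualPresentTime against t.desiredPresentTime to detect
    // missed vsync deadlines and adjust frame pacing.
    (void)t;
  }
}
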
+    //=== VK_EXT_hdr_metadata ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> const & swapchains, ArrayProxy<const HdrMetadataEXT> const & metadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_create_renderpass2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass, Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
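// Sketch for the VK_EXT_hdr_metadata entry points above: tagging a swapchain
// with HDR10 mastering-display metadata. The primaries and luminance values
// are typical placeholders, not recommendations.
#include <vulkan/vulkan.hpp>

void setHdr10Metadata( vk::Device device, vk::SwapchainKHR swapchain )
{
  vk::HdrMetadataEXT metadata{};
  metadata.displayPrimaryRed         = vk::XYColorEXT{ 0.708f, 0.292f };   // BT.2020 red
  metadata.displayPrimaryGreen       = vk::XYColorEXT{ 0.170f, 0.797f };
  metadata.displayPrimaryBlue        = vk::XYColorEXT{ 0.131f, 0.046f };
  metadata.whitePoint                = vk::XYColorEXT{ 0.3127f, 0.3290f }; // D65
  metadata.maxLuminance              = 1000.0f;  // nits
  metadata.minLuminance              = 0.005f;
  metadata.maxContentLightLevel      = 1000.0f;
  metadata.maxFrameAverageLightLevel = 400.0f;
  device.setHdrMetadataEXT( swapchain, metadata );  // one metadata struct per swapchain
}
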
+    //=== VK_KHR_shared_presentable_image ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_fence_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_external_fence_fd ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_performance_query ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_debug_utils ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
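// Sketch for VK_EXT_debug_utils above: the successor to VK_EXT_debug_marker,
// keyed on vk::ObjectType instead of the debug-report enum. Assumes the
// instance enabled VK_EXT_debug_utils; the queue handle is illustrative.
#include <vulkan/vulkan.hpp>

void nameQueue( vk::Device device, vk::Queue queue )
{
  vk::DebugUtilsObjectNameInfoEXT nameInfo{};
  nameInfo.objectType   = vk::ObjectType::eQueue;
  nameInfo.objectHandle = uint64_t( static_cast<VkQueue>( queue ) );
  nameInfo.pObjectName  = "main-graphics-queue";
  device.setDebugUtilsObjectNameEXT( nameInfo );
}
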
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, struct AHardwareBuffer ** pBuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer *>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    //=== VK_KHR_get_memory_requirements2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
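// Sketch for VK_KHR_get_memory_requirements2 above: querying buffer
// requirements together with dedicated-allocation preferences through a
// StructureChain, so both structs are filled by one call.
#include <vulkan/vulkan.hpp>

bool prefersDedicatedAllocation( vk::Device device, vk::Buffer buffer )
{
  vk::BufferMemoryRequirementsInfo2 info{ buffer };
  auto chain =
    device.getBufferMemoryRequirements2KHR<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );
  return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation == VK_TRUE;
}
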
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_acceleration_structure ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<AccelerationStructureKHR>::type createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
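// Sketch for VK_KHR_acceleration_structure above: creating a bottom-level
// acceleration structure inside an existing buffer. Assumes the buffer was
// created with eAccelerationStructureStorageKHR usage and that `size` came
// from getAccelerationStructureBuildSizesKHR (declared further below).
#include <vulkan/vulkan.hpp>

vk::AccelerationStructureKHR createBlas( vk::Device device, vk::Buffer buffer, vk::DeviceSize size )
{
  vk::AccelerationStructureCreateInfoKHR createInfo{};
  createInfo.buffer = buffer;  // backing storage; offset must be a multiple of 256
  createInfo.offset = 0;
  createInfo.size   = size;
  createInfo.type   = vk::AccelerationStructureTypeKHR::eBottomLevel;
  return device.createAccelerationStructureKHR( createInfo );
}
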
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, ArrayProxy<const AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void * pData, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const & data, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type writeAccelerationStructuresPropertyKHR( ArrayProxy<const AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, const uint32_t * pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const AccelerationStructureBuildGeometryInfoKHR & buildInfo, ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
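// Sketch for getAccelerationStructureBuildSizesKHR above: sizing a
// device-built acceleration structure for one geometry with `primitiveCount`
// primitives. The buildInfo is assumed to have its geometry array filled in.
#include <vulkan/vulkan.hpp>

vk::AccelerationStructureBuildSizesInfoKHR queryBuildSizes(
  vk::Device device, vk::AccelerationStructureBuildGeometryInfoKHR const & buildInfo, uint32_t primitiveCount )
{
  // Returns the size of the structure itself plus build/update scratch sizes.
  return device.getAccelerationStructureBuildSizesKHR(
    vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, primitiveCount );
}
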
+    //=== VK_KHR_sampler_ycbcr_conversion ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_bind_memory2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_EXT_image_drm_format_modifier ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_EXT_validation_cache ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ValidationCacheEXT, Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const ValidationCacheEXT> const & srcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t * pDataSize, void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
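// Sketch for VK_EXT_validation_cache above: persisting the cache to disk so
// later runs can skip shader revalidation. The file path is illustrative.
#include <fstream>
#include <vulkan/vulkan.hpp>

void saveValidationCache( vk::Device device, vk::ValidationCacheEXT cache )
{
  std::vector<uint8_t> blob = device.getValidationCacheDataEXT( cache );
  std::ofstream out( "validation.cache", std::ios::binary );
  out.write( reinterpret_cast<const char *>( blob.data() ),
             static_cast<std::streamsize>( blob.size() ) );
}
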
+    //=== VK_NV_ray_tracing ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureNV, Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + template + void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupportKHR( + const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_external_memory_host === + + 
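// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the patch. The enhanced-mode
// overloads above return the NV ray tracing shader group handles as a
// ready-sized std::vector instead of filling a caller-provided raw buffer.
// `device`, `pipeline`, `groupCount` and `handleSize` are assumed to exist;
// `handleSize` would come from PhysicalDeviceRayTracingPropertiesNV.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<uint8_t> fetchGroupHandles( vk::Device device, vk::Pipeline pipeline,
                                        uint32_t groupCount, uint32_t handleSize )
{
  // The DataType-templated overload sizes the vector itself and throws
  // vk::SystemError on failure (when exceptions are enabled).
  return device.getRayTracingShaderGroupHandlesNV<uint8_t>(
    pipeline, /*firstGroup=*/0, groupCount, size_t{ groupCount } * handleSize );
}
// ---------------------------------------------------------------------------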
+    //=== VK_EXT_external_memory_host ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_EXT_calibrated_timestamps ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, uint64_t * pTimestamps, uint64_t * pMaxDeviation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, ArrayProxy<uint64_t> const & timestamps, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Uint64_tAllocator, typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Uint64_tAllocator & uint64_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_timeline_semaphore ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_INTEL_performance_query ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PerformanceConfigurationINTEL, Dispatch>>::type acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_AMD_display_native_hdr ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_buffer_device_address ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_EXT_full_screen_exclusive ===
+
+#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_buffer_device_address ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
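// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the patch. It shows the
// VK_KHR_timeline_semaphore entry points declared above. `device` and
// `semaphore` are assumed to exist and the extension to be enabled.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <vulkan/vulkan.hpp>

bool waitForTimelineValue( vk::Device device, vk::Semaphore semaphore, uint64_t value )
{
  vk::SemaphoreWaitInfo waitInfo{};
  waitInfo.semaphoreCount = 1;
  waitInfo.pSemaphores    = &semaphore;
  waitInfo.pValues        = &value;
  // waitSemaphoresKHR can return eSuccess or eTimeout; both are success codes,
  // so the enhanced overload hands back the Result instead of throwing.
  return device.waitSemaphoresKHR( waitInfo, /*timeout ns=*/1'000'000'000 ) ==
         vk::Result::eSuccess;
}
// ---------------------------------------------------------------------------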
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_EXT_host_query_reset ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_deferred_host_operations ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeferredOperationKHR, Dispatch>>::type createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_pipeline_executable_properties ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, uint32_t * pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineExecutablePropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineExecutableStatisticKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
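// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the patch. It drives one
// VK_KHR_deferred_host_operations operation from a single thread; a real
// consumer would spawn up to getDeferredOperationMaxConcurrencyKHR() workers.
// `device` and `op` are assumed to exist.
// ---------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>

vk::Result driveDeferredOperation( vk::Device device, vk::DeferredOperationKHR op )
{
  // Join until the operation stops reporting idle work for this thread
  // (eThreadIdleKHR / eThreadDoneKHR are success codes), then poll the final
  // status of the whole operation.
  vk::Result r;
  do
  {
    r = device.deferredOperationJoinKHR( op );
  } while ( r == vk::Result::eThreadIdleKHR );
  return device.getDeferredOperationResultKHR( op );
}
// ---------------------------------------------------------------------------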
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineExecutableInternalRepresentationKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_NV_device_generated_commands ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_EXT_private_data ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PrivateDataSlotEXT, Dispatch>>::type createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
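// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the patch. It attaches a
// 64-bit tag to an image via VK_EXT_private_data using the slot helpers
// declared above. `device` and `image` are assumed to exist and the
// privateData feature to be enabled; UniquePrivateDataSlotEXT is the smart
// handle typedef generated alongside the Unique creator.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <vulkan/vulkan.hpp>

uint64_t tagImage( vk::Device device, vk::Image image, uint64_t tag )
{
  // The Unique creator throws on failure and destroys the slot on scope exit.
  vk::UniquePrivateDataSlotEXT slot =
    device.createPrivateDataSlotEXTUnique( vk::PrivateDataSlotCreateInfoEXT{} );
  const uint64_t handle = uint64_t( static_cast<VkImage>( image ) );
  device.setPrivateDataEXT( vk::ObjectType::eImage, handle, *slot, tag );
  // Read the value back through the enhanced overload, which returns uint64_t.
  return device.getPrivateDataEXT( vk::ObjectType::eImage, handle, *slot );
}
// ---------------------------------------------------------------------------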
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_ray_tracing_pipeline ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<DataType> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Allocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DataType, Allocator>>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<DataType> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Allocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DataType, Allocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DataType>::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    //=== VK_FUCHSIA_external_memory ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    //=== VK_FUCHSIA_external_semaphore ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA( const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDevice m_device = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDevice>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Device>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
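// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the patch. It exercises the
// VK_KHR_ray_tracing_pipeline creator declared on Device above, without
// deferral. `device` and `createInfo` are assumed to exist; passing null
// DeferredOperationKHR / PipelineCache handles requests immediate compilation.
// ---------------------------------------------------------------------------
#include <cassert>
#include <vulkan/vulkan.hpp>

vk::Pipeline buildRtPipeline( vk::Device device,
                              const vk::RayTracingPipelineCreateInfoKHR & createInfo )
{
  // createRayTracingPipelineKHR hands back ResultValue<Pipeline> rather than
  // throwing on every non-eSuccess code, because deferred/compile-required
  // statuses are also success codes for this entry point.
  vk::ResultValue<vk::Pipeline> rv = device.createRayTracingPipelineKHR(
    nullptr /*deferredOperation*/, nullptr /*pipelineCache*/, createInfo );
  assert( rv.result == vk::Result::eSuccess );
  return rv.value;
}
// ---------------------------------------------------------------------------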
-    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; }
-    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ); return *this; }
-
-    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this ); }
-    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this ); }
-
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default;
-#else
-    bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
-    }
-    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
-    void * pNext = {};
-    uint32_t maxGraphicsShaderGroupCount = {}; uint32_t maxIndirectSequenceCount = {}; uint32_t maxIndirectCommandsTokenCount = {};
-    uint32_t maxIndirectCommandsStreamCount = {}; uint32_t maxIndirectCommandsTokenOffset = {}; uint32_t maxIndirectCommandsStreamStride = {};
-    uint32_t minSequencesCountBufferOffsetAlignment = {}; uint32_t minSequencesIndexBufferOffsetAlignment = {}; uint32_t minIndirectCommandsBufferOffsetAlignment = {};
-  };
-  static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
-  {
-    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
-  };
+  struct DisplayModeParametersKHR
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT : visibleRegion( visibleRegion_ ), refreshRate( refreshRate_ ) {}
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs ); return *this; }
+
+    DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT { visibleRegion = visibleRegion_; return *this; }
+    DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT { refreshRate = refreshRate_; return *this; }
+
+    operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDisplayModeParametersKHR *>( this ); }
+    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkDisplayModeParametersKHR *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeParametersKHR const & ) const = default;
+#else
+    bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate ); }
+    bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
+    uint32_t refreshRate = {};
+  };
+  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
-  struct PhysicalDeviceDiagnosticsConfigFeaturesNV
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT : diagnosticsConfig( diagnosticsConfig_ ) {}
-    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) - offsetof( PhysicalDeviceDiagnosticsConfigFeaturesNV, pNext ) ); return *this; }
-    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; }
-    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ); return *this; }
-
-    PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
-    PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT { diagnosticsConfig = diagnosticsConfig_; return *this; }
-
-    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this ); }
-    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this ); }
-
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default;
-#else
-    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); }
-    bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
-    void * pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};
-  };
-  static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+  struct DisplayModeCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ), parameters( parameters_ ) {}
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs ); return *this; }
+
+    DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+    DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT { parameters = parameters_; return *this; }
+
+    operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( this ); }
+    operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkDisplayModeCreateInfoKHR *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters ); }
+    bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+  };
+  static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
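// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the patch. The new struct
// setters return *this, so a DisplayModeCreateInfoKHR can be built fluently.
// The extent and refresh rate below are made-up example values; refreshRate
// is expressed in millihertz (60000 == 60 Hz).
// ---------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>

vk::DisplayModeCreateInfoKHR makeModeInfo()
{
  return vk::DisplayModeCreateInfoKHR{}.setParameters(
    vk::DisplayModeParametersKHR{}
      .setVisibleRegion( { 1920, 1080 } )
      .setRefreshRate( 60000 ) );
}
// ---------------------------------------------------------------------------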
   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
+  struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
   {
-    using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
+    using Type = DisplayModeCreateInfoKHR;
   };
-  struct PhysicalDeviceDiscardRectanglePropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT : maxDiscardRectangles( maxDiscardRectangles_ ) {}
-    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) ); return *this; }
-    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = rhs; }
-    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ); return *this; }
-
-    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this ); }
-    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this ); }
-
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default;
-#else
-    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); }
-    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
-    void * pNext = {};
-    uint32_t maxDiscardRectangles = {};
-  };
-  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+  class DisplayModeKHR
+  {
+  public:
+    using CType = VkDisplayModeKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default;
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( displayModeKHR ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = displayModeKHR;
+      return *this;
+    }
+#endif
+
+    DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeKHR const & ) const = default;
+#else
+    bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR == rhs.m_displayModeKHR; }
+    bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR != rhs.m_displayModeKHR; }
+    bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR < rhs.m_displayModeKHR; }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR; }
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR != VK_NULL_HANDLE; }
+    bool operator!() const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR == VK_NULL_HANDLE; }
+
+  private:
+    VkDisplayModeKHR m_displayModeKHR = {};
+  };
+  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDisplayModeKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  struct ExtensionProperties
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {}, uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT : extensionName( extensionName_ ), specVersion( specVersion_ ) {}
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs ); return *this; }
+
+    operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkExtensionProperties *>( this ); }
+    operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkExtensionProperties *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExtensionProperties const & ) const = default;
+#else
+    bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion ); }
+    bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
+    uint32_t specVersion = {};
+  };
+  static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
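// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the patch. ExtensionProperties
// is the element type returned by the enumeration helpers; its ArrayWrapper1D
// member derives from std::array<char, N>, so .data() yields a C string.
// ---------------------------------------------------------------------------
#include <cstring>
#include <vulkan/vulkan.hpp>

bool hasInstanceExtension( const char * name )
{
  // The enhanced overload returns the full list and throws on failure.
  for ( const vk::ExtensionProperties & ep : vk::enumerateInstanceExtensionProperties() )
    if ( std::strcmp( ep.extensionName.data(), name ) == 0 )
      return true;
  return false;
}
// ---------------------------------------------------------------------------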
); - template <> - struct CppType + struct LayerProperties { - using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; - }; - - struct PhysicalDeviceDriverProperties - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array const& driverName_ = {}, - std::array const& driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : driverID( driverID_ ) - , driverName( driverName_ ) - , driverInfo( driverInfo_ ) - , conformanceVersion( conformanceVersion_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + LayerProperties( std::array const & layerName_ = {}, + uint32_t specVersion_ = {}, + uint32_t implementationVersion_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : layerName( layerName_ ) + , specVersion( specVersion_ ) + , implementationVersion( implementationVersion_ ) + , description( description_ ) {} - PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : LayerProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDriverProperties ) - offsetof( PhysicalDeviceDriverProperties, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceDriverProperties& operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - - operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceDriverProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LayerProperties const & ) const = default; #else - bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( driverID == rhs.driverID ) - && ( driverName == rhs.driverName ) - && ( driverInfo == rhs.driverInfo ) - && ( conformanceVersion == rhs.conformanceVersion ); + return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && + ( implementationVersion == rhs.implementationVersion ) && ( description == rhs.description ); } - bool operator!=( PhysicalDeviceDriverProperties 
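// Example (sketch): probing for a layer by name with the LayerProperties wrapper above;
// the validation-layer name is only an illustration.
#include <cstring>
#include <vulkan/vulkan.hpp>

bool hasLayer( const char * wanted = "VK_LAYER_KHRONOS_validation" )
{
  for ( vk::LayerProperties const & lp : vk::enumerateInstanceLayerProperties() )
    if ( std::strcmp( lp.layerName.data(), wanted ) == 0 )
      return true;
  return false;
}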
const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; - + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; + uint32_t specVersion = {}; + uint32_t implementationVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; }; - static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - template <> - struct CppType + struct PerformanceCounterKHR { - using Type = PhysicalDeviceDriverProperties; - }; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; - struct PhysicalDeviceExclusiveScissorFeaturesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) VULKAN_HPP_NOEXCEPT - : exclusiveScissor( exclusiveScissor_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, + std::array const & uuid_ = {} ) VULKAN_HPP_NOEXCEPT + : unit( unit_ ) + , scope( scope_ ) + , storage( storage_ ) + , uuid( uuid_ ) {} - PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR & + operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT { - 
*this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterKHR const & ) const = default; +#else + bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && + ( storage == rhs.storage ) && ( uuid == rhs.uuid ); + } + + bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; + }; + static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceCounterKHR; + }; + + struct PerformanceCounterDescriptionKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, + std::array const & name_ = {}, + std::array const & category_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , name( name_ ) + , category( category_ ) + , description( description_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR & + operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default; +#else + bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const 
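// Sketch only (VK_KHR_performance_query): enumerating the counters of a queue family with the
// PerformanceCounterKHR wrapper above. Assumes this header generates the enhanced-mode
// overload that returns the two arrays as a pair of vectors; verify the exact signature
// before relying on it.
#include <iostream>
#include <vulkan/vulkan.hpp>

void listCounters( vk::PhysicalDevice gpu, uint32_t queueFamilyIndex )
{
  auto [counters, descriptions] = gpu.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
  for ( std::size_t i = 0; i < counters.size(); ++i )
    std::cout << descriptions[i].name.data() << " (storage " << vk::to_string( counters[i].storage ) << ")\n";
}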
VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && + ( category == rhs.category ) && ( description == rhs.description ); + } + + bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PerformanceCounterDescriptionKHR; + }; + + struct DisplayModePropertiesKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : displayMode( displayMode_ ) + , parameters( parameters_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayModePropertiesKHR & + operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
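// Example (sketch, VK_KHR_display assumed enabled): listing a display's modes with the
// DisplayModePropertiesKHR wrapper defined above.
#include <vulkan/vulkan.hpp>

void listModes( vk::PhysicalDevice gpu, vk::DisplayKHR display )
{
  for ( vk::DisplayModePropertiesKHR const & m : gpu.getDisplayModePropertiesKHR( display ) )
  {
    vk::DisplayModeParametersKHR const & p = m.parameters;
    (void)p.visibleRegion;  // mode extent in pixels
    (void)p.refreshRate;    // refresh rate in millihertz
  }
}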
);
+
+  struct DisplayModeProperties2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(
+      VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayModeProperties( displayModeProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeProperties2KHR &
+      operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
+    }
+
+    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
+#else
+    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType                 = StructureType::eDisplayModeProperties2KHR;
+    void *                                         pNext                 = {};
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+  };
+  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value,
+                 "struct wrapper is not a standard layout!"
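// Example (sketch, VK_KHR_get_display_properties2): the *2KHR variant adds sType/pNext so the
// query is extensible; the payload is the nested base struct.
#include <vulkan/vulkan.hpp>

void listModes2( vk::PhysicalDevice gpu, vk::DisplayKHR display )
{
  for ( vk::DisplayModeProperties2KHR const & m : gpu.getDisplayModeProperties2KHR( display ) )
    (void)m.displayModeProperties.parameters.refreshRate;
}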
); + + template <> + struct CppType + { + using Type = DisplayModeProperties2KHR; + }; + + struct DisplayPlaneInfo2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, + uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : mode( mode_ ) + , planeIndex( planeIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & + operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT + DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { - exclusiveScissor = exclusiveScissor_; + mode = mode_; return *this; } - - operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + planeIndex = planeIndex_; + return *this; } - operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } + operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; #else - bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exclusiveScissor == rhs.exclusiveScissor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && + ( planeIndex == rhs.planeIndex ); } - bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; }; - static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have 
different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + using Type = DisplayPlaneInfo2KHR; }; - struct PhysicalDeviceExtendedDynamicStateFeaturesEXT + struct DisplayPlaneCapabilitiesKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT - : extendedDynamicState( extendedDynamicState_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedAlpha( supportedAlpha_ ) + , minSrcPosition( minSrcPosition_ ) + , maxSrcPosition( maxSrcPosition_ ) + , minSrcExtent( minSrcExtent_ ) + , maxSrcExtent( maxSrcExtent_ ) + , minDstPosition( minDstPosition_ ) + , maxDstPosition( maxDstPosition_ ) + , minDstExtent( minDstExtent_ ) + , maxDstExtent( maxDstExtent_ ) {} - PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilitiesKHR & + operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) - offsetof( PhysicalDeviceExtendedDynamicStateFeaturesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceExtendedDynamicStateFeaturesEXT& operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - 
- PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT - { - extendedDynamicState = extendedDynamicState_; - return *this; - } - - - operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default; #else - bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( extendedDynamicState == rhs.extendedDynamicState ); + return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && + ( maxSrcPosition == rhs.maxSrcPosition ) && ( minSrcExtent == rhs.minSrcExtent ) && + ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) && + ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && + ( maxDstExtent == rhs.maxDstExtent ); } - bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; + }; + static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
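// Example (sketch): testing a plane's alpha support against the capability flags above.
#include <vulkan/vulkan.hpp>

bool supportsPerPixelAlpha( vk::DisplayPlaneCapabilitiesKHR const & caps )
{
  return static_cast<bool>( caps.supportedAlpha & vk::DisplayPlaneAlphaFlagBitsKHR::ePerPixel );
}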
); + struct DisplayPlaneCapabilities2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT + : capabilities( capabilities_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilities2KHR & + operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); + } + + bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; }; - static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
+  struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
   {
-    using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+    using Type = DisplayPlaneCapabilities2KHR;
+  };
+
+  struct DisplayPlanePropertiesKHR
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
+                                                    uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : currentDisplay( currentDisplay_ )
+      , currentStackIndex( currentStackIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPlanePropertiesKHR &
+      operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR *>( this );
+    }
+
+    operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlanePropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default;
+#else
+    bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex );
+    }
+
+    bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay    = {};
+    uint32_t                         currentStackIndex = {};
+  };
+  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!"
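// Example (sketch, VK_KHR_display): walking the display planes; currentDisplay is a null
// handle when a plane is not currently attached to any display.
#include <vulkan/vulkan.hpp>

void listPlanes( vk::PhysicalDevice gpu )
{
  for ( vk::DisplayPlanePropertiesKHR const & p : gpu.getDisplayPlanePropertiesKHR() )
    if ( p.currentDisplay )          // uses DisplayKHR's explicit bool conversion
      (void)p.currentStackIndex;     // z-order of the plane on that display
}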
); + + struct DisplayPlaneProperties2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : displayPlaneProperties( displayPlaneProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneProperties2KHR & + operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( displayPlaneProperties == rhs.displayPlaneProperties ); + } + + bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; + }; + static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
+  {
+    using Type = DisplayPlaneProperties2KHR;
+  };
+
+  struct DisplayPropertiesKHR
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {},
+                            const char * displayName_ = {},
+                            VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {},
+                            VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {},
+                            VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+                            VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {},
+                            VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : display( display_ )
+      , displayName( displayName_ )
+      , physicalDimensions( physicalDimensions_ )
+      , physicalResolution( physicalResolution_ )
+      , supportedTransforms( supportedTransforms_ )
+      , planeReorderPossible( planeReorderPossible_ )
+      , persistentContent( persistentContent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayPropertiesKHR &
+      operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPropertiesKHR *>( this );
+    }
+
+    operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayPropertiesKHR const & ) const = default;
+#else
+    bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( display == rhs.display ) && ( displayName == rhs.displayName ) &&
+             ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) &&
+             ( supportedTransforms == rhs.supportedTransforms ) &&
+             ( planeReorderPossible == rhs.planeReorderPossible ) && ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR               display              = {};
+    const char *                                   displayName          = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                 physicalDimensions   = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                 physicalResolution   = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms  = {};
+    VULKAN_HPP_NAMESPACE::Bool32                   planeReorderPossible = {};
+    VULKAN_HPP_NAMESPACE::Bool32                   persistentContent    = {};
+  };
+  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!"
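// Example (sketch, VK_KHR_display): enumerating displays; displayName may be a null pointer,
// so guard before printing.
#include <iostream>
#include <vulkan/vulkan.hpp>

void listDisplays( vk::PhysicalDevice gpu )
{
  for ( vk::DisplayPropertiesKHR const & d : gpu.getDisplayPropertiesKHR() )
    std::cout << ( d.displayName ? d.displayName : "(unnamed)" ) << ": "
              << d.physicalResolution.width << "x" << d.physicalResolution.height << "\n";
}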
); + + struct DisplayProperties2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : displayProperties( displayProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayProperties2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayProperties2KHR & + operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayProperties2KHR const & ) const = default; +#else + bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); + } + + bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + }; + static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayProperties2KHR; }; struct PhysicalDeviceExternalBufferInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , usage( usage_ ) , handleType( handleType_ ) {} - PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceExternalBufferInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & + operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -53047,456 +64237,723 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalBufferInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - - operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; #else - bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( usage == rhs.usage ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && + ( handleType == rhs.handleType ); } - bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
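// Example (sketch, core Vulkan 1.1): checking whether a transfer-source buffer using
// opaque-fd external memory is exportable with the info struct above; the returned
// ExternalBufferProperties wrapper appears further below.
#include <vulkan/vulkan.hpp>

bool bufferExportableAsOpaqueFd( vk::PhysicalDevice gpu )
{
  vk::PhysicalDeviceExternalBufferInfo info{ {},
                                             vk::BufferUsageFlagBits::eTransferSrc,
                                             vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
  vk::ExternalBufferProperties props = gpu.getExternalBufferProperties( info );
  return static_cast<bool>( props.externalMemoryProperties.externalMemoryFeatures &
                            vk::ExternalMemoryFeatureFlagBits::eExportable );
}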
); template <> struct CppType { using Type = PhysicalDeviceExternalBufferInfo; }; + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; - struct PhysicalDeviceExternalFenceInfo + struct ExternalMemoryProperties { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryFeatures( externalMemoryFeatures_ ) + , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) {} - PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryProperties & + operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - - operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryProperties const & ) const = default; #else - bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - 
return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); } - bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; + }; + static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + struct ExternalBufferProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryProperties( externalMemoryProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalBufferProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalBufferProperties & + operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalBufferProperties const & ) const = default; +#else + bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; }; - static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = ExternalBufferProperties; + }; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + struct PhysicalDeviceExternalFenceInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFenceInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & + operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalFenceInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
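// Example (sketch, core Vulkan 1.1): the fence analogue of the buffer query above, using
// PhysicalDeviceExternalFenceInfo; ExternalFenceProperties is defined just below.
#include <vulkan/vulkan.hpp>

bool fenceExportableAsOpaqueFd( vk::PhysicalDevice gpu )
{
  vk::PhysicalDeviceExternalFenceInfo info{ vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd };
  vk::ExternalFenceProperties props = gpu.getExternalFenceProperties( info );
  return static_cast<bool>( props.externalFenceFeatures & vk::ExternalFenceFeatureFlagBits::eExportable );
}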
); template <> struct CppType { using Type = PhysicalDeviceExternalFenceInfo; }; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; - struct PhysicalDeviceExternalImageFormatInfo + struct ExternalFenceProperties { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + , externalFenceFeatures( externalFenceFeatures_ ) {} - PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFenceProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalFenceProperties & + operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - - operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
ExternalFenceProperties const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleType == rhs.handleType );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
+             ( compatibleHandleTypes == rhs.compatibleHandleTypes ) &&
+             ( externalFenceFeatures == rhs.externalFenceFeatures );
     }
 
-    bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
-
+    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::eExternalFenceProperties;
+    void *                                             pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags    externalFenceFeatures = {};
   };
-  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!"
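// Usage sketch (illustrative, not part of the generated header): the wrapper pair
// defined above feeds vk::PhysicalDevice::getExternalFenceProperties. `gpu` is an
// assumed valid vk::PhysicalDevice; the entry point is the standard vulkan.hpp one.
//
//   bool canExportOpaqueFdFence( vk::PhysicalDevice gpu )
//   {
//     vk::PhysicalDeviceExternalFenceInfo info{ vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd };
//     vk::ExternalFenceProperties props = gpu.getExternalFenceProperties( info );
//     // eExportable must be reported before a fence payload can be exported as an FD.
//     return static_cast<bool>( props.externalFenceFeatures &
//                               vk::ExternalFenceFeatureFlagBits::eExportable );
//   }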
); template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExternalImageFormatInfo; + using Type = ExternalFenceProperties; }; + using ExternalFencePropertiesKHR = ExternalFenceProperties; - struct PhysicalDeviceExternalMemoryHostPropertiesEXT + struct ImageFormatProperties { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT - : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, + uint32_t maxMipLevels_ = {}, + uint32_t maxArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxExtent( maxExtent_ ) + , maxMipLevels( maxMipLevels_ ) + , maxArrayLayers( maxArrayLayers_ ) + , sampleCounts( sampleCounts_ ) + , maxResourceSize( maxResourceSize_ ) {} - PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties & + operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - - operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties const & ) const = default; #else - bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); + return ( 
maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && + ( maxArrayLayers == rhs.maxArrayLayers ) && ( sampleCounts == rhs.sampleCounts ) && + ( maxResourceSize == rhs.maxResourceSize ); } - bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; + uint32_t maxMipLevels = {}; + uint32_t maxArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; + }; + static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + struct ExternalImageFormatPropertiesNV + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ) + , externalMemoryFeatures( externalMemoryFeatures_ ) + , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatPropertiesNV & + operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default; +#else + bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( imageFormatProperties == rhs.imageFormatProperties ) && + ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; - - }; - static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( 
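// Usage sketch (illustrative, not part of the generated header): ImageFormatProperties
// above is the return type of vk::PhysicalDevice::getImageFormatProperties, which, with
// exceptions enabled, throws a vk::SystemError when the combination is unsupported.
// `gpu` is an assumed valid vk::PhysicalDevice.
//
//   vk::ImageFormatProperties caps = gpu.getImageFormatProperties(
//     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
//     vk::ImageUsageFlagBits::eSampled, vk::ImageCreateFlags{} );
//   // caps.maxExtent, caps.maxMipLevels and caps.maxResourceSize bound what
//   // vkCreateImage will accept for this format/type/tiling/usage combination.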
VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; }; + static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct PhysicalDeviceExternalSemaphoreInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalSemaphoreInfo; - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT : handleType( handleType_ ) {} - PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & + operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo & + operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceExternalSemaphoreInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } - - operator 
VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; #else - bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); } - bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; - static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
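// Usage sketch (illustrative, not part of the generated header), mirroring the fence
// query above for semaphores; `gpu` is an assumed valid vk::PhysicalDevice.
//
//   vk::PhysicalDeviceExternalSemaphoreInfo info{
//     vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
//   vk::ExternalSemaphoreProperties props = gpu.getExternalSemaphoreProperties( info );
//   bool importable = static_cast<bool>( props.externalSemaphoreFeatures &
//                                        vk::ExternalSemaphoreFeatureFlagBits::eImportable );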
); template <> struct CppType { using Type = PhysicalDeviceExternalSemaphoreInfo; }; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; - struct PhysicalDeviceFeatures2 + struct ExternalSemaphoreProperties { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT - : features( features_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) {} - PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ExternalSemaphoreProperties & + operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFeatures2 ) - offsetof( PhysicalDeviceFeatures2, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT - { - features = features_; - return *this; - } - - - operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalSemaphoreProperties const & ) const = default; #else - bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - 
return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( features == rhs.features ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && + ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); } - bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; + }; + static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = ExternalSemaphoreProperties; + }; + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + struct PhysicalDeviceFeatures2 + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT + : features( features_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & + operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFeatures2 & + setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + { + features = features_; + return *this; + } + + operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); + } + + bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; - void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void 
* pNext = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; - }; - static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" ); + static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -53504,858 +64961,222 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceFeatures2; }; + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; - struct PhysicalDeviceFloatControlsProperties + struct FormatProperties { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT - : denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FormatProperties( 
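// Usage sketch (illustrative, not part of the generated header): PhysicalDeviceFeatures2
// exists so that feature structs can be chained through pNext and filled in one call.
// A minimal manual chain, assuming a Vulkan 1.2 driver; `gpu` is a placeholder handle:
//
//   vk::PhysicalDeviceVulkan12Features v12features{};
//   vk::PhysicalDeviceFeatures2        features2{};
//   features2.setPNext( &v12features );   // chain the extension struct
//   gpu.getFeatures2( &features2 );       // fills features2.features and v12features
//   // vulkan.hpp can also build the chain for you:
//   //   auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
//   //                                 vk::PhysicalDeviceVulkan12Features>();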
VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : linearTilingFeatures( linearTilingFeatures_ ) + , optimalTilingFeatures( optimalTilingFeatures_ ) + , bufferFeatures( bufferFeatures_ ) {} - PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : FormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFloatControlsProperties ) - offsetof( PhysicalDeviceFloatControlsProperties, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceFloatControlsProperties& operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - - operator VkPhysicalDeviceFloatControlsProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFloatControlsProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties const & ) const = default; #else - bool operator==( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) - && ( roundingModeIndependence == rhs.roundingModeIndependence ) - && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) - && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) - && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) - && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) - && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) - && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) - && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) - && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) - && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) - && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) - && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) - && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) - && ( shaderRoundingModeRTZFloat16 == 
rhs.shaderRoundingModeRTZFloat16 )
-          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
-          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
+      return ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
+             ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
     }
 
-    bool operator!=( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
-    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
-    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
-
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures  = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures        = {};
   };
-  static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!"
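// Usage sketch (illustrative, not part of the generated header): FormatProperties
// above is returned by vk::PhysicalDevice::getFormatProperties; `gpu` is an assumed
// valid vk::PhysicalDevice.
//
//   vk::FormatProperties fp = gpu.getFormatProperties( vk::Format::eD24UnormS8Uint );
//   bool usableAsDepth = static_cast<bool>(
//     fp.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment );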
); - template <> - struct CppType + struct FormatProperties2 { - using Type = PhysicalDeviceFloatControlsProperties; - }; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; - struct PhysicalDeviceFragmentDensityMapFeaturesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT - : fragmentDensityMap( fragmentDensityMap_ ) - , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) - , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : formatProperties( formatProperties_ ) {} - PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : FormatProperties2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FormatProperties2 & + operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - - operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties2 const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentDensityMap == rhs.fragmentDensityMap ) - && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) - && ( fragmentDensityMapNonSubsampledImages == 
rhs.fragmentDensityMapNonSubsampledImages ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties ); } - bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; }; - static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + using Type = FormatProperties2; }; + using FormatProperties2KHR = FormatProperties2; - struct PhysicalDeviceFragmentDensityMapPropertiesEXT + struct PhysicalDeviceFragmentShadingRateKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateKHR; - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT - : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) - , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) - , fragmentDensityInvocations( fragmentDensityInvocations_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleCounts( sampleCounts_ ) + , fragmentSize( fragmentSize_ ) {} - PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateKHR & + operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR & + operator=( 
VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - - operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) - && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) - && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && + ( fragmentSize == rhs.fragmentSize ); } - bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; }; - static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
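// Usage sketch (illustrative, not part of the generated header): with
// VK_KHR_fragment_shading_rate supported and the dispatcher initialized for the
// extension, the supported rates can be enumerated; `gpu` is an assumed valid handle.
//
//   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates =
//     gpu.getFragmentShadingRatesKHR();
//   for ( vk::PhysicalDeviceFragmentShadingRateKHR const & r : rates )
//   {
//     // r.fragmentSize is the coarse shading rate (e.g. 2x2); r.sampleCounts lists
//     // the MSAA sample counts that rate may be combined with.
//   }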
); template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; - }; - - struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT - : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) - {} - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) ); - return *this; - } - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT - { - fragmentShaderBarycentric = fragmentShaderBarycentric_; - return *this; - } - - - operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default; -#else - bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); - } - - bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; - - }; - static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV; - }; - - struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT - : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) - , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) - , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) - {} - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) ); - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT - { - fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT - { - fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT - { - fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; - return *this; - } - - - operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default; -#else - bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) - && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) - && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); - } - - bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; - - }; - static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; - }; - - struct PhysicalDeviceGroupProperties - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {}, - std::array const& physicalDevices_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT - : physicalDeviceCount( physicalDeviceCount_ ) - , physicalDevices( physicalDevices_ ) - , subsetAllocation( subsetAllocation_ ) - {} - - PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) ); - return *this; - } - - PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default; -#else - bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( physicalDevices == rhs.physicalDevices ) - && ( subsetAllocation == rhs.subsetAllocation ); - } - - bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; - void* pNext = {}; - uint32_t physicalDeviceCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D physicalDevices = {}; - VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; - - }; - static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PhysicalDeviceGroupProperties; - }; - - struct PhysicalDeviceHostQueryResetFeatures - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT - : hostQueryReset( hostQueryReset_ ) - {} - - PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceHostQueryResetFeatures ) - offsetof( PhysicalDeviceHostQueryResetFeatures, pNext ) ); - return *this; - } - - PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceHostQueryResetFeatures& operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceHostQueryResetFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT - { - hostQueryReset = hostQueryReset_; - return *this; - } - - - operator VkPhysicalDeviceHostQueryResetFeatures const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceHostQueryResetFeatures const& ) const = default; -#else - bool operator==( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( hostQueryReset == rhs.hostQueryReset ); - } - - bool operator!=( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; - - }; - static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
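// Usage sketch (illustrative, not part of the generated header): feature structs such
// as PhysicalDeviceHostQueryResetFeatures are enabled by chaining them into
// vk::DeviceCreateInfo before device creation; `deviceCreateInfo` is a placeholder.
//
//   vk::PhysicalDeviceHostQueryResetFeatures hostQueryReset{};
//   hostQueryReset.hostQueryReset = VK_TRUE;
//   vk::DeviceCreateInfo deviceCreateInfo{};
//   deviceCreateInfo.setPNext( &hostQueryReset );  // picked up by vkCreateDevice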
); - - template <> - struct CppType - { - using Type = PhysicalDeviceHostQueryResetFeatures; - }; - - struct PhysicalDeviceIDProperties - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array const& deviceUUID_ = {}, - std::array const& driverUUID_ = {}, - std::array const& deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceUUID( deviceUUID_ ) - , driverUUID( driverUUID_ ) - , deviceLUID( deviceLUID_ ) - , deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) - {} - - PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) ); - return *this; - } - - PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceIDProperties const& ) const = default; -#else - bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceUUID == rhs.deviceUUID ) - && ( driverUUID == rhs.driverUUID ) - && ( deviceLUID == rhs.deviceLUID ) - && ( deviceNodeMask == rhs.deviceNodeMask ) - && ( deviceLUIDValid == rhs.deviceLUIDValid ); - } - - bool operator!=( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; - uint32_t deviceNodeMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; - - }; - static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
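// Note (illustrative): the sizeof/standard-layout static_asserts emitted after every
// struct are what make the implicit conversion operators safe, so a wrapper can be
// handed straight to the C API. Sketch, with `gpu` an assumed vk::PhysicalDevice:
//
//   vk::PhysicalDeviceExternalFenceInfo info{};
//   const VkPhysicalDeviceExternalFenceInfo & cInfo = info;  // implicit conversion
//   VkExternalFenceProperties raw{};
//   vkGetPhysicalDeviceExternalFenceProperties(
//     static_cast<VkPhysicalDevice>( gpu ), &cInfo, &raw );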
); - - template <> - struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties> - { - using Type = PhysicalDeviceIDProperties; - }; - - struct PhysicalDeviceImageDrmFormatModifierInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - {} - - PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) ); - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>(&rhs); - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifier = drmFormatModifier_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT - { - sharingMode = sharingMode_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT - { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; - } - - - operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this ); - } - - operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default; -#else - bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); - } - - bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; - const void* pNext = {}; - uint64_t drmFormatModifier = {}; -
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; - - }; - static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT> - { - using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + using Type = PhysicalDeviceFragmentShadingRateKHR; }; struct PhysicalDeviceImageFormatInfo2 { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : format( format_ ) , type( type_ ) , tiling( tiling_ ) @@ -54363,24 +65184,24 @@ namespace VULKAN_HPP_NAMESPACE , flags( flags_ ) {} - PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & + operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs ); return *this; } - PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -54416,598 +65237,542 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( this ); } operator
VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this ); + return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; #else - bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( type == rhs.type ) - && ( tiling == rhs.tiling ) - && ( usage == rhs.usage ) - && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( flags == rhs.flags ); } - bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; }; - static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2> { using Type = PhysicalDeviceImageFormatInfo2; }; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; - struct PhysicalDeviceImageViewImageFormatInfoEXT + struct ImageFormatProperties2 { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT - : imageViewType( imageViewType_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ) {} - PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties2 & + operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs ); return *this; } - PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast<const VkImageFormatProperties2 *>( this ); } - PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>(&rhs); - return *this; + return *reinterpret_cast<VkImageFormatProperties2 *>( this ); } - PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT - { - imageViewType = imageViewType_; - return *this; - } - - - operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this ); - } - - operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties2 const & ) const = default; #else - bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - &&
( imageViewType == rhs.imageViewType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); } - bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; }; - static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" ); template <> - struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT> + struct CppType<StructureType, StructureType::eImageFormatProperties2> { - using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + using Type = ImageFormatProperties2; }; + using ImageFormatProperties2KHR = ImageFormatProperties2; - struct PhysicalDeviceImagelessFramebufferFeatures + struct MemoryType { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT - : imagelessFramebuffer( imagelessFramebuffer_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, + uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : propertyFlags( propertyFlags_ ) + , heapIndex( heapIndex_ ) {} - PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImagelessFramebufferFeatures ) - offsetof( PhysicalDeviceImagelessFramebufferFeatures, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs ); return *this; } - PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast<const VkMemoryType *>( this ); } - PhysicalDeviceImagelessFramebufferFeatures& operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkMemoryType &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>(&rhs); - return *this; + return *reinterpret_cast<VkMemoryType *>( this ); } - PhysicalDeviceImagelessFramebufferFeatures &
setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT - { - imagelessFramebuffer = imagelessFramebuffer_; - return *this; - } - - - operator VkPhysicalDeviceImagelessFramebufferFeatures const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this ); - } - - operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryType const & ) const = default; #else - bool operator==( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); + return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex ); } - bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; - + VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; + uint32_t heapIndex = {}; }; - static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!"
); - template <> - struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures> + struct MemoryHeap { - using Type = PhysicalDeviceImagelessFramebufferFeatures; - }; - - struct PhysicalDeviceIndexTypeUint8FeaturesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT - : indexTypeUint8( indexTypeUint8_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : size( size_ ) + , flags( flags_ ) {} - PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs ); return *this; } - PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast<const VkMemoryHeap *>( this ); } - PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>(&rhs); - return *this; + return *reinterpret_cast<VkMemoryHeap *>( this ); } - PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT - { - indexTypeUint8 = indexTypeUint8_; - return *this; - } - - - operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this ); - } - - operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHeap const & ) const = default; #else - bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( indexTypeUint8 == rhs.indexTypeUint8 ); + return ( size == rhs.size ) && ( flags == rhs.flags ); } - bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; - void* pNext = {}; -
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; - + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; }; - static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" ); - template <> - struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT> + struct PhysicalDeviceMemoryProperties { - using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT; - }; - - struct PhysicalDeviceInlineUniformBlockFeaturesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT - : inlineUniformBlock( inlineUniformBlock_ ) - , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( + uint32_t memoryTypeCount_ = {}, + std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const & memoryTypes_ = {}, + uint32_t memoryHeapCount_ = {}, + std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeCount( memoryTypeCount_ ) + , memoryTypes( memoryTypes_ ) + , memoryHeapCount( memoryHeapCount_ ) + , memoryHeaps( memoryHeaps_ ) {} - PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties & + operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs ); return *this; } - PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties *>( this ); } - PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>(&rhs); - return *this; + return *reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( this ); } - PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock(
VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT - { - inlineUniformBlock = inlineUniformBlock_; - return *this; - } - - PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT - { - descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; - return *this; - } - - - operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this ); - } - - operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; #else - bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( inlineUniformBlock == rhs.inlineUniformBlock ) - && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); + return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && ( memoryHeaps == rhs.memoryHeaps ); } - bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; - + uint32_t memoryTypeCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {}; + uint32_t memoryHeapCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {}; }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, + "struct wrapper is not a standard layout!"
); - template <> - struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT> + struct PhysicalDeviceMemoryProperties2 { - using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT; - }; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2; - struct PhysicalDeviceInlineUniformBlockPropertiesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = {}, - uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT - : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) - , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) - , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) - , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryProperties( memoryProperties_ ) {} - PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2 & + operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs ); return *this; } - PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this ); } - PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>(&rhs); - return *this; + return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this ); } - - operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this ); - } - - operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>(
PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default; #else - bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) - && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) - && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) - && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) - && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties ); } - bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; - void* pNext = {}; - uint32_t maxInlineUniformBlockSize = {}; - uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; - uint32_t maxDescriptorSetInlineUniformBlocks = {}; - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, + "struct wrapper is not a standard layout!"
); template <> - struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT> + struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2> { - using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT; + using Type = PhysicalDeviceMemoryProperties2; + }; + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + struct MultisamplePropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + {} + + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 MultisamplePropertiesEXT & + operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs ); + return *this; + } + + operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this ); + } + + operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisamplePropertiesEXT const & ) const = default; +#else + bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); + } + + bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + }; + static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT> + { + using Type = MultisamplePropertiesEXT; }; struct PhysicalDeviceLimits { - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, - uint32_t maxImageDimension2D_ = {}, - uint32_t maxImageDimension3D_ = {}, - uint32_t maxImageDimensionCube_ = {}, - uint32_t maxImageArrayLayers_ = {}, - uint32_t maxTexelBufferElements_ = {}, - uint32_t maxUniformBufferRange_ = {}, - uint32_t maxStorageBufferRange_ = {}, - uint32_t maxPushConstantsSize_ = {}, - uint32_t maxMemoryAllocationCount_ = {}, - uint32_t maxSamplerAllocationCount_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, - uint32_t maxBoundDescriptorSets_ = {}, - uint32_t maxPerStageDescriptorSamplers_ = {}, - uint32_t maxPerStageDescriptorUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorSampledImages_ = {}, - uint32_t maxPerStageDescriptorStorageImages_ = {}, - uint32_t maxPerStageDescriptorInputAttachments_ = {}, - uint32_t maxPerStageResources_ = {}, - uint32_t maxDescriptorSetSamplers_ = {}, - uint32_t maxDescriptorSetUniformBuffers_ = {}, - uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetStorageBuffers_ = {}, - uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetSampledImages_ = {}, - uint32_t maxDescriptorSetStorageImages_ = {}, - uint32_t maxDescriptorSetInputAttachments_ = {}, - uint32_t maxVertexInputAttributes_ = {}, - uint32_t maxVertexInputBindings_ = {}, - uint32_t maxVertexInputAttributeOffset_ = {}, - uint32_t maxVertexInputBindingStride_ = {}, - uint32_t maxVertexOutputComponents_ = {}, - uint32_t maxTessellationGenerationLevel_ = {}, - uint32_t maxTessellationPatchSize_ = {}, - uint32_t maxTessellationControlPerVertexInputComponents_ = {}, - uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, - uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, - uint32_t maxTessellationControlTotalOutputComponents_ = {}, - uint32_t maxTessellationEvaluationInputComponents_ = {}, - uint32_t maxTessellationEvaluationOutputComponents_ = {}, - uint32_t maxGeometryShaderInvocations_ = {}, - uint32_t maxGeometryInputComponents_ = {}, - uint32_t maxGeometryOutputComponents_ = {}, - uint32_t maxGeometryOutputVertices_ = {}, - uint32_t maxGeometryTotalOutputComponents_ = {}, - uint32_t maxFragmentInputComponents_ = {}, - uint32_t maxFragmentOutputAttachments_ = {}, - uint32_t maxFragmentDualSrcAttachments_ = {}, - uint32_t maxFragmentCombinedOutputResources_ = {}, - uint32_t maxComputeSharedMemorySize_ = {}, - std::array<uint32_t, 3> const& maxComputeWorkGroupCount_ = {}, - uint32_t maxComputeWorkGroupInvocations_ = {}, - std::array<uint32_t, 3> const& maxComputeWorkGroupSize_ = {}, - uint32_t subPixelPrecisionBits_ = {}, - uint32_t subTexelPrecisionBits_ = {}, - uint32_t mipmapPrecisionBits_ = {}, - uint32_t maxDrawIndexedIndexValue_ = {}, - uint32_t maxDrawIndirectCount_ = {}, - float maxSamplerLodBias_ = {}, - float maxSamplerAnisotropy_ = {}, - uint32_t maxViewports_ = {}, - std::array<uint32_t, 2> const& maxViewportDimensions_ = {}, - std::array<float, 2> const& viewportBoundsRange_ = {}, - uint32_t viewportSubPixelBits_ = {}, - size_t minMemoryMapAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, - int32_t
minTexelOffset_ = {}, - uint32_t maxTexelOffset_ = {}, - int32_t minTexelGatherOffset_ = {}, - uint32_t maxTexelGatherOffset_ = {}, - float minInterpolationOffset_ = {}, - float maxInterpolationOffset_ = {}, - uint32_t subPixelInterpolationOffsetBits_ = {}, - uint32_t maxFramebufferWidth_ = {}, - uint32_t maxFramebufferHeight_ = {}, - uint32_t maxFramebufferLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, - uint32_t maxColorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, - uint32_t maxSampleMaskWords_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, - float timestampPeriod_ = {}, - uint32_t maxClipDistances_ = {}, - uint32_t maxCullDistances_ = {}, - uint32_t maxCombinedClipAndCullDistances_ = {}, - uint32_t discreteQueuePriorities_ = {}, - std::array<float, 2> const& pointSizeRange_ = {}, - std::array<float, 2> const& lineWidthRange_ = {}, - float pointSizeGranularity_ = {}, - float lineWidthGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, + uint32_t maxImageDimension2D_ = {}, + uint32_t maxImageDimension3D_ = {}, + uint32_t maxImageDimensionCube_ = {}, + uint32_t maxImageArrayLayers_ = {}, + uint32_t maxTexelBufferElements_ = {}, + uint32_t maxUniformBufferRange_ = {}, + uint32_t maxStorageBufferRange_ = {}, + uint32_t maxPushConstantsSize_ = {}, + uint32_t maxMemoryAllocationCount_ = {}, + uint32_t maxSamplerAllocationCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, + uint32_t maxBoundDescriptorSets_ = {}, + uint32_t maxPerStageDescriptorSamplers_ = {}, + uint32_t maxPerStageDescriptorUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorSampledImages_ = {}, + uint32_t maxPerStageDescriptorStorageImages_ = {}, + uint32_t maxPerStageDescriptorInputAttachments_ = {}, + uint32_t maxPerStageResources_ = {}, + uint32_t maxDescriptorSetSamplers_ = {}, + uint32_t maxDescriptorSetUniformBuffers_ = {}, + uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetStorageBuffers_ = {}, + uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetSampledImages_ = {}, + uint32_t maxDescriptorSetStorageImages_ = {}, + uint32_t maxDescriptorSetInputAttachments_ = {}, + uint32_t maxVertexInputAttributes_ = {}, + uint32_t maxVertexInputBindings_ = {}, + uint32_t maxVertexInputAttributeOffset_ = {}, + uint32_t
maxVertexInputBindingStride_ = {}, + uint32_t maxVertexOutputComponents_ = {}, + uint32_t maxTessellationGenerationLevel_ = {}, + uint32_t maxTessellationPatchSize_ = {}, + uint32_t maxTessellationControlPerVertexInputComponents_ = {}, + uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, + uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, + uint32_t maxTessellationControlTotalOutputComponents_ = {}, + uint32_t maxTessellationEvaluationInputComponents_ = {}, + uint32_t maxTessellationEvaluationOutputComponents_ = {}, + uint32_t maxGeometryShaderInvocations_ = {}, + uint32_t maxGeometryInputComponents_ = {}, + uint32_t maxGeometryOutputComponents_ = {}, + uint32_t maxGeometryOutputVertices_ = {}, + uint32_t maxGeometryTotalOutputComponents_ = {}, + uint32_t maxFragmentInputComponents_ = {}, + uint32_t maxFragmentOutputAttachments_ = {}, + uint32_t maxFragmentDualSrcAttachments_ = {}, + uint32_t maxFragmentCombinedOutputResources_ = {}, + uint32_t maxComputeSharedMemorySize_ = {}, + std::array<uint32_t, 3> const & maxComputeWorkGroupCount_ = {}, + uint32_t maxComputeWorkGroupInvocations_ = {}, + std::array<uint32_t, 3> const & maxComputeWorkGroupSize_ = {}, + uint32_t subPixelPrecisionBits_ = {}, + uint32_t subTexelPrecisionBits_ = {}, + uint32_t mipmapPrecisionBits_ = {}, + uint32_t maxDrawIndexedIndexValue_ = {}, + uint32_t maxDrawIndirectCount_ = {}, + float maxSamplerLodBias_ = {}, + float maxSamplerAnisotropy_ = {}, + uint32_t maxViewports_ = {}, + std::array<uint32_t, 2> const & maxViewportDimensions_ = {}, + std::array<float, 2> const & viewportBoundsRange_ = {}, + uint32_t viewportSubPixelBits_ = {}, + size_t minMemoryMapAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, + int32_t minTexelOffset_ = {}, + uint32_t maxTexelOffset_ = {}, + int32_t minTexelGatherOffset_ = {}, + uint32_t maxTexelGatherOffset_ = {}, + float minInterpolationOffset_ = {}, + float maxInterpolationOffset_ = {}, + uint32_t subPixelInterpolationOffsetBits_ = {}, + uint32_t maxFramebufferWidth_ = {}, + uint32_t maxFramebufferHeight_ = {}, + uint32_t maxFramebufferLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, + uint32_t maxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, + uint32_t maxSampleMaskWords_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, + float timestampPeriod_ = {}, + uint32_t maxClipDistances_ = {}, + uint32_t maxCullDistances_ = {}, + uint32_t maxCombinedClipAndCullDistances_ = {}, + uint32_t discreteQueuePriorities_ = {}, + std::array<float, 2> const & pointSizeRange_ = {}, + std::array<float, 2> const & lineWidthRange_ = {}, + float pointSizeGranularity_ = {}, + float lineWidthGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32
standardSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxImageDimension1D( maxImageDimension1D_ ) , maxImageDimension2D( maxImageDimension2D_ ) , maxImageDimension3D( maxImageDimension3D_ ) @@ -55116,1873 +65881,265 @@ namespace VULKAN_HPP_NAMESPACE , nonCoherentAtomSize( nonCoherentAtomSize_ ) {} - PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits & + operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs ); return *this; } - - operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceLimits *>( this ); } operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceLimits*>( this ); + return *reinterpret_cast<VkPhysicalDeviceLimits *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceLimits const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLimits const & ) const = default; #else - bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( maxImageDimension1D == rhs.maxImageDimension1D ) - && ( maxImageDimension2D == rhs.maxImageDimension2D ) - && ( maxImageDimension3D == rhs.maxImageDimension3D ) - && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) - && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) - && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) - && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) - && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) - && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) - && ( bufferImageGranularity == rhs.bufferImageGranularity ) - && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) - && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) - && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) - && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) - && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) - && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) - && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) - && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) - && ( maxPerStageResources == rhs.maxPerStageResources ) - && ( maxDescriptorSetSamplers ==
rhs.maxDescriptorSetSamplers )
-           && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
-           && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
-           && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
-           && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
-           && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
-           && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
-           && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
-           && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
-           && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
-           && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
-           && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
-           && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
-           && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
-           && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
-           && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
-           && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
-           && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
-           && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
-           && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
-           && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
-           && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
-           && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
-           && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
-           && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
-           && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
-           && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
-           && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
-           && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
-           && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
-           && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
-           && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount )
-           && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
-           && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize )
-           && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
-           && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
-           && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
-           && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
-           && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
-           && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
-           && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
-           && ( maxViewports == rhs.maxViewports )
-           && ( maxViewportDimensions == rhs.maxViewportDimensions )
-           && ( viewportBoundsRange == rhs.viewportBoundsRange )
-           && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
-           && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
-           && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
-           && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
-           && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
-           && ( minTexelOffset == rhs.minTexelOffset )
-           && ( maxTexelOffset == rhs.maxTexelOffset )
-           && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
-           && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
-           && ( minInterpolationOffset == rhs.minInterpolationOffset )
-           && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
-           && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
-           && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
-           && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
-           && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
-           && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
-           && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
-           && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
-           && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
-           && ( maxColorAttachments == rhs.maxColorAttachments )
-           && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
-           && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
-           && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
-           && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
-           && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
-           && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
-           && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
-           && ( timestampPeriod == rhs.timestampPeriod )
-           && ( maxClipDistances == rhs.maxClipDistances )
-           && ( maxCullDistances == rhs.maxCullDistances )
-           && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
-           && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
-           && ( pointSizeRange == rhs.pointSizeRange )
-           && ( lineWidthRange == rhs.lineWidthRange )
-           && ( pointSizeGranularity == rhs.pointSizeGranularity )
-           && ( lineWidthGranularity == rhs.lineWidthGranularity )
-           && ( strictLines == rhs.strictLines )
-           && ( standardSampleLocations == rhs.standardSampleLocations )
-           && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
-           && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
-           && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
+      return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) &&
+             ( maxImageDimension3D == rhs.maxImageDimension3D ) &&
+             ( maxImageDimensionCube == rhs.maxImageDimensionCube ) &&
+             ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
+             ( maxTexelBufferElements == rhs.maxTexelBufferElements ) &&
+             ( maxUniformBufferRange == rhs.maxUniformBufferRange ) &&
+             ( maxStorageBufferRange == rhs.maxStorageBufferRange ) &&
+             ( maxPushConstantsSize == rhs.maxPushConstantsSize ) &&
+             ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) &&
+             ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) &&
+             ( bufferImageGranularity == rhs.bufferImageGranularity ) &&
+             ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) &&
+             ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) &&
+             ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) &&
+             ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) &&
+             ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) &&
+             ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) &&
+             ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) &&
+             ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) &&
+             ( maxPerStageResources == rhs.maxPerStageResources ) &&
+             ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) &&
+             ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) &&
+             ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) &&
+             ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) &&
+             ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) &&
+             ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) &&
+             ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) &&
+             ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) &&
+             ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) &&
+             ( maxVertexInputBindings == rhs.maxVertexInputBindings ) &&
+             ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) &&
+             ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) &&
+             ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) &&
+             ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) &&
+             ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) &&
+             ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) &&
+             ( maxTessellationControlPerVertexOutputComponents ==
+               rhs.maxTessellationControlPerVertexOutputComponents ) &&
+             ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) &&
+             ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) &&
+             ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) &&
+             ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) &&
+             ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) &&
+             ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) &&
+             ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) &&
+             ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) &&
+             ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) &&
+             ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) &&
+             ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) &&
+             ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) &&
+             ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) &&
+             ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) &&
+             ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) &&
+             ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) &&
+             ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) &&
+             ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) &&
+             ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) &&
+             ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) &&
+             ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) &&
+             ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) &&
+             ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) &&
+             ( maxViewportDimensions == rhs.maxViewportDimensions ) &&
+             ( viewportBoundsRange == rhs.viewportBoundsRange ) &&
+             ( viewportSubPixelBits == rhs.viewportSubPixelBits ) &&
+             ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) &&
+             ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) &&
+             ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) &&
+             ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) &&
+             ( minTexelOffset == rhs.minTexelOffset ) && ( maxTexelOffset == rhs.maxTexelOffset ) &&
+             ( minTexelGatherOffset == rhs.minTexelGatherOffset ) &&
+             ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) &&
+             ( minInterpolationOffset == rhs.minInterpolationOffset ) &&
+             ( maxInterpolationOffset == rhs.maxInterpolationOffset ) &&
+             ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) &&
+             ( maxFramebufferWidth == rhs.maxFramebufferWidth ) &&
+             ( maxFramebufferHeight == rhs.maxFramebufferHeight ) &&
+             ( maxFramebufferLayers == rhs.maxFramebufferLayers ) &&
+             ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) &&
+             ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) &&
+             ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) &&
+             ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) &&
+             ( maxColorAttachments == rhs.maxColorAttachments ) &&
+             ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) &&
+             ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) &&
+             ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) &&
+             ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) &&
+             ( storageImageSampleCounts == rhs.storageImageSampleCounts ) &&
+             ( maxSampleMaskWords == rhs.maxSampleMaskWords ) &&
+             ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) &&
+             ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) &&
+             ( maxCullDistances == rhs.maxCullDistances ) &&
+             ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) &&
+             ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && ( pointSizeRange == rhs.pointSizeRange ) &&
+             ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) &&
+             ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) &&
+             ( standardSampleLocations == rhs.standardSampleLocations ) &&
+             ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) &&
+             ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) &&
+             ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
     }

-    bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    uint32_t maxImageDimension1D = {};
-    uint32_t maxImageDimension2D = {};
-    uint32_t maxImageDimension3D = {};
-    uint32_t maxImageDimensionCube = {};
-    uint32_t maxImageArrayLayers = {};
-    uint32_t maxTexelBufferElements = {};
-    uint32_t maxUniformBufferRange = {};
-    uint32_t maxStorageBufferRange = {};
-    uint32_t maxPushConstantsSize = {};
-    uint32_t maxMemoryAllocationCount = {};
-    uint32_t maxSamplerAllocationCount = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
-    uint32_t maxBoundDescriptorSets = {};
-    uint32_t maxPerStageDescriptorSamplers = {};
-    uint32_t maxPerStageDescriptorUniformBuffers = {};
-    uint32_t maxPerStageDescriptorStorageBuffers = {};
-    uint32_t maxPerStageDescriptorSampledImages = {};
-    uint32_t maxPerStageDescriptorStorageImages = {};
-    uint32_t maxPerStageDescriptorInputAttachments = {};
-    uint32_t maxPerStageResources = {};
-    uint32_t maxDescriptorSetSamplers = {};
-    uint32_t maxDescriptorSetUniformBuffers = {};
-    uint32_t maxDescriptorSetUniformBuffersDynamic = {};
-    uint32_t maxDescriptorSetStorageBuffers = {};
-    uint32_t maxDescriptorSetStorageBuffersDynamic = {};
-    uint32_t maxDescriptorSetSampledImages = {};
-    uint32_t maxDescriptorSetStorageImages = {};
-    uint32_t maxDescriptorSetInputAttachments = {};
-    uint32_t maxVertexInputAttributes = {};
-    uint32_t maxVertexInputBindings = {};
-    uint32_t maxVertexInputAttributeOffset = {};
-    uint32_t maxVertexInputBindingStride = {};
-    uint32_t maxVertexOutputComponents = {};
-    uint32_t maxTessellationGenerationLevel = {};
-    uint32_t maxTessellationPatchSize = {};
-    uint32_t maxTessellationControlPerVertexInputComponents = {};
-    uint32_t maxTessellationControlPerVertexOutputComponents = {};
-    uint32_t maxTessellationControlPerPatchOutputComponents = {};
-    uint32_t maxTessellationControlTotalOutputComponents = {};
-    uint32_t maxTessellationEvaluationInputComponents = {};
-    uint32_t maxTessellationEvaluationOutputComponents = {};
-    uint32_t maxGeometryShaderInvocations = {};
-    uint32_t maxGeometryInputComponents = {};
-    uint32_t maxGeometryOutputComponents = {};
-    uint32_t maxGeometryOutputVertices = {};
-    uint32_t maxGeometryTotalOutputComponents = {};
-    uint32_t maxFragmentInputComponents = {};
-    uint32_t maxFragmentOutputAttachments = {};
-    uint32_t maxFragmentDualSrcAttachments = {};
-    uint32_t maxFragmentCombinedOutputResources = {};
-    uint32_t maxComputeSharedMemorySize = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
-    uint32_t maxComputeWorkGroupInvocations = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
-    uint32_t subPixelPrecisionBits = {};
-    uint32_t subTexelPrecisionBits = {};
-    uint32_t mipmapPrecisionBits = {};
-    uint32_t maxDrawIndexedIndexValue = {};
-    uint32_t maxDrawIndirectCount = {};
-    float maxSamplerLodBias = {};
-    float maxSamplerAnisotropy = {};
-    uint32_t maxViewports = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
-    uint32_t viewportSubPixelBits = {};
-    size_t minMemoryMapAlignment = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
-    int32_t minTexelOffset = {};
-    uint32_t maxTexelOffset = {};
-    int32_t minTexelGatherOffset = {};
-    uint32_t maxTexelGatherOffset = {};
-    float minInterpolationOffset = {};
-    float maxInterpolationOffset = {};
-    uint32_t subPixelInterpolationOffsetBits = {};
-    uint32_t maxFramebufferWidth = {};
-    uint32_t maxFramebufferHeight = {};
-    uint32_t maxFramebufferLayers = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
-    uint32_t maxColorAttachments = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
-    uint32_t maxSampleMaskWords = {};
-    VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
-    float timestampPeriod = {};
-    uint32_t maxClipDistances = {};
-    uint32_t maxCullDistances = {};
-    uint32_t maxCombinedClipAndCullDistances = {};
-    uint32_t discreteQueuePriorities = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
-    float pointSizeGranularity = {};
-    float lineWidthGranularity = {};
-    VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
-    VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
-
+    uint32_t                                           maxImageDimension1D = {};
+    uint32_t                                           maxImageDimension2D = {};
+    uint32_t                                           maxImageDimension3D = {};
+    uint32_t                                           maxImageDimensionCube = {};
+    uint32_t                                           maxImageArrayLayers = {};
+    uint32_t                                           maxTexelBufferElements = {};
+    uint32_t                                           maxUniformBufferRange = {};
+    uint32_t                                           maxStorageBufferRange = {};
+    uint32_t                                           maxPushConstantsSize = {};
+    uint32_t                                           maxMemoryAllocationCount = {};
+    uint32_t                                           maxSamplerAllocationCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   bufferImageGranularity = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   sparseAddressSpaceSize = {};
+    uint32_t                                           maxBoundDescriptorSets = {};
+    uint32_t                                           maxPerStageDescriptorSamplers = {};
+    uint32_t                                           maxPerStageDescriptorUniformBuffers = {};
+    uint32_t                                           maxPerStageDescriptorStorageBuffers = {};
+    uint32_t                                           maxPerStageDescriptorSampledImages = {};
+    uint32_t                                           maxPerStageDescriptorStorageImages = {};
+    uint32_t                                           maxPerStageDescriptorInputAttachments = {};
+    uint32_t                                           maxPerStageResources = {};
+    uint32_t                                           maxDescriptorSetSamplers = {};
+    uint32_t                                           maxDescriptorSetUniformBuffers = {};
+    uint32_t                                           maxDescriptorSetUniformBuffersDynamic = {};
+    uint32_t                                           maxDescriptorSetStorageBuffers = {};
+    uint32_t                                           maxDescriptorSetStorageBuffersDynamic = {};
+    uint32_t                                           maxDescriptorSetSampledImages = {};
+    uint32_t                                           maxDescriptorSetStorageImages = {};
+    uint32_t                                           maxDescriptorSetInputAttachments = {};
+    uint32_t                                           maxVertexInputAttributes = {};
+    uint32_t                                           maxVertexInputBindings = {};
+    uint32_t                                           maxVertexInputAttributeOffset = {};
+    uint32_t                                           maxVertexInputBindingStride = {};
+    uint32_t                                           maxVertexOutputComponents = {};
+    uint32_t                                           maxTessellationGenerationLevel = {};
+    uint32_t                                           maxTessellationPatchSize = {};
+    uint32_t                                           maxTessellationControlPerVertexInputComponents = {};
+    uint32_t                                           maxTessellationControlPerVertexOutputComponents = {};
+    uint32_t                                           maxTessellationControlPerPatchOutputComponents = {};
+    uint32_t                                           maxTessellationControlTotalOutputComponents = {};
+    uint32_t                                           maxTessellationEvaluationInputComponents = {};
+    uint32_t                                           maxTessellationEvaluationOutputComponents = {};
+    uint32_t                                           maxGeometryShaderInvocations = {};
+    uint32_t                                           maxGeometryInputComponents = {};
+    uint32_t                                           maxGeometryOutputComponents = {};
+    uint32_t                                           maxGeometryOutputVertices = {};
+    uint32_t                                           maxGeometryTotalOutputComponents = {};
+    uint32_t                                           maxFragmentInputComponents = {};
+    uint32_t                                           maxFragmentOutputAttachments = {};
+    uint32_t                                           maxFragmentDualSrcAttachments = {};
+    uint32_t                                           maxFragmentCombinedOutputResources = {};
+    uint32_t                                           maxComputeSharedMemorySize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3>  maxComputeWorkGroupCount = {};
+    uint32_t                                           maxComputeWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3>  maxComputeWorkGroupSize = {};
+    uint32_t                                           subPixelPrecisionBits = {};
+    uint32_t                                           subTexelPrecisionBits = {};
+    uint32_t                                           mipmapPrecisionBits = {};
+    uint32_t                                           maxDrawIndexedIndexValue = {};
+    uint32_t                                           maxDrawIndirectCount = {};
+    float                                              maxSamplerLodBias = {};
+    float                                              maxSamplerAnisotropy = {};
+    uint32_t                                           maxViewports = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2>  maxViewportDimensions = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2>     viewportBoundsRange = {};
+    uint32_t                                           viewportSubPixelBits = {};
+    size_t                                             minMemoryMapAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   minTexelBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   minUniformBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   minStorageBufferOffsetAlignment = {};
+    int32_t                                            minTexelOffset = {};
+    uint32_t                                           maxTexelOffset = {};
+    int32_t                                            minTexelGatherOffset = {};
+    uint32_t                                           maxTexelGatherOffset = {};
+    float                                              minInterpolationOffset = {};
+    float                                              maxInterpolationOffset = {};
+    uint32_t                                           subPixelInterpolationOffsetBits = {};
+    uint32_t                                           maxFramebufferWidth = {};
+    uint32_t                                           maxFramebufferHeight = {};
+    uint32_t                                           maxFramebufferLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             framebufferColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             framebufferDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             framebufferStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             framebufferNoAttachmentsSampleCounts = {};
+    uint32_t                                           maxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             sampledImageColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             sampledImageIntegerSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             sampledImageDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             sampledImageStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags             storageImageSampleCounts = {};
+    uint32_t                                           maxSampleMaskWords = {};
+    VULKAN_HPP_NAMESPACE::Bool32                       timestampComputeAndGraphics = {};
+    float                                              timestampPeriod = {};
+    uint32_t                                           maxClipDistances = {};
+    uint32_t                                           maxCullDistances = {};
+    uint32_t                                           maxCombinedClipAndCullDistances = {};
+    uint32_t                                           discreteQueuePriorities = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2>     pointSizeRange = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2>     lineWidthRange = {};
+    float                                              pointSizeGranularity = {};
+    float                                              lineWidthGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32                       strictLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32                       standardSampleLocations = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   optimalBufferCopyOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   optimalBufferCopyRowPitchAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize                   nonCoherentAtomSize = {};
   };
-  static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
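
[Reviewer note, not part of the generated header: PhysicalDeviceLimits is a byte-compatible wrapper over VkPhysicalDeviceLimits, and its ArrayWrapper1D members index and compare like std::array, which is what the generated operator== above relies on. A minimal sketch, assuming a valid vk::PhysicalDevice; the helper name printComputeLimits is hypothetical:

    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    void printComputeLimits( vk::PhysicalDevice gpu )
    {
      // getProperties() returns the wrapper by value; .limits is the struct above.
      const vk::PhysicalDeviceLimits limits = gpu.getProperties().limits;
      // ArrayWrapper1D<uint32_t, 3> indexes like a std::array.
      std::printf( "max compute workgroup: %ux%ux%u, %u invocations\n",
                   limits.maxComputeWorkGroupSize[0],
                   limits.maxComputeWorkGroupSize[1],
                   limits.maxComputeWorkGroupSize[2],
                   limits.maxComputeWorkGroupInvocations );
    }
]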

-  struct PhysicalDeviceLineRasterizationFeaturesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT
-      : rectangularLines( rectangularLines_ )
-      , bresenhamLines( bresenhamLines_ )
-      , smoothLines( smoothLines_ )
-      , stippledRectangularLines( stippledRectangularLines_ )
-      , stippledBresenhamLines( stippledBresenhamLines_ )
-      , stippledSmoothLines( stippledSmoothLines_ )
-    {}
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rectangularLines = rectangularLines_;
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
-    {
-      bresenhamLines = bresenhamLines_;
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
-    {
-      smoothLines = smoothLines_;
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
-    {
-      stippledRectangularLines = stippledRectangularLines_;
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
-    {
-      stippledBresenhamLines = stippledBresenhamLines_;
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
-    {
-      stippledSmoothLines = stippledSmoothLines_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines )
-          && ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines )
-          && ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines );
-    }
-
-    bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
-    VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
-    VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
-    VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
-    VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
-    VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
-  };
-  static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
-  {
-    using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
-  };
-
-  struct PhysicalDeviceLineRasterizationPropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
-      : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
-    {}
-
-    PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
-    }
-
-    bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
-    void* pNext = {};
-    uint32_t lineSubPixelPrecisionBits = {};
-  };
-  static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
-  {
-    using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
-  };
-
-  struct PhysicalDeviceMaintenance3Properties
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {},
-      VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxPerSetDescriptors( maxPerSetDescriptors_ )
-      , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
-    {}
-
-    PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
-    }
-
-    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
-          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
-    }
-
-    bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
-    void* pNext = {};
-    uint32_t maxPerSetDescriptors = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!"
);
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
-  {
-    using Type = PhysicalDeviceMaintenance3Properties;
-  };
-
-  struct PhysicalDeviceMemoryBudgetPropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {},
-      std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT
-      : heapBudget( heapBudget_ )
-      , heapUsage( heapUsage_ )
-    {}
-
-    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage );
-    }
-
-    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
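
[Reviewer note, not part of the generated header: the EXT properties structs above are returned through the pNext chain of the corresponding core query. A minimal sketch, assuming VK_EXT_memory_budget is available on the device; heapBudget0 is a made-up helper name:

    #include <vulkan/vulkan.hpp>

    vk::DeviceSize heapBudget0( vk::PhysicalDevice gpu )
    {
      vk::PhysicalDeviceMemoryBudgetPropertiesEXT budget{};
      vk::PhysicalDeviceMemoryProperties2 props2{};
      props2.pNext = &budget;               // chain the EXT struct behind the core query
      gpu.getMemoryProperties2( &props2 );  // one call fills both structures
      return budget.heapBudget[0];          // ArrayWrapper1D<DeviceSize, VK_MAX_MEMORY_HEAPS>
    }
]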
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
-  {
-    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
-  };
-
-  struct PhysicalDeviceMemoryPriorityFeaturesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
-      : memoryPriority( memoryPriority_ )
-    {}
-
-    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
-      return *this;
-    }
-
-    PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
-    {
-      memoryPriority = memoryPriority_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( memoryPriority == rhs.memoryPriority );
-    }
-
-    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!"
);
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
-  {
-    using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
-  };
-
-  struct PhysicalDeviceMemoryProperties
-  {
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {},
-      std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {},
-      uint32_t memoryHeapCount_ = {},
-      std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
-      : memoryTypeCount( memoryTypeCount_ )
-      , memoryTypes( memoryTypes_ )
-      , memoryHeapCount( memoryHeapCount_ )
-      , memoryHeaps( memoryHeaps_ )
-    {}
-
-    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
-    }
-
-    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes )
-          && ( memoryHeapCount == rhs.memoryHeapCount ) && ( memoryHeaps == rhs.memoryHeaps );
-    }
-
-    bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    uint32_t memoryTypeCount = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
-    uint32_t memoryHeapCount = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
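
[Reviewer note, not part of the generated header: PhysicalDeviceMemoryProperties is the classic input to memory-type selection. A minimal sketch, assuming a valid vk::PhysicalDevice; findMemoryType is a hypothetical helper name:

    #include <vulkan/vulkan.hpp>
    #include <cstdint>
    #include <stdexcept>

    uint32_t findMemoryType( vk::PhysicalDevice gpu, uint32_t typeBits, vk::MemoryPropertyFlags wanted )
    {
      const vk::PhysicalDeviceMemoryProperties mem = gpu.getMemoryProperties();
      for ( uint32_t i = 0; i < mem.memoryTypeCount; ++i )
      {
        // typeBits comes from vk::MemoryRequirements::memoryTypeBits.
        if ( ( typeBits & ( 1u << i ) ) && ( mem.memoryTypes[i].propertyFlags & wanted ) == wanted )
          return i;
      }
      throw std::runtime_error( "no suitable memory type" );
    }
]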
-
-  struct PhysicalDeviceMemoryProperties2
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
-
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
-      : memoryProperties( memoryProperties_ )
-    {}
-
-    PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
-    }
-
-    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( memoryProperties == rhs.memoryProperties );
-    }
-
-    bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!"
);
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
-  {
-    using Type = PhysicalDeviceMemoryProperties2;
-  };
-
-  struct PhysicalDeviceMeshShaderFeaturesNV
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
-      : taskShader( taskShader_ )
-      , meshShader( meshShader_ )
-    {}
-
-    PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
-    {
-      taskShader = taskShader_;
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
-    {
-      meshShader = meshShader_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
-    }
-
-    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader );
-    }
-
-    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
-    VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
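
[Reviewer note, not part of the generated header: feature structs like the one above are queried through vk::PhysicalDeviceFeatures2's pNext chain, and the same filled struct can be passed to device creation. A minimal sketch, assuming VK_NV_mesh_shader is advertised; supportsMeshShaders is a hypothetical name:

    #include <vulkan/vulkan.hpp>

    bool supportsMeshShaders( vk::PhysicalDevice gpu )
    {
      vk::PhysicalDeviceMeshShaderFeaturesNV mesh{};
      vk::PhysicalDeviceFeatures2 features2{};
      features2.pNext = &mesh;     // driver fills the chained NV struct
      gpu.getFeatures2( &features2 );
      return mesh.taskShader && mesh.meshShader;
    }
]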
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
-  {
-    using Type = PhysicalDeviceMeshShaderFeaturesNV;
-  };
-
-  struct PhysicalDeviceMeshShaderPropertiesNV
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
-
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {},
-      uint32_t maxTaskWorkGroupInvocations_ = {},
-      std::array<uint32_t, 3> const& maxTaskWorkGroupSize_ = {},
-      uint32_t maxTaskTotalMemorySize_ = {},
-      uint32_t maxTaskOutputCount_ = {},
-      uint32_t maxMeshWorkGroupInvocations_ = {},
-      std::array<uint32_t, 3> const& maxMeshWorkGroupSize_ = {},
-      uint32_t maxMeshTotalMemorySize_ = {},
-      uint32_t maxMeshOutputVertices_ = {},
-      uint32_t maxMeshOutputPrimitives_ = {},
-      uint32_t maxMeshMultiviewViewCount_ = {},
-      uint32_t meshOutputPerVertexGranularity_ = {},
-      uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
-      , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
-      , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ )
-      , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
-      , maxTaskOutputCount( maxTaskOutputCount_ )
-      , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
-      , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ )
-      , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
-      , maxMeshOutputVertices( maxMeshOutputVertices_ )
-      , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
-      , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
-      , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
-      , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
-    {}
-
-    PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
-    }
-
-    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
-          && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
-          && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
-          && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
-          && ( maxTaskOutputCount == rhs.maxTaskOutputCount )
-          && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
-          && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
-          && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
-          && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
-          && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
-          && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
-          && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
-          && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
-    }
-
-    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
-    void* pNext = {};
-    uint32_t maxDrawMeshTasksCount = {};
-    uint32_t maxTaskWorkGroupInvocations = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
-    uint32_t maxTaskTotalMemorySize = {};
-    uint32_t maxTaskOutputCount = {};
-    uint32_t maxMeshWorkGroupInvocations = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
-    uint32_t maxMeshTotalMemorySize = {};
-    uint32_t maxMeshOutputVertices = {};
-    uint32_t maxMeshOutputPrimitives = {};
-    uint32_t maxMeshMultiviewViewCount = {};
-    uint32_t meshOutputPerVertexGranularity = {};
-    uint32_t meshOutputPerPrimitiveGranularity = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
-  {
-    using Type = PhysicalDeviceMeshShaderPropertiesNV;
-  };
-
-  struct PhysicalDeviceMultiviewFeatures
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
-      : multiview( multiview_ )
-      , multiviewGeometryShader( multiviewGeometryShader_ )
-      , multiviewTessellationShader( multiviewTessellationShader_ )
-    {}
-
-    PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
-    {
-      multiview = multiview_;
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
-    {
-      multiviewGeometryShader = multiviewGeometryShader_;
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
-    {
-      multiviewTessellationShader = multiviewTessellationShader_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
-    }
-
-    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( multiview == rhs.multiview )
-          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
-          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
-    }
-
-    bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
-    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
-    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
-  {
-    using Type = PhysicalDeviceMultiviewFeatures;
-  };
-
-  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
-      : perViewPositionAllComponents( perViewPositionAllComponents_ )
-    {}
-
-    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
-    }
-
-    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
-    }
-
-    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
-  {
-    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
-  };
-
-  struct PhysicalDeviceMultiviewProperties
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
-      uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxMultiviewViewCount( maxMultiviewViewCount_ )
-      , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
-    {}
-
-    PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
-    }
-
-    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
-          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
-    }
-
-    bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
-    void* pNext = {};
-    uint32_t maxMultiviewViewCount = {};
-    uint32_t maxMultiviewInstanceIndex = {};
-  };
-  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
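
[Reviewer note, not part of the generated header: properties structs such as PhysicalDeviceMultiviewProperties are read back the same way, via vk::PhysicalDeviceProperties2. A minimal sketch; maxMultiviewViews is a hypothetical helper name:

    #include <vulkan/vulkan.hpp>
    #include <cstdint>

    uint32_t maxMultiviewViews( vk::PhysicalDevice gpu )
    {
      vk::PhysicalDeviceMultiviewProperties multiview{};
      vk::PhysicalDeviceProperties2 props2{};
      props2.pNext = &multiview;
      gpu.getProperties2( &props2 );
      return multiview.maxMultiviewViewCount;
    }
]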
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
-  {
-    using Type = PhysicalDeviceMultiviewProperties;
-  };
-
-  struct PhysicalDevicePCIBusInfoPropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {},
-      uint32_t pciBus_ = {},
-      uint32_t pciDevice_ = {},
-      uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
-      : pciDomain( pciDomain_ )
-      , pciBus( pciBus_ )
-      , pciDevice( pciDevice_ )
-      , pciFunction( pciFunction_ )
-    {}
-
-    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
-      return *this;
-    }
-
-    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus )
-          && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction );
-    }
-
-    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
-    void* pNext = {};
-    uint32_t pciDomain = {};
-    uint32_t pciBus = {};
-    uint32_t pciDevice = {};
-    uint32_t pciFunction = {};
-  };
-  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!"
);
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
-  {
-    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
-  };
-
-  struct PhysicalDevicePerformanceQueryFeaturesKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT
-      : performanceCounterQueryPools( performanceCounterQueryPools_ )
-      , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
-    {}
-
-    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) - offsetof( PhysicalDevicePerformanceQueryFeaturesKHR, pNext ) );
-      return *this;
-    }
-
-    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePerformanceQueryFeaturesKHR& operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
-      return *this;
-    }
-
-    PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
-    {
-      performanceCounterQueryPools = performanceCounterQueryPools_;
-      return *this;
-    }
-
-    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
-    {
-      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
-      return *this;
-    }
-
-    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
-    }
-
-    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext )
-          && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
-          && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
-    }
-
-    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
-    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
-  };
-  static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!"
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
-  {
-    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
-  };
-
-  struct PhysicalDevicePerformanceQueryPropertiesKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT
-      : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
-    {}
-
-    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) - offsetof( PhysicalDevicePerformanceQueryPropertiesKHR, pNext ) );
-      return *this;
-    }
-
-    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePerformanceQueryPropertiesKHR& operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
-    }
-
-    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
-    }
-
-    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
-
-  };
-  static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
-  {
-    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
-  };
-
-  struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT
-      : pipelineCreationCacheControl( pipelineCreationCacheControl_ )
-    {}
-
-    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) - offsetof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePipelineCreationCacheControlFeaturesEXT& operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pipelineCreationCacheControl = pipelineCreationCacheControl_;
-      return *this;
-    }
-
-
-    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
-    }
-
-    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
-    }
-
-    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
-
-  };
-  static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePipelineCreationCacheControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT>
-  {
-    using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
-  };
-
-  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
-      : pipelineExecutableInfo( pipelineExecutableInfo_ )
-    {}
-
-    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) );
-      return *this;
-    }
-
-    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pipelineExecutableInfo = pipelineExecutableInfo_;
-      return *this;
-    }
-
-
-    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
-    }
-
-    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
-    }
-
-    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
-
-  };
-  static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
-  {
-    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
-  };
-
-  struct PhysicalDevicePointClippingProperties
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
-      : pointClippingBehavior( pointClippingBehavior_ )
-    {}
-
-    PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) );
-      return *this;
-    }
-
-    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
-    }
-
-    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( pointClippingBehavior == rhs.pointClippingBehavior );
-    }
-
-    bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
-
-  };
-  static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
-  {
-    using Type = PhysicalDevicePointClippingProperties;
-  };
-
-  struct PhysicalDevicePrivateDataFeaturesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {} ) VULKAN_HPP_NOEXCEPT
-      : privateData( privateData_ )
-    {}
-
-    PhysicalDevicePrivateDataFeaturesEXT & operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePrivateDataFeaturesEXT ) - offsetof( PhysicalDevicePrivateDataFeaturesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePrivateDataFeaturesEXT& operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDevicePrivateDataFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDevicePrivateDataFeaturesEXT & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
-    {
-      privateData = privateData_;
-      return *this;
-    }
-
-
-    operator VkPhysicalDevicePrivateDataFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
-    }
-
-    operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( privateData == rhs.privateData );
-    }
-
-    bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
-
-  };
-  static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeaturesEXT>
-  {
-    using Type = PhysicalDevicePrivateDataFeaturesEXT;
-  };
-
   struct PhysicalDeviceSparseProperties
   {
-
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(
+      VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_            = {},
+      VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_            = {},
+      VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_                  = {},
+      VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_               = {} ) VULKAN_HPP_NOEXCEPT
       : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
       , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
       , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
@@ -56990,73 +66147,76 @@ namespace VULKAN_HPP_NAMESPACE
       , residencyNonResidentStrict( residencyNonResidentStrict_ )
     {}
 
-    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PhysicalDeviceSparseProperties& operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseProperties &
+      operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
       return *this;
     }
 
-
-    operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties *>( this );
     }
 
     operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
-          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
-          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
-          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
-          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) &&
+             ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) &&
+             ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) &&
+             ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
+             ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
     }
 
-    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-  public:
-    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape            = {};
     VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
-    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
-    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
-    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
-
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape            = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize                  = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict               = {};
   };
-  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value,
+                 "struct wrapper is not a standard layout!" );
 
   struct PhysicalDeviceProperties
   {
-
-
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {},
-                                                      uint32_t driverVersion_ = {},
-                                                      uint32_t vendorID_ = {},
-                                                      uint32_t deviceID_ = {},
-                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
-                                                      std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {},
-                                                      std::array<uint8_t, VK_UUID_SIZE> const& pipelineCacheUUID_ = {},
-                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
-                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(
+      uint32_t apiVersion_    = {},
+      uint32_t driverVersion_ = {},
+      uint32_t vendorID_      = {},
+      uint32_t deviceID_      = {},
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
+      std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_        = {},
+      std::array<uint8_t, VK_UUID_SIZE> const &                  pipelineCacheUUID_ = {},
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits                 limits_            = {},
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
       : apiVersion( apiVersion_ )
       , driverVersion( driverVersion_ )
       , vendorID( vendorID_ )
@@ -57068,2637 +66228,399 @@ namespace VULKAN_HPP_NAMESPACE
       , sparseProperties( sparseProperties_ )
     {}
 
-    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties &
+      operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
       return *this;
     }
 
-
-    operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceProperties *>( this );
     }
 
     operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceProperties *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceProperties const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProperties const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( apiVersion == rhs.apiVersion )
-          && ( driverVersion == rhs.driverVersion )
-          && ( vendorID == rhs.vendorID )
-          && ( deviceID == rhs.deviceID )
-          && ( deviceType == rhs.deviceType )
-          && ( deviceName == rhs.deviceName )
-          && ( pipelineCacheUUID == rhs.pipelineCacheUUID )
-          && ( limits == rhs.limits )
-          && ( sparseProperties == rhs.sparseProperties );
+      return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) &&
+             ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( deviceType == rhs.deviceType ) &&
+             ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
+             ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties );
     }
 
-    bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-  public:
-    uint32_t apiVersion = {};
-    uint32_t driverVersion = {};
-    uint32_t vendorID = {};
-    uint32_t deviceID = {};
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
-
+    uint32_t                                 apiVersion    = {};
+    uint32_t                                 driverVersion = {};
+    uint32_t                                 vendorID      = {};
+    uint32_t                                 deviceID      = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName        = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE>                  pipelineCacheUUID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits                                   limits            = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties                         sparseProperties  = {};
   };
-  static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
 
   struct PhysicalDeviceProperties2
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
 
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
       : properties( properties_ )
     {}
 
-    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProperties2 ) - offsetof( PhysicalDeviceProperties2, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2 &
+      operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
       return *this;
     }
 
-
-    operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
    }
 
     operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceProperties2 const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( properties == rhs.properties );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
     }
 
-    bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
-    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType            sType      = StructureType::ePhysicalDeviceProperties2;
+    void *                                         pNext      = {};
     VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
-
   };
-  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
   {
     using Type = PhysicalDeviceProperties2;
   };
+
   using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
 
-  struct PhysicalDeviceProtectedMemoryFeatures
+  struct QueryPoolPerformanceCreateInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eQueryPoolPerformanceCreateInfoKHR;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
-      : protectedMemory( protectedMemory_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t         queueFamilyIndex_  = {},
+                                                            uint32_t         counterIndexCount_ = {},
+                                                            const uint32_t * pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+      : queueFamilyIndex( queueFamilyIndex_ )
+      , counterIndexCount( counterIndexCount_ )
+      , pCounterIndices( pCounterIndices_ )
     {}
 
-    PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR
+      QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    QueryPoolPerformanceCreateInfoKHR(
+      uint32_t queueFamilyIndex_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ )
+      : queueFamilyIndex( queueFamilyIndex_ )
+      , counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) )
+      , pCounterIndices( counterIndices_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &
+      operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
     {
-      protectedMemory = protectedMemory_;
+      queueFamilyIndex = queueFamilyIndex_;
       return *this;
     }
 
-
-    operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT
+    QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+      counterIndexCount = counterIndexCount_;
+      return *this;
     }
 
-    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+      pCounterIndices = pCounterIndices_;
+      return *this;
     }
 
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    QueryPoolPerformanceCreateInfoKHR & setCounterIndices(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
+      pCounterIndices   = counterIndices_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default;
+    operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( this );
+    }
+
+    operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( protectedMemory == rhs.protectedMemory );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
+             ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices );
     }
 
-    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+    const void *                        pNext             = {};
+    uint32_t                            queueFamilyIndex  = {};
+    uint32_t                            counterIndexCount = {};
+    const uint32_t *                    pCounterIndices   = {};
   };
-  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
+  struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
   {
-    using Type = PhysicalDeviceProtectedMemoryFeatures;
+    using Type = QueryPoolPerformanceCreateInfoKHR;
   };
 
-  struct PhysicalDeviceProtectedMemoryProperties
+  struct QueueFamilyProperties
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
-      : protectedNoFault( protectedNoFault_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_         = {},
+                             uint32_t                         queueCount_         = {},
+                             uint32_t                         timestampValidBits_ = {},
+                             VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+      : queueFlags( queueFlags_ )
+      , queueCount( queueCount_ )
+      , timestampValidBits( timestampValidBits_ )
+      , minImageTransferGranularity( minImageTransferGranularity_ )
     {}
 
-    PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties &
+      operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
     {
-      *this = rhs;
+      return *reinterpret_cast<const VkQueueFamilyProperties *>( this );
     }
 
-    PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>(&rhs);
-      return *this;
+      return *reinterpret_cast<VkQueueFamilyProperties *>( this );
     }
 
-
-    operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
-    }
-
-    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueueFamilyProperties const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( protectedNoFault == rhs.protectedNoFault );
+      return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) &&
+             ( timestampValidBits == rhs.timestampValidBits ) &&
+             ( minImageTransferGranularity == rhs.minImageTransferGranularity );
     }
 
-    bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
-
+    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags                  = {};
+    uint32_t                         queueCount                  = {};
+    uint32_t                         timestampValidBits          = {};
+    VULKAN_HPP_NAMESPACE::Extent3D   minImageTransferGranularity = {};
   };
-  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
 
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
+  struct QueueFamilyProperties2
   {
-    using Type = PhysicalDeviceProtectedMemoryProperties;
-  };
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
 
-  struct PhysicalDevicePushDescriptorPropertiesKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxPushDescriptors( maxPushDescriptors_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(
+      VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : queueFamilyProperties( queueFamilyProperties_ )
     {}
 
-    PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
-    }
-
-    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default;
-#else
-    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxPushDescriptors == rhs.maxPushDescriptors );
-    }
-
-    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties2 &
+      operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueueFamilyProperties2 const & ) const = default;
+#else
+    bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
-    void* pNext = {};
-    uint32_t maxPushDescriptors = {};
-
-  };
-  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
-  {
-    using Type = PhysicalDevicePushDescriptorPropertiesKHR;
-  };
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  struct PhysicalDeviceRayTracingFeaturesKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {},
-                                                              VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT
-      : rayTracing( rayTracing_ )
-      , rayTracingShaderGroupHandleCaptureReplay( rayTracingShaderGroupHandleCaptureReplay_ )
-      , rayTracingShaderGroupHandleCaptureReplayMixed( rayTracingShaderGroupHandleCaptureReplayMixed_ )
-      , rayTracingAccelerationStructureCaptureReplay( rayTracingAccelerationStructureCaptureReplay_ )
-      , rayTracingIndirectTraceRays( rayTracingIndirectTraceRays_ )
-      , rayTracingIndirectAccelerationStructureBuild( rayTracingIndirectAccelerationStructureBuild_ )
-      , rayTracingHostAccelerationStructureCommands( rayTracingHostAccelerationStructureCommands_ )
-      , rayQuery( rayQuery_ )
-      , rayTracingPrimitiveCulling( rayTracingPrimitiveCulling_ )
-    {}
-
-    PhysicalDeviceRayTracingFeaturesKHR & operator=( PhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingFeaturesKHR ) - offsetof( PhysicalDeviceRayTracingFeaturesKHR, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR& operator=( VkPhysicalDeviceRayTracingFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingFeaturesKHR const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracing( VULKAN_HPP_NAMESPACE::Bool32 rayTracing_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracing = rayTracing_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracingShaderGroupHandleCaptureReplay = rayTracingShaderGroupHandleCaptureReplay_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracingShaderGroupHandleCaptureReplayMixed = rayTracingShaderGroupHandleCaptureReplayMixed_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracingAccelerationStructureCaptureReplay = rayTracingAccelerationStructureCaptureReplay_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectTraceRays( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracingIndirectTraceRays = rayTracingIndirectTraceRays_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingIndirectAccelerationStructureBuild( VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracingIndirectAccelerationStructureBuild = rayTracingIndirectAccelerationStructureBuild_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingHostAccelerationStructureCommands( VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracingHostAccelerationStructureCommands = rayTracingHostAccelerationStructureCommands_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayQuery = rayQuery_;
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingFeaturesKHR & setRayTracingPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
-    {
-      rayTracingPrimitiveCulling = rayTracingPrimitiveCulling_;
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceRayTracingFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceRayTracingFeaturesKHR*>( this );
-    }
-
-    operator VkPhysicalDeviceRayTracingFeaturesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceRayTracingFeaturesKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceRayTracingFeaturesKHR const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( rayTracing == rhs.rayTracing )
-          && ( rayTracingShaderGroupHandleCaptureReplay == rhs.rayTracingShaderGroupHandleCaptureReplay )
-          && ( rayTracingShaderGroupHandleCaptureReplayMixed == rhs.rayTracingShaderGroupHandleCaptureReplayMixed )
-          && ( rayTracingAccelerationStructureCaptureReplay == rhs.rayTracingAccelerationStructureCaptureReplay )
-          && ( rayTracingIndirectTraceRays == rhs.rayTracingIndirectTraceRays )
-          && ( rayTracingIndirectAccelerationStructureBuild == rhs.rayTracingIndirectAccelerationStructureBuild )
-          && ( rayTracingHostAccelerationStructureCommands == rhs.rayTracingHostAccelerationStructureCommands )
-          && ( rayQuery == rhs.rayQuery )
-          && ( rayTracingPrimitiveCulling == rhs.rayTracingPrimitiveCulling );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties );
     }
 
-    bool operator!=( PhysicalDeviceRayTracingFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingFeaturesKHR;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracing = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplay = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracingShaderGroupHandleCaptureReplayMixed = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracingAccelerationStructureCaptureReplay = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectTraceRays = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracingIndirectAccelerationStructureBuild = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracingHostAccelerationStructureCommands = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
-    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPrimitiveCulling = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType         sType                 = StructureType::eQueueFamilyProperties2;
+    void *                                      pNext                 = {};
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
   };
-  static_assert( sizeof( PhysicalDeviceRayTracingFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingFeaturesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingFeaturesKHR>
+  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
   {
-    using Type = PhysicalDeviceRayTracingFeaturesKHR;
+    using Type = QueueFamilyProperties2;
   };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  struct PhysicalDeviceRayTracingPropertiesKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesKHR( uint32_t shaderGroupHandleSize_ = {},
-                                                                uint32_t maxRecursionDepth_ = {},
-                                                                uint32_t maxShaderGroupStride_ = {},
-                                                                uint32_t shaderGroupBaseAlignment_ = {},
-                                                                uint64_t maxGeometryCount_ = {},
-                                                                uint64_t maxInstanceCount_ = {},
-                                                                uint64_t maxPrimitiveCount_ = {},
-                                                                uint32_t maxDescriptorSetAccelerationStructures_ = {},
-                                                                uint32_t shaderGroupHandleCaptureReplaySize_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderGroupHandleSize( shaderGroupHandleSize_ )
-      , maxRecursionDepth( maxRecursionDepth_ )
-      , maxShaderGroupStride( maxShaderGroupStride_ )
-      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
-      , maxGeometryCount( maxGeometryCount_ )
-      , maxInstanceCount( maxInstanceCount_ )
-      , maxPrimitiveCount( maxPrimitiveCount_ )
-      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
-      , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ )
-    {}
-
-    PhysicalDeviceRayTracingPropertiesKHR & operator=( PhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingPropertiesKHR ) - offsetof( PhysicalDeviceRayTracingPropertiesKHR, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesKHR( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceRayTracingPropertiesKHR& operator=( VkPhysicalDeviceRayTracingPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesKHR const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceRayTracingPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
-    }
-
-    operator VkPhysicalDeviceRayTracingPropertiesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceRayTracingPropertiesKHR const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
-          && ( maxRecursionDepth == rhs.maxRecursionDepth )
-          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
-          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
-          && ( maxGeometryCount == rhs.maxGeometryCount )
-          && ( maxInstanceCount == rhs.maxInstanceCount )
-          && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
-          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
-          && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize );
-    }
-
-    bool operator!=( PhysicalDeviceRayTracingPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesKHR;
-    void* pNext = {};
-    uint32_t shaderGroupHandleSize = {};
-    uint32_t maxRecursionDepth = {};
-    uint32_t maxShaderGroupStride = {};
-    uint32_t shaderGroupBaseAlignment = {};
-    uint64_t maxGeometryCount = {};
-    uint64_t maxInstanceCount = {};
-    uint64_t maxPrimitiveCount = {};
-    uint32_t maxDescriptorSetAccelerationStructures = {};
-    uint32_t shaderGroupHandleCaptureReplaySize = {};
-
-  };
-  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPropertiesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesKHR>
-  {
-    using Type = PhysicalDeviceRayTracingPropertiesKHR;
-  };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-  struct PhysicalDeviceRayTracingPropertiesNV
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
-                                                               uint32_t maxRecursionDepth_ = {},
-                                                               uint32_t maxShaderGroupStride_ = {},
-                                                               uint32_t shaderGroupBaseAlignment_ = {},
-                                                               uint64_t maxGeometryCount_ = {},
-                                                               uint64_t maxInstanceCount_ = {},
-                                                               uint64_t maxTriangleCount_ = {},
-                                                               uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderGroupHandleSize( shaderGroupHandleSize_ )
-      , maxRecursionDepth( maxRecursionDepth_ )
-      , maxShaderGroupStride( maxShaderGroupStride_ )
-      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
-      , maxGeometryCount( maxGeometryCount_ )
-      , maxInstanceCount( maxInstanceCount_ )
-      , maxTriangleCount( maxTriangleCount_ )
-      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
-    {}
-
-    PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
-    }
-
-    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
-          && ( maxRecursionDepth == rhs.maxRecursionDepth )
-          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
-          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
-          && ( maxGeometryCount == rhs.maxGeometryCount )
-          && ( maxInstanceCount == rhs.maxInstanceCount )
-          && ( maxTriangleCount == rhs.maxTriangleCount )
-          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
-    }
-
-    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
StructureType::ePhysicalDeviceRayTracingPropertiesNV; - void* pNext = {}; - uint32_t shaderGroupHandleSize = {}; - uint32_t maxRecursionDepth = {}; - uint32_t maxShaderGroupStride = {}; - uint32_t shaderGroupBaseAlignment = {}; - uint64_t maxGeometryCount = {}; - uint64_t maxInstanceCount = {}; - uint64_t maxTriangleCount = {}; - uint32_t maxDescriptorSetAccelerationStructures = {}; - - }; - static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PhysicalDeviceRayTracingPropertiesNV; - }; - - struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT - : representativeFragmentTest( representativeFragmentTest_ ) - {} - - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) ); - return *this; - } - - PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT - { - representativeFragmentTest = representativeFragmentTest_; - return *this; - } - - - operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default; -#else - bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( representativeFragmentTest == rhs.representativeFragmentTest ); - } - - bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; - - }; - static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), 
"struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; - }; - - struct PhysicalDeviceRobustness2FeaturesEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT - : robustBufferAccess2( robustBufferAccess2_ ) - , robustImageAccess2( robustImageAccess2_ ) - , nullDescriptor( nullDescriptor_ ) - {} - - PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRobustness2FeaturesEXT ) - offsetof( PhysicalDeviceRobustness2FeaturesEXT, pNext ) ); - return *this; - } - - PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceRobustness2FeaturesEXT& operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceRobustness2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT - { - robustBufferAccess2 = robustBufferAccess2_; - return *this; - } - - PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT - { - robustImageAccess2 = robustImageAccess2_; - return *this; - } - - PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT - { - nullDescriptor = nullDescriptor_; - return *this; - } - - - operator VkPhysicalDeviceRobustness2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const& ) const = default; -#else - bool operator==( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) - && ( robustImageAccess2 == rhs.robustImageAccess2 ) - && ( nullDescriptor == rhs.nullDescriptor ); - } - - bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; - - }; - static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" 
-  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
-  {
-    using Type = PhysicalDeviceRobustness2FeaturesEXT;
-  };
-
-  struct PhysicalDeviceRobustness2PropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {},
-      VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
-      : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ )
-      , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
-    {}
-
-    PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRobustness2PropertiesEXT ) - offsetof( PhysicalDeviceRobustness2PropertiesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceRobustness2PropertiesEXT& operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>(&rhs);
-      return *this;
-    }
-
-    operator VkPhysicalDeviceRobustness2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
-          && ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
-    }
-
-    bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
-  };
-  static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
-  {
-    using Type = PhysicalDeviceRobustness2PropertiesEXT;
-  };
-
-  struct PhysicalDeviceSampleLocationsPropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
-      VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
-      std::array<float, 2> const& sampleLocationCoordinateRange_ = {},
-      uint32_t sampleLocationSubPixelBits_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
-      : sampleLocationSampleCounts( sampleLocationSampleCounts_ )
-      , maxSampleLocationGridSize( maxSampleLocationGridSize_ )
-      , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ )
-      , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
-      , variableSampleLocations( variableSampleLocations_ )
-    {}
-
-    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>(&rhs);
-      return *this;
-    }
-
-    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
-          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
-          && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange )
-          && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
-          && ( variableSampleLocations == rhs.variableSampleLocations );
-    }
-
-    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
-    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
-    uint32_t sampleLocationSubPixelBits = {};
-    VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
-  };
-  static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
-  {
-    using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
-  };
-
-  struct PhysicalDeviceSamplerFilterMinmaxProperties
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT
-      : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
-      , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
-    {}
-
-    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) - offsetof( PhysicalDeviceSamplerFilterMinmaxProperties, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceSamplerFilterMinmaxProperties& operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>(&rhs);
-      return *this;
-    }
-
-    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
-    }
-
-    operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
-          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
-    }
-
-    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
-    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
-  };
-  static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
-  {
-    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
-  };
-
-  struct PhysicalDeviceSamplerYcbcrConversionFeatures
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT
-      : samplerYcbcrConversion( samplerYcbcrConversion_ )
-    {}
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
-    {
-      samplerYcbcrConversion = samplerYcbcrConversion_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
-    }
-
-    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
-    }
-
-    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
-  };
-  static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
-  {
-    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
-  };
-
-  struct PhysicalDeviceScalarBlockLayoutFeatures
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) VULKAN_HPP_NOEXCEPT
-      : scalarBlockLayout( scalarBlockLayout_ )
-    {}
-
-    PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) - offsetof( PhysicalDeviceScalarBlockLayoutFeatures, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceScalarBlockLayoutFeatures& operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
-    {
-      scalarBlockLayout = scalarBlockLayout_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceScalarBlockLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
-    }
-
-    operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( scalarBlockLayout == rhs.scalarBlockLayout );
-    }
-
-    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
-  };
-  static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
-  {
-    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
-  };
-
-  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
-      : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
-    {}
-
-    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) - offsetof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceSeparateDepthStencilLayoutsFeatures& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
-    {
-      separateDepthStencilLayouts = separateDepthStencilLayouts_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
-    }
-
-    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
-    }
-
-    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
-  };
-  static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
-  {
-    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
-  };
-
-  struct PhysicalDeviceShaderAtomicInt64Features
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
-      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
-    {}
-
-    PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderAtomicInt64Features ) - offsetof( PhysicalDeviceShaderAtomicInt64Features, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderAtomicInt64Features& operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceShaderAtomicInt64Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderAtomicInt64Features const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
-          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
-    }
-
-    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
-  {
-    using Type = PhysicalDeviceShaderAtomicInt64Features;
-  };
-
-  struct PhysicalDeviceShaderClockFeaturesKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderSubgroupClock( shaderSubgroupClock_ )
-      , shaderDeviceClock( shaderDeviceClock_ )
-    {}
-
-    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderClockFeaturesKHR ) - offsetof( PhysicalDeviceShaderClockFeaturesKHR, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderClockFeaturesKHR& operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceShaderClockFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderSubgroupClock = shaderSubgroupClock_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderDeviceClock = shaderDeviceClock_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderClockFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderSubgroupClock == rhs.shaderSubgroupClock )
-          && ( shaderDeviceClock == rhs.shaderDeviceClock );
-    }
-
-    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
-  {
-    using Type = PhysicalDeviceShaderClockFeaturesKHR;
-  };
-
-  struct PhysicalDeviceShaderCoreProperties2AMD
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
-      uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderCoreFeatures( shaderCoreFeatures_ )
-      , activeComputeUnitCount( activeComputeUnitCount_ )
-    {}
-
-    PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>(&rhs);
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderCoreFeatures == rhs.shaderCoreFeatures )
-          && ( activeComputeUnitCount == rhs.activeComputeUnitCount );
-    }
-
-    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
-    uint32_t activeComputeUnitCount = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
-  {
-    using Type = PhysicalDeviceShaderCoreProperties2AMD;
-  };
-
-  struct PhysicalDeviceShaderCorePropertiesAMD
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {},
-      uint32_t shaderArraysPerEngineCount_ = {},
-      uint32_t computeUnitsPerShaderArray_ = {},
-      uint32_t simdPerComputeUnit_ = {},
-      uint32_t wavefrontsPerSimd_ = {},
-      uint32_t wavefrontSize_ = {},
-      uint32_t sgprsPerSimd_ = {},
-      uint32_t minSgprAllocation_ = {},
-      uint32_t maxSgprAllocation_ = {},
-      uint32_t sgprAllocationGranularity_ = {},
-      uint32_t vgprsPerSimd_ = {},
-      uint32_t minVgprAllocation_ = {},
-      uint32_t maxVgprAllocation_ = {},
-      uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderEngineCount( shaderEngineCount_ )
-      , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
-      , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
-      , simdPerComputeUnit( simdPerComputeUnit_ )
-      , wavefrontsPerSimd( wavefrontsPerSimd_ )
-      , wavefrontSize( wavefrontSize_ )
-      , sgprsPerSimd( sgprsPerSimd_ )
-      , minSgprAllocation( minSgprAllocation_ )
-      , maxSgprAllocation( maxSgprAllocation_ )
-      , sgprAllocationGranularity( sgprAllocationGranularity_ )
-      , vgprsPerSimd( vgprsPerSimd_ )
-      , minVgprAllocation( minVgprAllocation_ )
-      , maxVgprAllocation( maxVgprAllocation_ )
-      , vgprAllocationGranularity( vgprAllocationGranularity_ )
-    {}
-
-    PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>(&rhs);
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderEngineCount == rhs.shaderEngineCount )
-          && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
-          && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
-          && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
-          && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
-          && ( wavefrontSize == rhs.wavefrontSize )
-          && ( sgprsPerSimd == rhs.sgprsPerSimd )
-          && ( minSgprAllocation == rhs.minSgprAllocation )
-          && ( maxSgprAllocation == rhs.maxSgprAllocation )
-          && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
-          && ( vgprsPerSimd == rhs.vgprsPerSimd )
-          && ( minVgprAllocation == rhs.minVgprAllocation )
-          && ( maxVgprAllocation == rhs.maxVgprAllocation )
-          && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
-    }
-
-    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
-    void* pNext = {};
-    uint32_t shaderEngineCount = {};
-    uint32_t shaderArraysPerEngineCount = {};
-    uint32_t computeUnitsPerShaderArray = {};
-    uint32_t simdPerComputeUnit = {};
-    uint32_t wavefrontsPerSimd = {};
-    uint32_t wavefrontSize = {};
-    uint32_t sgprsPerSimd = {};
-    uint32_t minSgprAllocation = {};
-    uint32_t maxSgprAllocation = {};
-    uint32_t sgprAllocationGranularity = {};
-    uint32_t vgprsPerSimd = {};
-    uint32_t minVgprAllocation = {};
-    uint32_t maxVgprAllocation = {};
-    uint32_t vgprAllocationGranularity = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
-  {
-    using Type = PhysicalDeviceShaderCorePropertiesAMD;
-  };
-
-  struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
-    {}
-
-    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
-    }
-
-    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>
-  {
-    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
-  };
-
-  struct PhysicalDeviceShaderDrawParametersFeatures
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderDrawParameters( shaderDrawParameters_ )
-    {}
-
-    PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceShaderDrawParametersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderDrawParameters = shaderDrawParameters_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderDrawParameters == rhs.shaderDrawParameters );
-    }
-
-    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
-  {
-    using Type = PhysicalDeviceShaderDrawParametersFeatures;
-  };
-
-  struct PhysicalDeviceShaderFloat16Int8Features
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
-      VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderFloat16( shaderFloat16_ )
-      , shaderInt8( shaderInt8_ )
-    {}
-
-    PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderFloat16Int8Features ) - offsetof( PhysicalDeviceShaderFloat16Int8Features, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderFloat16Int8Features& operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceShaderFloat16Int8Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderFloat16 = shaderFloat16_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderInt8 = shaderInt8_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderFloat16Int8Features const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderFloat16 == rhs.shaderFloat16 )
-          && ( shaderInt8 == rhs.shaderInt8 );
-    }
-
-    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
-  {
-    using Type = PhysicalDeviceShaderFloat16Int8Features;
-  };
-
-  struct PhysicalDeviceShaderImageFootprintFeaturesNV
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT
-      : imageFootprint( imageFootprint_ )
-    {}
-
-    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
-    {
-      imageFootprint = imageFootprint_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( imageFootprint == rhs.imageFootprint );
-    }
-
-    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
-  {
-    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
-  };
-
-  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
-
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT
-      : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
-    {}
-
-    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
-    {
-      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
-    }
-
-    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
-    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
-    }
-
-    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
-  };
-  static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
); - - template <> - struct CppType - { - using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; - }; - - struct PhysicalDeviceShaderSMBuiltinsFeaturesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSMBuiltins( shaderSMBuiltins_ ) - {} - - PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) ); - return *this; - } - - PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT - { - shaderSMBuiltins = shaderSMBuiltins_; - return *this; - } - - - operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default; -#else - bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); - } - - bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; - - }; - static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; - }; - - struct PhysicalDeviceShaderSMBuiltinsPropertiesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, - uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSMCount( shaderSMCount_ ) - , shaderWarpsPerSM( shaderWarpsPerSM_ ) - {} - - PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) ); - return *this; - } - - PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default; -#else - bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderSMCount == rhs.shaderSMCount ) - && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); - } - - bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; - void* pNext = {}; - uint32_t shaderSMCount = {}; - uint32_t shaderWarpsPerSM = {}; - - }; - static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; - }; - - struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) - {} - - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) - offsetof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures, pNext ) ); - return *this; - } - - PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceShaderSubgroupExtendedTypesFeatures& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT - { - shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; - return *this; - } - - - operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& ) const = default; -#else - bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); - } - - bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; - - }; - static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; - }; - - struct PhysicalDeviceShadingRateImageFeaturesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRateImage( shadingRateImage_ ) - , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) - {} - - PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) ); - return *this; - } - - PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT - { - shadingRateImage = shadingRateImage_; - return *this; - } - - PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT - { - shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; - return *this; - } - - - operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default; -#else - bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shadingRateImage == rhs.shadingRateImage ) - && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); - } - - bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; - - }; - static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
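// Usage sketch: feature structs like this one are also used as *inputs*, by
// chaining them into DeviceCreateInfo::pNext to enable the feature at device
// creation (assumes the extension was added to ppEnabledExtensionNames).
//   vk::PhysicalDeviceShadingRateImageFeaturesNV shadingRate;
//   shadingRate.setShadingRateImage( VK_TRUE )
//              .setShadingRateCoarseSampleOrder( VK_TRUE );
//   vk::DeviceCreateInfo deviceInfo;
//   deviceInfo.setPNext( &shadingRate );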
); - - template <> - struct CppType - { - using Type = PhysicalDeviceShadingRateImageFeaturesNV; - }; - - struct PhysicalDeviceShadingRateImagePropertiesNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; - - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, - uint32_t shadingRatePaletteSize_ = {}, - uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRateTexelSize( shadingRateTexelSize_ ) - , shadingRatePaletteSize( shadingRatePaletteSize_ ) - , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) - {} - - PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) ); - return *this; - } - - PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default; -#else - bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) - && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) - && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); - } - - bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; - uint32_t shadingRatePaletteSize = {}; - uint32_t shadingRateMaxCoarseSamples = {}; - - }; - static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
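// Usage sketch: shadingRateTexelSize is the framebuffer footprint of one
// shading-rate-image texel, so a rate image covering the framebuffer is sized
// by integer ceiling division (`props` is assumed to be a filled
// PhysicalDeviceShadingRateImagePropertiesNV; fbWidth/fbHeight are hypothetical).
//   uint32_t w = ( fbWidth  + props.shadingRateTexelSize.width  - 1 ) / props.shadingRateTexelSize.width;
//   uint32_t h = ( fbHeight + props.shadingRateTexelSize.height - 1 ) / props.shadingRateTexelSize.height;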
); - - template <> - struct CppType - { - using Type = PhysicalDeviceShadingRateImagePropertiesNV; + using Type = QueueFamilyProperties2; }; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; struct PhysicalDeviceSparseImageFormatInfo2 { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSparseImageFormatInfo2; - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT : format( format_ ) , type( type_ ) , samples( samples_ ) @@ -59706,24 +66628,25 @@ namespace VULKAN_HPP_NAMESPACE , tiling( tiling_ ) {} - PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & + operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseImageFormatInfo2 & + operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -59741,7 +66664,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseImageFormatInfo2 & + setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; @@ -59759,135 +66683,20183 @@ 
namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default; #else - bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( type == rhs.type ) - && ( samples == rhs.samples ) - && ( usage == rhs.usage ) - && ( tiling == rhs.tiling ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( samples == rhs.samples ) && ( usage == rhs.usage ) && ( tiling == rhs.tiling ); } - bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; - + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; }; - static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
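// Usage sketch: filling this info struct with the fluent setters and passing
// it to the enhanced-mode query (assumes a valid vk::PhysicalDevice `gpu`).
//   vk::PhysicalDeviceSparseImageFormatInfo2 info;
//   info.setFormat( vk::Format::eR8G8B8A8Unorm )
//       .setType( vk::ImageType::e2D )
//       .setSamples( vk::SampleCountFlagBits::e1 )
//       .setUsage( vk::ImageUsageFlagBits::eSampled )
//       .setTiling( vk::ImageTiling::eOptimal );
//   std::vector<vk::SparseImageFormatProperties2> props =
//       gpu.getSparseImageFormatProperties2( info );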
);

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
  {
    using Type = PhysicalDeviceSparseImageFormatInfo2;
  };
+  using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
+
+  struct SparseImageFormatProperties2
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(
+      VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties2 &
+      operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties2 *>( this );
+    }
+
+    operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageFormatProperties2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
+#else
+    bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
+    }
+
+    bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType               sType      = StructureType::eSparseImageFormatProperties2;
+    void *                                            pNext      = {};
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
+  };
+  static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = SparseImageFormatProperties2; + }; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + struct FramebufferMixedSamplesCombinationNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eFramebufferMixedSamplesCombinationNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ) + , rasterizationSamples( rasterizationSamples_ ) + , depthStencilSamples( depthStencilSamples_ ) + , colorSamples( colorSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferMixedSamplesCombinationNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferMixedSamplesCombinationNV & + operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV & + operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; +#else + bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( coverageReductionMode == rhs.coverageReductionMode ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && + ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); + } + + bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; + }; + static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
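// Usage sketch: instances of this struct are returned by the
// VK_NV_coverage_reduction_mode query, e.g. scanning for a combination that
// pairs 1x color with 4x depth/stencil (assumes a valid vk::PhysicalDevice
// `gpu` with the extension available).
//   auto combos = gpu.getSupportedFramebufferMixedSamplesCombinationsNV();
//   for ( vk::FramebufferMixedSamplesCombinationNV const & c : combos )
//     if ( ( c.colorSamples & vk::SampleCountFlagBits::e1 ) &&
//          ( c.depthStencilSamples & vk::SampleCountFlagBits::e4 ) )
//       { /* use c.coverageReductionMode */ }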
); + + template <> + struct CppType + { + using Type = FramebufferMixedSamplesCombinationNV; + }; + + struct SurfaceCapabilities2EXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + , supportedSurfaceCounters( supportedSurfaceCounters_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2EXT & + operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; +#else + bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && + ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && + ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && + ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ) && + ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); + } + + bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void * pNext = {}; + uint32_t 
minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + }; + static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = SurfaceCapabilities2EXT; + }; + + struct SurfaceCapabilitiesKHR + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesKHR & + operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && + ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && + ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( 
supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ); + } + + bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + }; + static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SurfaceCapabilities2KHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT + : surfaceCapabilities( surfaceCapabilities_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2KHR & + operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2KHR const & ) const = default; +#else + bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities ); + } + + bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; + }; + static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
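// Usage sketch: querying the extensible capabilities via
// VK_KHR_get_surface_capabilities2 (assumes a valid vk::SurfaceKHR `surface`
// and vk::PhysicalDevice `gpu`); the classic swapchain image-count clamp,
// where maxImageCount == 0 means "no upper limit":
//   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo;
//   surfaceInfo.setSurface( surface );
//   vk::SurfaceCapabilities2KHR caps2 = gpu.getSurfaceCapabilities2KHR( surfaceInfo );
//   uint32_t imageCount = caps2.surfaceCapabilities.minImageCount + 1;
//   if ( caps2.surfaceCapabilities.maxImageCount > 0 )
//     imageCount = std::min( imageCount, caps2.surfaceCapabilities.maxImageCount );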
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
+  {
+    using Type = SurfaceCapabilities2KHR;
+  };
+
+  struct SurfaceFormatKHR
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                        VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ =
+                          VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , colorSpace( colorSpace_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
+    }
+
+    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceFormatKHR const & ) const = default;
+#else
+    bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
+    }
+
+    bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Format        format     = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+  };
+  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceFormat2KHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
+      : surfaceFormat( surfaceFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFormat2KHR &
+      operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
+    }
+
+    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceFormat2KHR const & ) const = default;
+#else
+    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
+    }
+
+    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType         = StructureType::eSurfaceFormat2KHR;
+    void *                                 pNext         = {};
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+  };
+  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!"
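// Usage sketch: picking a swapchain format from the 2KHR query; each element
// wraps a SurfaceFormatKHR (assumes valid `gpu` and `surface` as above).
//   auto formats = gpu.getSurfaceFormats2KHR(
//       vk::PhysicalDeviceSurfaceInfo2KHR{}.setSurface( surface ) );
//   vk::SurfaceFormatKHR chosen = formats[0].surfaceFormat;  // fallback
//   for ( auto const & f : formats )
//     if ( f.surfaceFormat.format == vk::Format::eB8G8R8A8Srgb &&
//          f.surfaceFormat.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear )
//       chosen = f.surfaceFormat;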
); + + template <> + struct CppType + { + using Type = SurfaceFormat2KHR; + }; + + struct PhysicalDeviceToolPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( + std::array const & name_ = {}, + std::array const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, + std::array const & description_ = {}, + std::array const & layer_ = {} ) VULKAN_HPP_NOEXCEPT + : name( name_ ) + , version( version_ ) + , purposes( purposes_ ) + , description( description_ ) + , layer( layer_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceToolPropertiesEXT( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceToolPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT & + operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceToolPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceToolPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && + ( purposes == rhs.purposes ) && ( description == rhs.description ) && ( layer == rhs.layer ); + } + + bool operator!=( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; + VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; + }; + static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
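// Usage sketch: VK_EXT_tooling_info enumeration; the ArrayWrapper1D<char, N>
// members stream directly to std::ostream (assumes a valid vk::PhysicalDevice `gpu`).
//   std::vector<vk::PhysicalDeviceToolPropertiesEXT> tools = gpu.getToolPropertiesEXT();
//   for ( auto const & tool : tools )
//     std::cout << tool.name << " " << tool.version << "\n";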
); + + template <> + struct CppType + { + using Type = PhysicalDeviceToolPropertiesEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoCapabilitiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilitiesFlagsKHR capabilityFlags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxExtent_ = {}, + uint32_t maxReferencePicturesSlotsCount_ = {}, + uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT + : capabilityFlags( capabilityFlags_ ) + , minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ) + , minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ) + , videoPictureExtentGranularity( videoPictureExtentGranularity_ ) + , minExtent( minExtent_ ) + , maxExtent( maxExtent_ ) + , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) + , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR & + operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCapabilitiesKHR const & ) const = default; +# else + bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilityFlags == rhs.capabilityFlags ) && + ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) && + ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && + ( videoPictureExtentGranularity == rhs.videoPictureExtentGranularity ) && ( minExtent == rhs.minExtent ) && + ( maxExtent == rhs.maxExtent ) && + ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && + ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount ); + } + + bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesFlagsKHR capabilityFlags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity = {}; + 
VULKAN_HPP_NAMESPACE::Extent2D minExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxExtent = {}; + uint32_t maxReferencePicturesSlotsCount = {}; + uint32_t maxReferencePicturesActiveCount = {}; + }; + static_assert( sizeof( VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoCapabilitiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoProfilesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfilesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoProfilesKHR( uint32_t profileCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ = {} ) VULKAN_HPP_NOEXCEPT + : profileCount( profileCount_ ) + , pProfiles( pProfiles_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoProfilesKHR( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfilesKHR( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoProfilesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & operator=( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfilesKHR & operator=( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoProfilesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoProfilesKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT + { + profileCount = profileCount_; + return *this; + } + + VideoProfilesKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT + { + pProfiles = pProfiles_; + return *this; + } + + operator VkVideoProfilesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfilesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfilesKHR const & ) const = default; +# else + bool operator==( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && + ( pProfiles == rhs.pProfiles ); + } + + bool operator!=( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfilesKHR; + void * pNext = {}; + uint32_t profileCount = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles = {}; + }; + static_assert( sizeof( VideoProfilesKHR ) == sizeof( VkVideoProfilesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
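// Usage sketch (beta): pointing the container at a single decode profile;
// compiles only under VK_ENABLE_BETA_EXTENSIONS. `h264Profile` is a
// hypothetical, separately initialized vk::VideoProfileKHR.
//   vk::VideoProfilesKHR profiles;
//   profiles.setProfileCount( 1 )
//           .setPProfiles( &h264Profile );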
); + + template <> + struct CppType + { + using Type = VideoProfilesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceVideoFormatInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles_ = {} ) VULKAN_HPP_NOEXCEPT + : imageUsage( imageUsage_ ) + , pVideoProfiles( pVideoProfiles_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & + operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default; +# else + bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ) && + ( pVideoProfiles == rhs.pVideoProfiles ); + } + + bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles = {}; + }; + static_assert( sizeof( PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
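// Usage sketch (beta): this info struct couples the requested image usage with
// the profile list above before querying supported video formats (`profiles`
// as in the previous sketch; the usage flag named here is an assumption from
// the beta video extensions).
//   vk::PhysicalDeviceVideoFormatInfoKHR formatInfo;
//   formatInfo.imageUsage     = vk::ImageUsageFlagBits::eVideoDecodeDpbKHR;
//   formatInfo.pVideoProfiles = &profiles;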
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoFormatInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoFormatPropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatPropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoFormatPropertiesKHR & + operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatPropertiesKHR const & ) const = default; +# else + bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ); + } + + bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + static_assert( sizeof( VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoFormatPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDevice = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + VULKAN_HPP_CONSTEXPR PhysicalDevice() = default; + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( physicalDevice ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = physicalDevice; + return *this; + } +#endif + + PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice const & ) const = default; +#else + bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == rhs.m_physicalDevice; + } + + bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != rhs.m_physicalDevice; + } + + bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice < rhs.m_physicalDevice; + } +#endif + + //=== VK_VERSION_1_0 === + + template + void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties + getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType 
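// Usage sketch: in enhanced mode the pointer-taking entry points above gain
// by-value overloads (assumes a valid vk::PhysicalDevice `gpu`).
//   vk::PhysicalDeviceFeatures features = gpu.getFeatures();
//   vk::FormatProperties fmtProps =
//       gpu.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
//   bool blittable = static_cast<bool>(
//       fmtProps.optimalTilingFeatures & vk::FormatFeatureFlagBits::eBlitDst );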
type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDevice( const DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDeviceUnique( const DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( + const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
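// Usage sketch: createDeviceUnique pairs the new device with the ObjectDestroy
// deleter declared above, so it is destroyed automatically on scope exit
// (`queueFamilyIndex` is assumed to be a previously selected queue family).
//   float priority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );
//   vk::DeviceCreateInfo      deviceInfo( {}, 1, &queueInfo );
//   vk::UniqueDevice device = gpu.createDeviceUnique( deviceInfo );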
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ExtensionPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = LayerPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + 
getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( + const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( + const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_surface === + + template + VULKAN_HPP_NODISCARD Result + getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SurfaceFormatKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const 
& d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Rect2DAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Rect2DAllocator & rect2DAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD Result + getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = 
DisplayPlanePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( + uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + DisplayKHRAllocator & displayKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayModePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayModeCreateInfoKHR & 
createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + template + VULKAN_HPP_NODISCARD Result + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType::type + getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = VideoFormatPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_external_memory_capabilities === + + template + VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_physical_device_properties2 === + + template + void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFeatures2KHR( Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , 
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_memory_capabilities === + + template + void getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( + const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_semaphore_capabilities === + + template + void getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( + const PhysicalDeviceExternalSemaphoreInfo & 
externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + template + VULKAN_HPP_NODISCARD Result + acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireXlibDisplayEXT( Display & dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getRandROutputDisplayEXT( + Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + getRandROutputDisplayEXTUnique( Display & dpy, + RROutput rrOutput, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_fence_capabilities === + + template + void getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( + const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_performance_query === + + template + VULKAN_HPP_NODISCARD Result 
enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Allocator const & vectorAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::pair, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PerformanceCounterKHRAllocator, + typename B2 = PerformanceCounterDescriptionKHRAllocator, + typename std::enable_if::value && + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::pair, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( + const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_surface_capabilities2 === + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType>::type + getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SurfaceFormat2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_display_properties2 === + + template + VULKAN_HPP_NODISCARD Result + getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPlaneProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayKHR 
display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayModeProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_sample_locations === + + template + void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( + uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = TimeDomainEXTAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_fragment_shading_rate === + + template + VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( + uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * 
pFragmentShadingRates, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceFragmentShadingRateKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getFragmentShadingRatesKHR( + PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_tooling_info === + + template + VULKAN_HPP_NODISCARD Result + getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceToolPropertiesEXTAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getToolPropertiesEXT( PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_cooperative_matrix === + + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CooperativeMatrixPropertiesNVAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_coverage_reduction_mode === + + template + VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename 
FramebufferMixedSamplesCombinationNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getSupportedFramebufferMixedSamplesCombinationsNV( + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = FramebufferMixedSamplesCombinationNVAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + getWinrtDisplayNVUnique( uint32_t deviceRelativeId, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB * dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB & dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == VK_NULL_HANDLE; + } + + private: + VkPhysicalDevice m_physicalDevice = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + struct DeviceGroupDeviceCreateInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( + uint32_t physicalDeviceCount_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ) + , pPhysicalDevices( pPhysicalDevices_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupDeviceCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + physicalDevices_ ) + : physicalDeviceCount( static_cast( physicalDevices_.size() ) ) + , pPhysicalDevices( physicalDevices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & + operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + DeviceGroupDeviceCreateInfo & + setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo & setPhysicalDevices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + physicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = static_cast( physicalDevices_.size() ); + pPhysicalDevices = physicalDevices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; +#else + bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( pPhysicalDevices == rhs.pPhysicalDevices ); + } + + bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void * pNext = {}; + uint32_t physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {}; + }; + static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupDeviceCreateInfo; + }; + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + + struct DeviceGroupPresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, + const uint32_t * pDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ) + , pDeviceMasks( pDeviceMasks_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) + : swapchainCount( static_cast( deviceMasks_.size() ) ) + , pDeviceMasks( deviceMasks_.data() ) + , mode( mode_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & + operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceMasks = pDeviceMasks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR & setDeviceMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( deviceMasks_.size() ); + pDeviceMasks = deviceMasks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupPresentInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pDeviceMasks == rhs.pDeviceMasks ) && ( mode == rhs.mode ); + } + + bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint32_t * pDeviceMasks = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; + }; + static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupPresentInfoKHR; + }; + + struct DeviceGroupRenderPassBeginInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, + uint32_t deviceRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {} ) + VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( deviceRenderAreaCount_ ) + , pDeviceRenderAreas( pDeviceRenderAreas_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupRenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo( + uint32_t deviceMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ) + , pDeviceRenderAreas( deviceRenderAreas_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & + operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & + setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceRenderAreas = pDeviceRenderAreas_; + return *this; + } + +#if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) + VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); + pDeviceRenderAreas = deviceRenderAreas_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; +#else + bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && + ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + } + + bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {}; + }; + static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupRenderPassBeginInfo; + }; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + struct DeviceGroupSubmitInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, + uint32_t commandBufferCount_ = {}, + const uint32_t * pCommandBufferDeviceMasks_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const uint32_t * pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ) + , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ) + 
, commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ) + , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ) + , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo & + setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); + pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + DeviceGroupSubmitInfo & + setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) + VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBufferDeviceMasks_.size() ); + pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo & + setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); + pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
DeviceGroupSubmitInfo const & ) const = default; +#else + bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && + ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); + } + + bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const uint32_t * pWaitSemaphoreDeviceIndices = {}; + uint32_t commandBufferCount = {}; + const uint32_t * pCommandBufferDeviceMasks = {}; + uint32_t signalSemaphoreCount = {}; + const uint32_t * pSignalSemaphoreDeviceIndices = {}; + }; + static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupSubmitInfo; + }; + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + + struct DeviceGroupSwapchainCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceGroupSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT : modes( modes_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & + operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR & + setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT + { + modes = modes_; + return *this; + } + + operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes ); + } + + bool operator!=( 
DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + }; + static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceGroupSwapchainCreateInfoKHR; + }; + + struct DeviceMemoryOverallocationCreateInfoAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT + : overallocationBehavior( overallocationBehavior_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOverallocationCreateInfoAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & + operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD & + operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + { + overallocationBehavior = overallocationBehavior_; + return *this; + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; +#else + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( overallocationBehavior == rhs.overallocationBehavior ); + } + + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + }; + static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == + sizeof( 
VkDeviceMemoryOverallocationCreateInfoAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceMemoryOverallocationCreateInfoAMD; + }; + + struct DeviceMemoryReportCallbackDataEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryReportCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, + uint64_t memoryObjectId_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , type( type_ ) + , memoryObjectId( memoryObjectId_ ) + , size( size_ ) + , objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , heapIndex( heapIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryReportCallbackDataEXT & + operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && + ( memoryObjectId == rhs.memoryObjectId ) && ( size == rhs.size ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); + } + + bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; + uint64_t memoryObjectId = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint32_t heapIndex = {}; + }; + static_assert( sizeof( DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), + 
"struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DeviceMemoryReportCallbackDataEXT; + }; + + struct DevicePrivateDataCreateInfoEXT + { + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DevicePrivateDataCreateInfoEXT( uint32_t privateDataSlotRequestCount_ = {} ) VULKAN_HPP_NOEXCEPT + : privateDataSlotRequestCount( privateDataSlotRequestCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + DevicePrivateDataCreateInfoEXT( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePrivateDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfoEXT & + operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfoEXT & operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DevicePrivateDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DevicePrivateDataCreateInfoEXT & + setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT + { + privateDataSlotRequestCount = privateDataSlotRequestCount_; + return *this; + } + + operator VkDevicePrivateDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DevicePrivateDataCreateInfoEXT const & ) const = default; +#else + bool operator==( DevicePrivateDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); + } + + bool operator!=( DevicePrivateDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfoEXT; + const void * pNext = {}; + uint32_t privateDataSlotRequestCount = {}; + }; + static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DevicePrivateDataCreateInfoEXT; + }; + + struct DeviceQueueGlobalPriorityCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT + : globalPriority( globalPriority_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueGlobalPriorityCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoEXT & + operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoEXT & + operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT + { + globalPriority = globalPriority_; + return *this; + } + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const & ) const = default; +#else + bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); + } + + bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow; + }; + static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
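+
+  // Usage sketch (illustrative, assuming VK_EXT_global_priority is enabled): chain into a
+  // vk::DeviceQueueCreateInfo to request a queue with elevated scheduling priority:
+  //   vk::DeviceQueueGlobalPriorityCreateInfoEXT priority{ vk::QueueGlobalPriorityEXT::eHigh };
+  //   queueCreateInfo.pNext = &priority;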
); + + template <> + struct CppType + { + using Type = DeviceQueueGlobalPriorityCreateInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + struct DirectFBSurfaceCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, + IDirectFB * dfb_ = {}, + IDirectFBSurface * surface_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , dfb( dfb_ ) + , surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & + operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT + { + dfb = dfb_; + return *this; + } + + DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && + ( surface == rhs.surface ); + } + + bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; + IDirectFB * dfb = {}; + IDirectFBSurface * surface = {}; + }; + static_assert( sizeof( DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
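+
+  // Usage sketch (illustrative; `dfb` and `dfbSurface` are DirectFB objects created by the
+  // application before any Vulkan call):
+  //   vk::DirectFBSurfaceCreateInfoEXT surfaceInfo{ {}, dfb, dfbSurface };
+  //   vk::SurfaceKHR surface = instance.createDirectFBSurfaceEXT( surfaceInfo );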
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
+  {
+    using Type = DirectFBSurfaceCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  struct DispatchIndirectCommand
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand &
+      operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
+      return *this;
+    }
+
+    DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
+    }
+
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DispatchIndirectCommand const & ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
+    }
+
+    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+  };
+  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!"
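+
+  // Usage sketch (illustrative): the struct is bit-compatible with VkDispatchIndirectCommand,
+  // so it can be written straight into a buffer that vkCmdDispatchIndirect later consumes:
+  //   vk::DispatchIndirectCommand cmd{ 64, 1, 1 };            // x, y, z workgroup counts
+  //   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );
+  //   commandBuffer.dispatchIndirect( indirectBuffer, 0 );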
); + + struct DisplayNativeHdrSurfaceCapabilitiesAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT + : localDimmingSupport( localDimmingSupport_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayNativeHdrSurfaceCapabilitiesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; +#else + bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); + } + + bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; + }; + static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; + }; + + struct DisplayPresentInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcRect( srcRect_ ) + , dstRect( dstRect_ ) + , persistent( persistent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & + operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT + { + srcRect = srcRect_; + return *this; + } + + DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT + { + dstRect = dstRect_; + return *this; + } + + DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT + { + persistent = persistent_; + return *this; + } + + operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPresentInfoKHR const & ) const = default; +#else + bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && + ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); + } + + bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; + }; + static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
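+
+  // Usage sketch (illustrative, VK_KHR_display_swapchain): chain into vk::PresentInfoKHR to
+  // select source/destination rectangles for a direct-to-display present:
+  //   vk::DisplayPresentInfoKHR displayPresent{ srcRect, dstRect, /*persistent_=*/VK_FALSE };
+  //   presentInfo.pNext = &displayPresent;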
); + + template <> + struct CppType + { + using Type = DisplayPresentInfoKHR; + }; + + struct DisplaySurfaceCreateInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , displayMode( displayMode_ ) + , planeIndex( planeIndex_ ) + , planeStackIndex( planeStackIndex_ ) + , transform( transform_ ) + , globalAlpha( globalAlpha_ ) + , alphaMode( alphaMode_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + { + displayMode = displayMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeStackIndex = planeStackIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + { + globalAlpha = globalAlpha_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + { + alphaMode = alphaMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & + setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( displayMode == rhs.displayMode ) && ( planeIndex == rhs.planeIndex ) && + ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && + ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + }; + static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = DisplaySurfaceCreateInfoKHR; + }; + + struct DrawIndexedIndirectCommand + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : indexCount( indexCount_ ) + , instanceCount( instanceCount_ ) + , firstIndex( firstIndex_ ) + , vertexOffset( vertexOffset_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR + DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & + operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return 
*this; + } + + operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstIndex == rhs.firstIndex ) && ( vertexOffset == rhs.vertexOffset ) && + ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = {}; + }; + static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + struct DrawIndirectCommand + { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstVertex_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexCount( vertexCount_ ) + , instanceCount( instanceCount_ ) + , firstVertex( firstVertex_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & + operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } + + operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstVertex == rhs.firstVertex ) && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexCount = {}; + 
uint32_t instanceCount = {};
+    uint32_t firstVertex   = {};
+    uint32_t firstInstance = {};
+  };
+  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrawMeshTasksIndirectCommandNV
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {},
+                                                         uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : taskCount( taskCount_ )
+      , firstTask( firstTask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV &
+      operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+    DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskCount = taskCount_;
+      return *this;
+    }
+
+    DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstTask = firstTask_;
+      return *this;
+    }
+
+    operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV *>( this );
+    }
+
+    operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask );
+    }
+
+    bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t taskCount = {};
+    uint32_t firstTask = {};
+  };
+  static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value,
+                 "struct wrapper is not a standard layout!"
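+
+  // Usage sketch (illustrative): like the other *IndirectCommand structs above, this one is
+  // written into a device buffer and consumed by vkCmdDrawMeshTasksIndirectNV:
+  //   vk::DrawMeshTasksIndirectCommandNV cmd{ /*taskCount_=*/128, /*firstTask_=*/0 };
+  //   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );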
+
+  struct DrmFormatModifierPropertiesEXT
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(
+      uint64_t                                 drmFormatModifier_               = {},
+      uint32_t                                 drmFormatModifierPlaneCount_     = {},
+      VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+      , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesEXT &
+      operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
+#else
+    bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( drmFormatModifier == rhs.drmFormatModifier ) &&
+             ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
+             ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint64_t                                 drmFormatModifier               = {};
+    uint32_t                                 drmFormatModifierPlaneCount     = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  struct DrmFormatModifierPropertiesListEXT
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eDrmFormatModifierPropertiesListEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(
+      uint32_t                                               drmFormatModifierCount_      = {},
+      VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifierCount( drmFormatModifierCount_ )
+      , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DrmFormatModifierPropertiesListEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const &
+        drmFormatModifierProperties_ )
+      : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
+      , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesListEXT &
+      operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesListEXT &
+      operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default;
+#else
+    bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
+             ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+    void *                                                 pNext = {};
+    uint32_t                                               drmFormatModifierCount       = {};
+    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
+  {
+    using Type = DrmFormatModifierPropertiesListEXT;
+  };
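+
+  // Usage sketch (editorial illustration, assuming a physicalDevice handle): the list struct is
+  // filled by chaining it behind a FormatProperties2 query; a first call with
+  // pDrmFormatModifierProperties == nullptr only writes drmFormatModifierCount, a second call
+  // fills the caller-allocated array.
+  //
+  //   auto chain = physicalDevice.getFormatProperties2<VULKAN_HPP_NAMESPACE::FormatProperties2,
+  //                                                    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>(
+  //     VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm );
+  //   uint32_t count = chain.get<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>().drmFormatModifierCount;
+  //   std::vector<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> modifiers( count );
+  //   // ... point pDrmFormatModifierProperties at modifiers.data() and query once more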
+
+  struct ExportFenceCreateInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo &
+      operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportFenceCreateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportFenceCreateInfo *>( this );
+    }
+
+    operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportFenceCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportFenceCreateInfo const & ) const = default;
+#else
+    bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                sType       = StructureType::eExportFenceCreateInfo;
+    const void *                                       pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
+  };
+  static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
+  {
+    using Type = ExportFenceCreateInfo;
+  };
+  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
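+
+  // Usage sketch (editorial illustration): like the other Export*CreateInfo structs in this
+  // section, this one is chained into the pNext of the corresponding create info at creation
+  // time; the device handle is assumed to exist.
+  //
+  //   VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo exportInfo(
+  //     VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
+  //   VULKAN_HPP_NAMESPACE::FenceCreateInfo fenceInfo;
+  //   fenceInfo.setPNext( &exportInfo );
+  //   VULKAN_HPP_NAMESPACE::Fence fence = device.createFence( fenceInfo );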
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportFenceWin32HandleInfoKHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
+                                                        DWORD                       dwAccess_    = {},
+                                                        LPCWSTR                     name_        = {} ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR &
+      operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR *>( this );
+    }
+
+    operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
+             ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportFenceWin32HandleInfoKHR;
+    const void *                        pNext       = {};
+    const SECURITY_ATTRIBUTES *         pAttributes = {};
+    DWORD                               dwAccess    = {};
+    LPCWSTR                             name        = {};
+  };
+  static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
+  {
+    using Type = ExportFenceWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExportMemoryAllocateInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo &
+      operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfo *>( this );
+    }
+
+    operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportMemoryAllocateInfo const & ) const = default;
+#else
+    bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                 sType       = StructureType::eExportMemoryAllocateInfo;
+    const void *                                        pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
+  {
+    using Type = ExportMemoryAllocateInfo;
+  };
+  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
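+
+  // Usage sketch (editorial illustration, assuming size and memoryTypeIndex were chosen already):
+  //
+  //   VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo exportInfo(
+  //     VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   VULKAN_HPP_NAMESPACE::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
+  //   allocInfo.setPNext( &exportInfo );
+  //   VULKAN_HPP_NAMESPACE::DeviceMemory memory = device.allocateMemory( allocInfo );
+  //   // the payload can later be exported, e.g. via device.getMemoryFdKHR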
+
+  struct ExportMemoryAllocateInfoNV
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV &
+      operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV *>( this );
+    }
+
+    operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default;
+#else
+    bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                   sType       = StructureType::eExportMemoryAllocateInfoNV;
+    const void *                                          pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
+  {
+    using Type = ExportMemoryAllocateInfoNV;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportMemoryWin32HandleInfoKHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
+                                                         DWORD                       dwAccess_    = {},
+                                                         LPCWSTR                     name_        = {} ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR &
+      operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR *>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
+             ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportMemoryWin32HandleInfoKHR;
+    const void *                        pNext       = {};
+    const SECURITY_ATTRIBUTES *         pAttributes = {};
+    DWORD                               dwAccess    = {};
+    LPCWSTR                             name        = {};
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
+  {
+    using Type = ExportMemoryWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportMemoryWin32HandleInfoNV
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {},
+                                                        DWORD                       dwAccess_    = {} ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV &
+      operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV *>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default;
+# else
+    bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
+             ( dwAccess == rhs.dwAccess );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportMemoryWin32HandleInfoNV;
+    const void *                        pNext       = {};
+    const SECURITY_ATTRIBUTES *         pAttributes = {};
+    DWORD                               dwAccess    = {};
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
+  {
+    using Type = ExportMemoryWin32HandleInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExportSemaphoreCreateInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo &
+      operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreCreateInfo *>( this );
+    }
+
+    operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
+#else
+    bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                    sType       = StructureType::eExportSemaphoreCreateInfo;
+    const void *                                           pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
+  };
+  static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
+  {
+    using Type = ExportSemaphoreCreateInfo;
+  };
+  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportSemaphoreWin32HandleInfoKHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eExportSemaphoreWin32HandleInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
+                                                            DWORD                       dwAccess_    = {},
+                                                            LPCWSTR                     name_        = {} ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR &
+      operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>( this );
+    }
+
+    operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
+             ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+    const void *                        pNext       = {};
+    const SECURITY_ATTRIBUTES *         pAttributes = {};
+    DWORD                               dwAccess    = {};
+    LPCWSTR                             name        = {};
+  };
+  static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
+  {
+    using Type = ExportSemaphoreWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct ExternalFormatANDROID
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT
+      : externalFormat( externalFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID &
+      operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
+      return *this;
+    }
+
+    ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalFormat = externalFormat_;
+      return *this;
+    }
+
+    operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFormatANDROID *>( this );
+    }
+
+    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFormatANDROID *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalFormatANDROID const & ) const = default;
+# else
+    bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
+    }
+
+    bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eExternalFormatANDROID;
+    void *                              pNext          = {};
+    uint64_t                            externalFormat = {};
+  };
+  static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFormatANDROID>
+  {
+    using Type = ExternalFormatANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ExternalImageFormatProperties
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatProperties &
+      operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatProperties *>( this );
+    }
+
+    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalImageFormatProperties const & ) const = default;
+#else
+    bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::eExternalImageFormatProperties;
+    void *                                         pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+  };
+  static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
+  {
+    using Type = ExternalImageFormatProperties;
+  };
+  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
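+
+  // Usage sketch (editorial illustration): this struct is an output; chain it behind
+  // ImageFormatProperties2 while chaining a PhysicalDeviceExternalImageFormatInfo behind the
+  // input PhysicalDeviceImageFormatInfo2 to learn whether a handle type can be im-/exported.
+  // format/type/tiling/usage are assumed to be chosen by the caller.
+  //
+  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo externalInfo(
+  //     VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 imageInfo( format, type, tiling, usage );
+  //   imageInfo.setPNext( &externalInfo );
+  //   auto props = physicalDevice.getImageFormatProperties2<VULKAN_HPP_NAMESPACE::ImageFormatProperties2,
+  //                                                         VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>(
+  //     imageInfo );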
+
+  struct ExternalMemoryBufferCreateInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo &
+      operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>( this );
+    }
+
+    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
+#else
+    bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                 sType       = StructureType::eExternalMemoryBufferCreateInfo;
+    const void *                                        pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+  };
+  static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
+  {
+    using Type = ExternalMemoryBufferCreateInfo;
+  };
+  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+
+  struct ExternalMemoryImageCreateInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo &
+      operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo *>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                 sType       = StructureType::eExternalMemoryImageCreateInfo;
+    const void *                                        pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
+  {
+    using Type = ExternalMemoryImageCreateInfo;
+  };
+  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
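+
+  // Usage sketch (editorial illustration, imageCreateInfo assumed to be set up elsewhere):
+  // declare the handle types at image creation so that the memory later bound to the image can
+  // be exported with a matching ExportMemoryAllocateInfo::handleTypes.
+  //
+  //   VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo externalInfo(
+  //     VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   imageCreateInfo.setPNext( &externalInfo );
+  //   VULKAN_HPP_NAMESPACE::Image image = device.createImage( imageCreateInfo );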
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV &
+      operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                   sType       = StructureType::eExternalMemoryImageCreateInfoNV;
+    const void *                                          pNext       = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
+  {
+    using Type = ExternalMemoryImageCreateInfoNV;
+  };
+
+  struct FilterCubicImageViewImageFormatPropertiesEXT
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 filterCubic_       = {},
+      VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT
+      : filterCubic( filterCubic_ )
+      , filterCubicMinmax( filterCubicMinmax_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(
+      FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : FilterCubicImageViewImageFormatPropertiesEXT(
+          *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 FilterCubicImageViewImageFormatPropertiesEXT &
+      operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FilterCubicImageViewImageFormatPropertiesEXT &
+      operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default;
+#else
+    bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) &&
+             ( filterCubicMinmax == rhs.filterCubicMinmax );
+    }
+
+    bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        filterCubic       = {};
+    VULKAN_HPP_NAMESPACE::Bool32        filterCubicMinmax = {};
+  };
+  static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) ==
+                   sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
+  {
+    using Type = FilterCubicImageViewImageFormatPropertiesEXT;
+  };
+
+  struct FragmentShadingRateAttachmentInfoKHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eFragmentShadingRateAttachmentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {},
+      VULKAN_HPP_NAMESPACE::Extent2D                     shadingRateAttachmentTexelSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ )
+      , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
+      operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FragmentShadingRateAttachmentInfoKHR &
+      operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment(
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
+      return *this;
+    }
+
+    FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize(
+      VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
+      return *this;
+    }
+
+    operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR *>( this );
+    }
+
+    operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default;
+#else
+    bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) &&
+             ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
+    }
+
+    bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
+    const void *                                       pNext = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                     shadingRateAttachmentTexelSize = {};
+  };
+  static_assert( sizeof( FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ),
+                 "struct and wrapper have different size!" );
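+
+  // Usage sketch (editorial illustration): chained into a SubpassDescription2 to attach a
+  // shading-rate image; the texel size must be one of the sizes advertised by the
+  // VK_KHR_fragment_shading_rate properties.
+  //
+  //   VULKAN_HPP_NAMESPACE::AttachmentReference2 fsrReference(
+  //     /*attachment*/ 1, VULKAN_HPP_NAMESPACE::ImageLayout::eFragmentShadingRateAttachmentOptimalKHR );
+  //   VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR fsrInfo( &fsrReference, { 16, 16 } );
+  //   subpassDescription2.setPNext( &fsrInfo );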
+  static_assert( std::is_standard_layout<FragmentShadingRateAttachmentInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
+  {
+    using Type = FragmentShadingRateAttachmentInfoKHR;
+  };
+
+  struct FramebufferAttachmentImageInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_           = {},
+                                      VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage_           = {},
+                                      uint32_t                               width_           = {},
+                                      uint32_t                               height_          = {},
+                                      uint32_t                               layerCount_      = {},
+                                      uint32_t                               viewFormatCount_ = {},
+                                      const VULKAN_HPP_NAMESPACE::Format *   pViewFormats_    = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , usage( usage_ )
+      , width( width_ )
+      , height( height_ )
+      , layerCount( layerCount_ )
+      , viewFormatCount( viewFormatCount_ )
+      , pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    FramebufferAttachmentImageInfo(
+      VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_,
+      VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage_,
+      uint32_t                               width_,
+      uint32_t                               height_,
+      uint32_t                               layerCount_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
+      : flags( flags_ )
+      , usage( usage_ )
+      , width( width_ )
+      , height( height_ )
+      , layerCount( layerCount_ )
+      , viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) )
+      , pViewFormats( viewFormats_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &
+      operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfo &
+      setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    FramebufferAttachmentImageInfo &
+      setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const &
+                        viewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+      pViewFormats    = viewFormats_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentImageInfo *>( this );
+    }
+
+    operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentImageInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default;
+#else
+    bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) &&
+             ( width == rhs.width ) && ( height == rhs.height ) && ( layerCount == rhs.layerCount ) &&
+             ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
+    }
+
+    bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType           = StructureType::eFramebufferAttachmentImageInfo;
+    const void *                           pNext           = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags           = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage           = {};
+    uint32_t                               width           = {};
+    uint32_t                               height          = {};
+    uint32_t                               layerCount      = {};
+    uint32_t                               viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format *   pViewFormats    = {};
+  };
+  static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentImageInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
+  {
+    using Type = FramebufferAttachmentImageInfo;
+  };
+  using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
+
+  struct FramebufferAttachmentsCreateInfo
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(
+      uint32_t                                                     attachmentImageInfoCount_ = {},
+      const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_    = {} ) VULKAN_HPP_NOEXCEPT
+      : attachmentImageInfoCount( attachmentImageInfoCount_ )
+      , pAttachmentImageInfos( pAttachmentImageInfos_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    FramebufferAttachmentsCreateInfo(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const &
+        attachmentImageInfos_ )
+      : attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) )
+      , pAttachmentImageInfos( attachmentImageInfos_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &
+      operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfo &
+      setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = attachmentImageInfoCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos(
+      const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentImageInfos = pAttachmentImageInfos_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    FramebufferAttachmentsCreateInfo & setAttachmentImageInfos(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const &
+        attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
+      pAttachmentImageInfos    = attachmentImageInfos_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo *>( this );
+    }
+
+    operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default;
+#else
+    bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) &&
+             ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
+    }
+
+    bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
+    const void *                        pNext = {};
+    uint32_t                            attachmentImageInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
+  };
+  static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
+  {
+    using Type = FramebufferAttachmentsCreateInfo;
+  };
+  using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
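+
+  // Usage sketch (editorial illustration): together with FramebufferAttachmentImageInfo above,
+  // this enables imageless framebuffers; the concrete ImageViews are only supplied at render
+  // pass begin via RenderPassAttachmentBeginInfo. width/height/colorFormat and the
+  // framebufferCreateInfo are assumed to be set up by the caller.
+  //
+  //   VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo imageInfo(
+  //     {}, VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eColorAttachment, width, height,
+  //     /*layerCount*/ 1, /*viewFormatCount*/ 1, &colorFormat );
+  //   VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo attachmentsInfo( 1, &imageInfo );
+  //   framebufferCreateInfo.setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlagBits::eImageless );
+  //   framebufferCreateInfo.setPNext( &attachmentsInfo );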
+
+  struct GraphicsShaderGroupCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(
+      uint32_t stageCount_ = {},
+      const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
+      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stageCount( stageCount_ ), pStages( pStages_ )
+      , pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsShaderGroupCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} )
+      : stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() )
+      , pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &
+      operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV &
+      setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsShaderGroupCreateInfoNV & setStages(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages    = stages_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    GraphicsShaderGroupCreateInfoNV & setPVertexInputState(
+      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    GraphicsShaderGroupCreateInfoNV & setPTessellationState(
+      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+
+    operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV *>( this );
+    }
+
+    operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default;
+#else
+    bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) &&
+             ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) &&
+             ( pTessellationState == rhs.pTessellationState );
+    }
+
+    bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+    const void * pNext = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
+  };
+  static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsShaderGroupCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
+  {
+    using Type = GraphicsShaderGroupCreateInfoNV;
+  };
+
+  struct GraphicsPipelineShaderGroupsCreateInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(
+      uint32_t groupCount_ = {},
+      const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {},
+      uint32_t pipelineCount_ = {},
+      const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT
+      : groupCount( groupCount_ ), pGroups( pGroups_ ), pipelineCount( pipelineCount_ ), pPipelines( pPipelines_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(
+      GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsPipelineShaderGroupsCreateInfoNV(
+          *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsPipelineShaderGroupsCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {} )
+      : groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() )
+      , pipelineCount( static_cast<uint32_t>( pipelines_.size() ) ), pPipelines( pipelines_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &
+      operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineShaderGroupsCreateInfoNV &
+      operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV &
+      setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsPipelineShaderGroupsCreateInfoNV & setGroups(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = static_cast<uint32_t>( groups_.size() );
+      pGroups    = groups_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCount = pipelineCount_;
+      return *this;
+    }
+
+    GraphicsPipelineShaderGroupsCreateInfoNV &
+      setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelines = pPipelines_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCount = static_cast<uint32_t>( pipelines_.size() );
+      pPipelines    = pipelines_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
+    }
+
+    operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default;
+#else
+    bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) &&
+             ( pGroups == rhs.pGroups ) && ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines );
+    }
+
+    bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+    const void * pNext = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {};
+    uint32_t pipelineCount = {};
+    const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {};
+  };
+  static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsPipelineShaderGroupsCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
+  };
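+
+  // Editorial note: illustrative sketch only, not part of the original patch.
+  // With VK_NV_device_generated_commands enabled, shader groups are chained
+  // into a graphics pipeline so one pipeline object carries several stage
+  // sets; `stages` is an assumed pre-filled array of
+  // vk::PipelineShaderStageCreateInfo (group 0 conventionally matches the
+  // pipeline's own stages):
+  //
+  //   vk::GraphicsShaderGroupCreateInfoNV group{};
+  //   group.setStages( stages );
+  //   vk::GraphicsPipelineShaderGroupsCreateInfoNV groups{};
+  //   groups.setGroups( group );
+  //   vk::GraphicsPipelineCreateInfo pipelineInfo{};
+  //   pipelineInfo.setPNext( &groups );  // remaining pipeline state omitted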
+
+  struct HeadlessSurfaceCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(
+      VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT &
+      operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    HeadlessSurfaceCreateInfoEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( this );
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+    }
+
+    bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
+  };
+  static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
+  {
+    using Type = HeadlessSurfaceCreateInfoEXT;
+  };
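+
+  // Editorial note: illustrative sketch, not part of the original patch. With
+  // the VK_EXT_headless_surface extension enabled on an assumed vk::Instance
+  // `instance`, a display-less surface needs no platform fields at all:
+  //
+  //   vk::HeadlessSurfaceCreateInfoEXT headlessInfo{};
+  //   vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( headlessInfo );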
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  struct IOSSurfaceCreateInfoMVK
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {},
+                                                  const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ ), pView( pView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK &
+      operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( this );
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default;
+# else
+    bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
+    }
+
+    bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
+    const void * pView = {};
+  };
+  static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
+  {
+    using Type = IOSSurfaceCreateInfoMVK;
+  };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  struct ImageDrmFormatModifierExplicitCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(
+      uint64_t drmFormatModifier_ = {},
+      uint32_t drmFormatModifierPlaneCount_ = {},
+      const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+      , pPlaneLayouts( pPlaneLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(
+      ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageDrmFormatModifierExplicitCreateInfoEXT(
+          *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageDrmFormatModifierExplicitCreateInfoEXT(
+      uint64_t drmFormatModifier_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ )
+      : drmFormatModifier( drmFormatModifier_ )
+      , drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) )
+      , pPlaneLayouts( planeLayouts_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
+      operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT &
+      operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT &
+      setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT &
+      setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPlaneLayouts = pPlaneLayouts_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
+      pPlaneLayouts               = planeLayouts_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
+    }
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) &&
+             ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
+             ( pPlaneLayouts == rhs.pPlaneLayouts );
+    }
+
+    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+    const void * pNext = {};
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {};
+  };
+  static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
+  {
+    using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
+  };
+
+  struct ImageDrmFormatModifierListCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(
+      uint32_t drmFormatModifierCount_ = {}, const uint64_t * pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifiers( pDrmFormatModifiers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(
+      ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageDrmFormatModifierListCreateInfoEXT(
+          *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageDrmFormatModifierListCreateInfoEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ )
+      : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) )
+      , pDrmFormatModifiers( drmFormatModifiers_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &
+      operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierListCreateInfoEXT &
+      operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImageDrmFormatModifierListCreateInfoEXT &
+      setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierCount = drmFormatModifierCount_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT &
+      setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDrmFormatModifiers = pDrmFormatModifiers_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
+      pDrmFormatModifiers    = drmFormatModifiers_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>( this );
+    }
+
+    operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
+             ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
+    }
+
+    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t drmFormatModifierCount = {};
+    const uint64_t * pDrmFormatModifiers = {};
+  };
+  static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
+  {
+    using Type = ImageDrmFormatModifierListCreateInfoEXT;
+  };
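+
+  // Editorial note: illustrative sketch, not part of the original patch. Under
+  // VK_EXT_image_drm_format_modifier, an image created with
+  // vk::ImageTiling::eDrmFormatModifierEXT chains a candidate modifier list;
+  // the modifier values below are placeholders (assumes <array> is included):
+  //
+  //   std::array<uint64_t, 2> modifiers = { 0 /* e.g. DRM_FORMAT_MOD_LINEAR */,
+  //                                         1234 /* vendor-specific placeholder */ };
+  //   vk::ImageDrmFormatModifierListCreateInfoEXT modifierList{};
+  //   modifierList.setDrmFormatModifiers( modifiers );
+  //   vk::ImageCreateInfo imageInfo{};
+  //   imageInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT )
+  //            .setPNext( &modifierList );  // remaining image state omitted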
+
+  struct ImageFormatListCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(
+      uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
+      : viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageFormatListCreateInfo(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
+      : viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo &
+      operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    ImageFormatListCreateInfo &
+      setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageFormatListCreateInfo & setViewFormats(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
+      pViewFormats    = viewFormats_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatListCreateInfo *>( this );
+    }
+
+    operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatListCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageFormatListCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) &&
+             ( pViewFormats == rhs.pViewFormats );
+    }
+
+    bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
+    const void * pNext = {};
+    uint32_t viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
+  };
+  static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatListCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
+  {
+    using Type = ImageFormatListCreateInfo;
+  };
+  using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
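+
+  // Editorial note: illustrative sketch, not part of the original patch. For a
+  // mutable-format image, ImageFormatListCreateInfo declares up front which
+  // view formats will be used, which can let drivers avoid worst-case layouts
+  // (assumes <array> is included; remaining image state omitted):
+  //
+  //   std::array<vk::Format, 2> viewFormats = { vk::Format::eR8G8B8A8Unorm,
+  //                                             vk::Format::eR8G8B8A8Srgb };
+  //   vk::ImageFormatListCreateInfo formatList{};
+  //   formatList.setViewFormats( viewFormats );
+  //   vk::ImageCreateInfo imageInfo{};
+  //   imageInfo.setFlags( vk::ImageCreateFlagBits::eMutableFormat )
+  //            .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //            .setPNext( &formatList );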
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct ImagePipeSurfaceCreateInfoFUCHSIA
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(
+      VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {},
+      zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ ), imagePipeHandle( imagePipeHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA &
+      operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA &
+      setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagePipeHandle = imagePipeHandle_;
+      return *this;
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & ) const = default;
+# else
+    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
+    }
+
+    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
+    zx_handle_t imagePipeHandle = {};
+  };
+  static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
+  {
+    using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct ImagePlaneMemoryRequirementsInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(
+      VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ =
+        VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
+      : planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo &
+      operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImagePlaneMemoryRequirementsInfo &
+      setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>( this );
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
+#else
+    bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
+    }
+
+    bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+  };
+  static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
+  {
+    using Type = ImagePlaneMemoryRequirementsInfo;
+  };
+  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
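+
+  // Editorial note: illustrative sketch, not part of the original patch. For a
+  // disjoint multi-planar image, per-plane requirements are queried by chaining
+  // ImagePlaneMemoryRequirementsInfo into ImageMemoryRequirementsInfo2; `device`
+  // and `image` are assumed valid handles:
+  //
+  //   vk::ImagePlaneMemoryRequirementsInfo planeInfo{};
+  //   planeInfo.setPlaneAspect( vk::ImageAspectFlagBits::ePlane0 );
+  //   vk::ImageMemoryRequirementsInfo2 reqInfo{};
+  //   reqInfo.setImage( image ).setPNext( &planeInfo );
+  //   vk::MemoryRequirements2 reqs = device.getImageMemoryRequirements2( reqInfo );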
+
+  struct ImageStencilUsageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stencilUsage( stencilUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo &
+      operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImageStencilUsageCreateInfo &
+      setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilUsage = stencilUsage_;
+      return *this;
+    }
+
+    operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage );
+    }
+
+    bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
+  };
+  static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageStencilUsageCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
+  {
+    using Type = ImageStencilUsageCreateInfo;
+  };
+  using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
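+
+  // Editorial note: illustrative sketch, not part of the original patch. For a
+  // combined depth/stencil image, ImageStencilUsageCreateInfo lets the stencil
+  // aspect carry a narrower usage than the image as a whole:
+  //
+  //   vk::ImageStencilUsageCreateInfo stencilUsage{};
+  //   stencilUsage.setStencilUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment );
+  //   vk::ImageCreateInfo imageInfo{};
+  //   imageInfo.setFormat( vk::Format::eD32SfloatS8Uint )
+  //            .setUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment |
+  //                       vk::ImageUsageFlagBits::eSampled )
+  //            .setPNext( &stencilUsage );  // remaining image state omitted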
+
+  struct ImageSwapchainCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT
+      : swapchain( swapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR &
+      operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>( this );
+    }
+
+    operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain );
+    }
+
+    bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+  };
+  static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
+  {
+    using Type = ImageSwapchainCreateInfoKHR;
+  };
+
+  struct ImageViewASTCDecodeModeEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(
+      VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : decodeMode( decodeMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT &
+      operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeMode = decodeMode_;
+      return *this;
+    }
+
+    operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( this );
+    }
+
+    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
+#else
+    bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode );
+    }
+
+    bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+  };
+  static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
+  {
+    using Type = ImageViewASTCDecodeModeEXT;
+  };
+
+  struct ImageViewUsageCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT
+      : usage( usage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo &
+      operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewUsageCreateInfo *>( this );
+    }
+
+    operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewUsageCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageViewUsageCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
+    }
+
+    bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+  };
+  static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
+  {
+    using Type = ImageViewUsageCreateInfo;
+  };
+  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
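+
+  // Editorial note: illustrative sketch, not part of the original patch. An
+  // image view may restrict itself to a subset of the image's usage, e.g. a
+  // sampled-only view of a storage-capable image (`image` is an assumed
+  // valid handle; format and subresource range omitted):
+  //
+  //   vk::ImageViewUsageCreateInfo viewUsage{};
+  //   viewUsage.setUsage( vk::ImageUsageFlagBits::eSampled );
+  //   vk::ImageViewCreateInfo viewInfo{};
+  //   viewInfo.setImage( image ).setPNext( &viewUsage );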
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct ImportAndroidHardwareBufferInfoANDROID
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImportAndroidHardwareBufferInfoANDROID;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(
+      ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportAndroidHardwareBufferInfoANDROID(
+          *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID &
+      operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportAndroidHardwareBufferInfoANDROID &
+      operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
+      return *this;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>( this );
+    }
+
+    operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default;
+# else
+    bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+    const void * pNext = {};
+    struct AHardwareBuffer * buffer = {};
+  };
+  static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
+  {
+    using Type = ImportAndroidHardwareBufferInfoANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ImportMemoryFdInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
+        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+      int fd_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ ), fd( fd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR &
+      operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    ImportMemoryFdInfoKHR &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+
+    operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryFdInfoKHR *>( this );
+    }
+
+    operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryFdInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
+#else
+    bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    int fd = {};
+  };
+  static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
+  {
+    using Type = ImportMemoryFdInfoKHR;
+  };
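+
+  // Editorial note: illustrative sketch, not part of the original patch. With
+  // VK_KHR_external_memory_fd, a POSIX fd exported elsewhere is imported by
+  // chaining ImportMemoryFdInfoKHR into the allocation; `device`, `fd`, `size`,
+  // and `memoryTypeIndex` are assumed inputs, and a successful allocation
+  // takes ownership of the fd:
+  //
+  //   vk::ImportMemoryFdInfoKHR importInfo{};
+  //   importInfo.setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
+  //             .setFd( fd );
+  //   vk::MemoryAllocateInfo allocInfo{};
+  //   allocInfo.setAllocationSize( size )
+  //            .setMemoryTypeIndex( memoryTypeIndex )
+  //            .setPNext( &importInfo );
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );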
); + + template <> + struct CppType + { + using Type = ImportMemoryFdInfoKHR; + }; + + struct ImportMemoryHostPointerInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , pHostPointer( pHostPointer_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & + operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; +#else + bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( pHostPointer == rhs.pHostPointer ); + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * pHostPointer = {}; + }; + static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryHostPointerInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & + operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ImportMemoryWin32HandleInfoNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {},
+                                     HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+      , handle( handle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV &
+      operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV *>( this );
+    }
+
+    operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default;
+# else
+    bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
+             ( handle == rhs.handle );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
+    HANDLE handle = {};
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
+  {
+    using Type = ImportMemoryWin32HandleInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct ImportMemoryZirconHandleInfoFUCHSIA
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImportMemoryZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
+                                             VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+                                           zx_handle_t handle_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+      , handle( handle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA &
+      operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryZirconHandleInfoFUCHSIA &
+      operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+    ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryZirconHandleInfoFUCHSIA &
+      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
+    }
+
+    operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & ) const = default;
+# else
+    bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
+             ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 );
+    }
+
+    bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    zx_handle_t handle = {};
+  };
+  static_assert( sizeof( ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryZirconHandleInfoFUCHSIA>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
+  {
+    using Type = ImportMemoryZirconHandleInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct InputAttachmentAspectReference
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      InputAttachmentAspectReference( uint32_t subpass_ = {},
+                                      uint32_t inputAttachmentIndex_ = {},
+                                      VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : subpass( subpass_ )
+      , inputAttachmentIndex( inputAttachmentIndex_ )
+      , aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &
+      operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentIndex = inputAttachmentIndex_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference &
+      setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
+    }
+
+    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInputAttachmentAspectReference *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( InputAttachmentAspectReference const & ) const = default;
+#else
+    bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) &&
+             ( aspectMask == rhs.aspectMask );
+    }
+
+    bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t subpass = {};
+    uint32_t inputAttachmentIndex = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+  };
+  static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value,
+                 "struct wrapper is not a standard layout!" );
+  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+
+  struct InstanceCreateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {},
+                                             const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {},
+                                             uint32_t enabledLayerCount_ = {},
+                                             const char * const * ppEnabledLayerNames_ = {},
+                                             uint32_t enabledExtensionCount_ = {},
+                                             const char * const * ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pApplicationInfo( pApplicationInfo_ )
+      , enabledLayerCount( enabledLayerCount_ )
+      , ppEnabledLayerNames( ppEnabledLayerNames_ )
+      , enabledExtensionCount( enabledExtensionCount_ )
+      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    InstanceCreateInfo(
+      VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_,
+      const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {} )
+      : flags( flags_ )
+      , pApplicationInfo( pApplicationInfo_ )
+      , enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
+      , ppEnabledLayerNames( pEnabledLayerNames_.data() )
+      , enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
+      , ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &
+      operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+    InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    InstanceCreateInfo &
+      setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pApplicationInfo = pApplicationInfo_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    InstanceCreateInfo & setPEnabledLayerNames(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount   = static_cast<uint32_t>( pEnabledLayerNames_.size() );
+      ppEnabledLayerNames = pEnabledLayerNames_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    InstanceCreateInfo & setPEnabledExtensionNames(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount   = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
+      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInstanceCreateInfo *>( this );
+    }
+
+    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInstanceCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( InstanceCreateInfo const & ) const = default;
+#else
+    bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( pApplicationInfo == rhs.pApplicationInfo ) && ( enabledLayerCount == rhs.enabledLayerCount ) &&
+             ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) &&
+             ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
+             ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+    }
+
+    bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
+    const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {};
+    uint32_t enabledLayerCount = {};
+    const char * const * ppEnabledLayerNames = {};
+    uint32_t enabledExtensionCount = {};
+    const char * const * ppEnabledExtensionNames = {};
+  };
+  static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
+  {
+    using Type = InstanceCreateInfo;
+  };
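+
+  // Illustrative usage sketch (not part of the generated header): with enhanced
+  // mode enabled, the ArrayProxyNoTemporaries constructor fills both count and
+  // pointer members from a container. `layers` and `extensions` are hypothetical
+  // std::vector<const char *> values:
+  //   vk::ApplicationInfo appInfo( "app", 1, "engine", 1, VK_API_VERSION_1_1 );
+  //   vk::InstanceCreateInfo ci( {}, &appInfo, layers, extensions );
+  //   vk::UniqueInstance instance = vk::createInstanceUnique( ci );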
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  struct MacOSSurfaceCreateInfoMVK
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {},
+                                                    const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pView( pView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK &
+      operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+
+    operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( this );
+    }
+
+    operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default;
+# else
+    bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
+    }
+
+    bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
+    const void * pView = {};
+  };
+  static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
+  {
+    using Type = MacOSSurfaceCreateInfoMVK;
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  struct MemoryAllocateFlagsInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
+                                                  uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo &
+      operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo *>( this );
+    }
+
+    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateFlagsInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
+#else
+    bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
+    uint32_t deviceMask = {};
+  };
+  static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
+  {
+    using Type = MemoryAllocateFlagsInfo;
+  };
+  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
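+
+  // Illustrative usage sketch (not part of the generated header): this struct
+  // extends MemoryAllocateInfo through the pNext chain, e.g. to request a
+  // device address for the allocation. `size` and `memoryTypeIndex` are
+  // hypothetical values chosen by the caller:
+  //   vk::MemoryAllocateFlagsInfo flagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress );
+  //   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
+  //   allocInfo.setPNext( &flagsInfo );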
+
+  struct MemoryDedicatedAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+      , buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo &
+      operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>( this );
+    }
+
+    operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default;
+#else
+    bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+  };
+  static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
+  {
+    using Type = MemoryDedicatedAllocateInfo;
+  };
+  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+
+  struct MemoryDedicatedRequirements
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {},
+                                   VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
+      : prefersDedicatedAllocation( prefersDedicatedAllocation_ )
+      , requiresDedicatedAllocation( requiresDedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedRequirements &
+      operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
+    }
+
+    operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
+#else
+    bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
+             ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+    }
+
+    bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
+  };
+  static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
+  {
+    using Type = MemoryDedicatedRequirements;
+  };
+  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
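+
+  // Illustrative usage sketch (not part of the generated header): this is an
+  // output structure, so it is typically retrieved through a StructureChain;
+  // `device` and `buffer` are hypothetical handles:
+  //   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2,
+  //                                                    vk::MemoryDedicatedRequirements>(
+  //     vk::BufferMemoryRequirementsInfo2( buffer ) );
+  //   bool dedicated = chain.get<vk::MemoryDedicatedRequirements>().requiresDedicatedAllocation;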
+
+  struct MemoryOpaqueCaptureAddressAllocateInfo
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+      : opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryOpaqueCaptureAddressAllocateInfo(
+          *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo &
+      operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryOpaqueCaptureAddressAllocateInfo &
+      operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfo &
+      setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
+#else
+    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+    }
+
+    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+    const void * pNext = {};
+    uint64_t opaqueCaptureAddress = {};
+  };
+  static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
+  {
+    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
+  };
+  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
+
+  struct MemoryPriorityAllocateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
+      : priority( priority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT &
+      operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priority = priority_;
+      return *this;
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>( this );
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default;
+#else
+    bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority );
+    }
+
+    bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
+    const void * pNext = {};
+    float priority = {};
+  };
+  static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
+  {
+    using Type = MemoryPriorityAllocateInfoEXT;
+  };
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  struct MetalSurfaceCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
+                                                    const CAMetalLayer * pLayer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pLayer( pLayer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT &
+      operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLayer = pLayer_;
+      return *this;
+    }
+
+    operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( this );
+    }
+
+    operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default;
+# else
+    bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer );
+    }
+
+    bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
+    const CAMetalLayer * pLayer = {};
+  };
+  static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
+  {
+    using Type = MetalSurfaceCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  struct MutableDescriptorTypeListVALVE
+  {
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE(
+      uint32_t descriptorTypeCount_ = {},
+      const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : descriptorTypeCount( descriptorTypeCount_ )
+      , pDescriptorTypes( pDescriptorTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      MutableDescriptorTypeListVALVE( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MutableDescriptorTypeListVALVE( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MutableDescriptorTypeListVALVE( *reinterpret_cast<MutableDescriptorTypeListVALVE const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    MutableDescriptorTypeListVALVE(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const &
+        descriptorTypes_ )
+      : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) )
+      , pDescriptorTypes( descriptorTypes_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE &
+      operator=( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MutableDescriptorTypeListVALVE & operator=( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE const *>( &rhs );
+      return *this;
+    }
+
+    MutableDescriptorTypeListVALVE & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorTypeCount = descriptorTypeCount_;
+      return *this;
+    }
+
+    MutableDescriptorTypeListVALVE &
+      setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorTypes = pDescriptorTypes_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    MutableDescriptorTypeListVALVE & setDescriptorTypes(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const &
+        descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
+      pDescriptorTypes    = descriptorTypes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkMutableDescriptorTypeListVALVE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMutableDescriptorTypeListVALVE *>( this );
+    }
+
+    operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMutableDescriptorTypeListVALVE *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MutableDescriptorTypeListVALVE const & ) const = default;
+#else
+    bool operator==( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes );
+    }
+
+    bool operator!=( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t descriptorTypeCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {};
+  };
+  static_assert( sizeof( MutableDescriptorTypeListVALVE ) == sizeof( VkMutableDescriptorTypeListVALVE ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MutableDescriptorTypeListVALVE>::value,
+                 "struct wrapper is not a standard layout!" );
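+
+  // Illustrative usage sketch (not part of the generated header): with enhanced
+  // mode, a list can be built directly from a container of descriptor types;
+  // the std::array below is a hypothetical caller-chosen value:
+  //   std::array<vk::DescriptorType, 2> types{ vk::DescriptorType::eSampledImage,
+  //                                            vk::DescriptorType::eStorageImage };
+  //   vk::MutableDescriptorTypeListVALVE list( types );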
+
+  struct MutableDescriptorTypeCreateInfoVALVE
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eMutableDescriptorTypeCreateInfoVALVE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE(
+      uint32_t mutableDescriptorTypeListCount_ = {},
+      const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ = {} )
+      VULKAN_HPP_NOEXCEPT
+      : mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ )
+      , pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( MutableDescriptorTypeCreateInfoVALVE const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    MutableDescriptorTypeCreateInfoVALVE( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MutableDescriptorTypeCreateInfoVALVE( *reinterpret_cast<MutableDescriptorTypeCreateInfoVALVE const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    MutableDescriptorTypeCreateInfoVALVE(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const &
+        mutableDescriptorTypeLists_ )
+      : mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) )
+      , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE &
+      operator=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MutableDescriptorTypeCreateInfoVALVE &
+      operator=( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE const *>( &rhs );
+      return *this;
+    }
+
+    MutableDescriptorTypeCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MutableDescriptorTypeCreateInfoVALVE &
+      setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
+      return *this;
+    }
+
+    MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists(
+      const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeLists(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const &
+        mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
+      pMutableDescriptorTypeLists    = mutableDescriptorTypeLists_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkMutableDescriptorTypeCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoVALVE *>( this );
+    }
+
+    operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoVALVE *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MutableDescriptorTypeCreateInfoVALVE const & ) const = default;
+#else
+    bool operator==( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) &&
+             ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
+    }
+
+    bool operator!=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE;
+    const void * pNext = {};
+    uint32_t mutableDescriptorTypeListCount = {};
+    const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists = {};
+  };
+  static_assert( sizeof( MutableDescriptorTypeCreateInfoVALVE ) == sizeof( VkMutableDescriptorTypeCreateInfoVALVE ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MutableDescriptorTypeCreateInfoVALVE>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoVALVE>
+  {
+    using Type = MutableDescriptorTypeCreateInfoVALVE;
+  };
+
+  union PerformanceCounterResultKHR
+  {
+    PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+    }
+
+    PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {}
+
+    PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {}
+
+    PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {}
+
+    PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {}
+
+    PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {}
+
+    PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {}
+
+    PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int32 = int32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int64 = int64_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint32 = uint32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint64 = uint64_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float32 = float32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float64 = float64_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR &
+      operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+      return *this;
+    }
+
+    operator VkPerformanceCounterResultKHR const &() const
+    {
+      return *reinterpret_cast<const VkPerformanceCounterResultKHR *>( this );
+    }
+
+    operator VkPerformanceCounterResultKHR &()
+    {
+      return *reinterpret_cast<VkPerformanceCounterResultKHR *>( this );
+    }
+
+    int32_t  int32;
+    int64_t  int64;
+    uint32_t uint32;
+    uint64_t uint64;
+    float    float32;
+    double   float64;
+  };
+
+  struct PerformanceQuerySubmitInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : counterPassIndex( counterPassIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR &
+      operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterPassIndex = counterPassIndex_;
+      return *this;
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this );
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
+#else
+    bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex );
+    }
+
+    bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+    const void * pNext = {};
+    uint32_t counterPassIndex = {};
+  };
+  static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
+  {
+    using Type = PerformanceQuerySubmitInfoKHR;
+  };
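+
+  // Illustrative usage sketch (not part of the generated header): when a
+  // VK_KHR_performance_query pool needs several passes, the pass index is
+  // chained into each submission; `submitInfo` and `passIndex` are hypothetical:
+  //   vk::PerformanceQuerySubmitInfoKHR perfSubmit( passIndex );
+  //   submitInfo.setPNext( &perfSubmit );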
+
+  struct PhysicalDevice16BitStorageFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevice16BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+                                          VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+                                          VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+                                          VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT
+      : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+      , storagePushConstant16( storagePushConstant16_ )
+      , storageInputOutput16( storageInputOutput16_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
+      operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice16BitStorageFeatures &
+      operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures &
+      setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess(
+      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures &
+      setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures &
+      setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>( this );
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
+             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) &&
+             ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
+             ( storageInputOutput16 == rhs.storageInputOutput16 );
+    }
+
+    bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+  };
+  static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
+  {
+    using Type = PhysicalDevice16BitStorageFeatures;
+  };
+  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
+
+  struct PhysicalDevice4444FormatsFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {} ) VULKAN_HPP_NOEXCEPT
+      : formatA4R4G4B4( formatA4R4G4B4_ )
+      , formatA4B4G4R4( formatA4B4G4R4_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT &
+      operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice4444FormatsFeaturesEXT &
+      operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice4444FormatsFeaturesEXT &
+      setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatA4R4G4B4 = formatA4R4G4B4_;
+      return *this;
+    }
+
+    PhysicalDevice4444FormatsFeaturesEXT &
+      setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatA4B4G4R4 = formatA4B4G4R4_;
+      return *this;
+    }
+
+    operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) &&
+             ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
+    }
+
+    bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
+  };
+  static_assert( sizeof( PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice4444FormatsFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
+  {
+    using Type = PhysicalDevice4444FormatsFeaturesEXT;
+  };
+
+  struct PhysicalDevice8BitStorageFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevice8BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+                                         VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+                                         VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
+      : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
+      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
+      , storagePushConstant8( storagePushConstant8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
+      operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures &
+      setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess(
+      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeatures &
+      setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+    operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures *>( this );
+    }
+
+    operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
+             ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) &&
+             ( storagePushConstant8 == rhs.storagePushConstant8 );
+    }
storagePushConstant8 == rhs.storagePushConstant8 ); + } + + bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + }; + static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDevice8BitStorageFeatures; + }; + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + + struct PhysicalDeviceASTCDecodeFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT + : decodeModeSharedExponent( decodeModeSharedExponent_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceASTCDecodeFeaturesEXT & + setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT + { + decodeModeSharedExponent = decodeModeSharedExponent_; + return *this; + } + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); + } + + bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; + }; + static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( 
VkPhysicalDeviceASTCDecodeFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceASTCDecodeFeaturesEXT; + }; + + struct PhysicalDeviceAccelerationStructureFeaturesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + , accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ) + , accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ) + , accelerationStructureHostCommands( accelerationStructureHostCommands_ ) + , descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( + PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructureFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR & + operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureHostCommands = accelerationStructureHostCommands_; + return *this; + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & 
setDescriptorBindingAccelerationStructureUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; + return *this; + } + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructure == rhs.accelerationStructure ) && + ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) && + ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) && + ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) && + ( descriptorBindingAccelerationStructureUpdateAfterBind == + rhs.descriptorBindingAccelerationStructureUpdateAfterBind ); + } + + bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {}; + }; + static_assert( sizeof( PhysicalDeviceAccelerationStructureFeaturesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
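+  // Usage sketch (illustrative only, not part of the generated header; assumes a
+  // vk::PhysicalDevice named `gpu`): feature structs like the one above are read
+  // back by chaining them into a features2 query, e.g.
+  //   auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                 vk::PhysicalDeviceAccelerationStructureFeaturesKHR>();
+  //   bool hasAccel = chain.get<vk::PhysicalDeviceAccelerationStructureFeaturesKHR>()
+  //                     .accelerationStructure == VK_TRUE;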
); + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; + }; + + struct PhysicalDeviceAccelerationStructurePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxPrimitiveCount_ = {}, + uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, + uint32_t minAccelerationStructureScratchOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxPrimitiveCount( maxPrimitiveCount_ ) + , maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ) + , maxPerStageDescriptorUpdateAfterBindAccelerationStructures( + maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + , maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ) + , minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( + PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructurePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructurePropertiesKHR & + operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR & + operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && + ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && + ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) && + ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == + rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && + ( 
maxDescriptorSetUpdateAfterBindAccelerationStructures == + rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) && + ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment ); + } + + bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + void * pNext = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxPrimitiveCount = {}; + uint32_t maxPerStageDescriptorAccelerationStructures = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; + uint32_t minAccelerationStructureScratchOffsetAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceAccelerationStructurePropertiesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; + }; + + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT + { + advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) 
const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + }; + + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + uint32_t advancedBlendMaxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ) + , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ) + , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ) + , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ) + , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ) + , advancedBlendAllOperations( advancedBlendAllOperations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & 
) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && + ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) && + ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && + ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && + ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && + ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + void * pNext = {}; + uint32_t advancedBlendMaxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + }; + + struct PhysicalDeviceBufferDeviceAddressFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( + PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures & + operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddress( 
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } + + operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == + sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
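+  // Sketch (illustrative; `gpu` and `queueCreateInfo` are assumed to exist):
+  // enabling the feature above at device creation by chaining it into pNext:
+  //   vk::PhysicalDeviceBufferDeviceAddressFeatures bdaFeatures;
+  //   bdaFeatures.bufferDeviceAddress = VK_TRUE;
+  //   vk::DeviceCreateInfo deviceInfo;
+  //   deviceInfo.setQueueCreateInfos( queueCreateInfo );
+  //   deviceInfo.pNext = &bdaFeatures;
+  //   vk::Device device = gpu.createDevice( deviceInfo );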
); + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeatures; + }; + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( + PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == + sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + }; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + + struct PhysicalDeviceCoherentMemoryFeaturesAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceCoherentMemory( deviceCoherentMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoherentMemoryFeaturesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & + operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD & + operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCoherentMemoryFeaturesAMD & + setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT + { + deviceCoherentMemory = deviceCoherentMemory_; + return *this; + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory ); + } + + bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {}; + }; + static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == + sizeof( 
VkPhysicalDeviceCoherentMemoryFeaturesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; + }; + + struct PhysicalDeviceColorWriteEnableFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {} ) VULKAN_HPP_NOEXCEPT : colorWriteEnable( colorWriteEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( + PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceColorWriteEnableFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & + operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceColorWriteEnableFeaturesEXT & + operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceColorWriteEnableFeaturesEXT & + setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + colorWriteEnable = colorWriteEnable_; + return *this; + } + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable ); + } + + bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {}; + }; + static_assert( sizeof( PhysicalDeviceColorWriteEnableFeaturesEXT ) == + sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
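+  // Sketch (illustrative; `gpu` is an assumed vk::PhysicalDevice): the same
+  // query can go through the raw pNext chain instead of vk::StructureChain:
+  //   vk::PhysicalDeviceColorWriteEnableFeaturesEXT colorWriteEnable;
+  //   vk::PhysicalDeviceFeatures2 features2;
+  //   features2.pNext = &colorWriteEnable;
+  //   gpu.getFeatures2( &features2 );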
); + + template <> + struct CppType + { + using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; + }; + + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT + : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) + , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( + PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupQuads = computeDerivativeGroupQuads_; + return *this; + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupLinear = computeDerivativeGroupLinear_; + return *this; + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && + ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + }; + static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == + sizeof( 
VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; + }; + + struct PhysicalDeviceConditionalRenderingFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT + : conditionalRendering( conditionalRendering_ ) + , inheritedConditionalRendering( inheritedConditionalRendering_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( + PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConditionalRenderingFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & + setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRendering = conditionalRendering_; + return *this; + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + inheritedConditionalRendering = inheritedConditionalRendering_; + return *this; + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && + ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); + } + + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + }; + static_assert( 
sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == + sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + }; + + struct PhysicalDeviceConservativeRasterizationPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( + float primitiveOverestimationSize_ = {}, + float maxExtraPrimitiveOverestimationSize_ = {}, + float extraPrimitiveOverestimationSizeGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveOverestimationSize( primitiveOverestimationSize_ ) + , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) + , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) + , primitiveUnderestimation( primitiveUnderestimation_ ) + , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) + , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) + , degenerateLinesRasterized( degenerateLinesRasterized_ ) + , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) + , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( + PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT( + VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConservativeRasterizationPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) 
&& + ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && + ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && + ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && + ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && + ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && + ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && + ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && + ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && + ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); + } + + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + void * pNext = {}; + float primitiveOverestimationSize = {}; + float maxExtraPrimitiveOverestimationSize = {}; + float extraPrimitiveOverestimationSizeGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + }; + static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == + sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
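+  // Sketch (illustrative; `gpu` assumed): properties structs like the one above
+  // are filled by the implementation via a getProperties2 chain, never set by
+  // the application:
+  //   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                   vk::PhysicalDeviceConservativeRasterizationPropertiesEXT>();
+  //   float maxOverEst = chain.get<vk::PhysicalDeviceConservativeRasterizationPropertiesEXT>()
+  //                        .maxExtraPrimitiveOverestimationSize;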
); + + template <> + struct CppType + { + using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceCooperativeMatrixFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : cooperativeMatrix( cooperativeMatrix_ ) + , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( + PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrixPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT + : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( + PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
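+  // Sketch (illustrative; `coopProps` is an assumed instance of the struct
+  // above): cooperativeMatrixSupportedStages is a bitmask, so a stage is tested
+  // with the flag operators:
+  //   bool computeOk = static_cast<bool>( coopProps.cooperativeMatrixSupportedStages &
+  //                                       vk::ShaderStageFlagBits::eCompute );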
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + }; + + struct PhysicalDeviceCornerSampledImageFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT + : cornerSampledImage( cornerSampledImage_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( + PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCornerSampledImageFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCornerSampledImageFeaturesNV & + setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + { + cornerSampledImage = cornerSampledImage_; + return *this; + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); + } + + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + }; + static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == + sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCornerSampledImageFeaturesNV; + }; + + struct PhysicalDeviceCoverageReductionModeFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( + PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoverageReductionModeFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); + } + + bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + }; + static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == + sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; + }; + + struct PhysicalDeviceCustomBorderColorFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {} ) VULKAN_HPP_NOEXCEPT + : customBorderColors( customBorderColors_ ) + , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( + PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT & + operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColors = customBorderColors_; + return *this; + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + return *this; + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) && + ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); + } + + bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; + }; + static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) == + sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + }; + + struct PhysicalDeviceCustomBorderColorPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( + PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorPropertiesEXT & + operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT & + operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); + } + + bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + void * pNext = {}; + uint32_t maxCustomBorderColorSamplers = {}; + }; + static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) == + sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + }; + + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT + : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( + &rhs ); + return *this; + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; + return *this; + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); + } + + bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; + }; + static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == + sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + }; + + struct PhysicalDeviceDepthClipEnableFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : depthClipEnable( depthClipEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( + PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipEnableFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); + } + + bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == + sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + }; + + struct PhysicalDeviceDepthStencilResolveProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDepthStencilResolveProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( + PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthStencilResolveProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthStencilResolveProperties & + operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties & + operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ); + } + + bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + }; + static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == + sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthStencilResolveProperties; + }; + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + + struct PhysicalDeviceDescriptorIndexingFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( 
descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( + PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures & + operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + 
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = 
descriptorBindingPartiallyBound_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeatures & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + }; + static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == + sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingFeatures; + }; + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + + struct PhysicalDeviceDescriptorIndexingProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDescriptorIndexingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( 
maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( + PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingProperties & + operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties & + operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( 
maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + void * pNext = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + }; + static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == + sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingProperties; + }; + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceGeneratedCommands( deviceGeneratedCommands_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) + , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) + , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) + , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ) + , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ) + , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ) + , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ) + , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ) + , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && + ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && + ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && + ( 
minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && + ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && + ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + void * pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + }; + + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceMemoryReport( deviceMemoryReport_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( + PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceMemoryReportFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & + operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & + operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & + setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT + { + deviceMemoryReport = deviceMemoryReport_; + return *this; + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); + } + + bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; + }; + static_assert( sizeof( PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == + sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; + }; + + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT + : diagnosticsConfig( diagnosticsConfig_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( + PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiagnosticsConfigFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & + operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV & + operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & + setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); + } + + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + }; + static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == + sizeof( 
VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + }; + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : maxDiscardRectangles( maxDiscardRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( + PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiscardRectanglePropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + void * pNext = {}; + uint32_t maxDiscardRectangles = {}; + }; + static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == + sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
+  {
+    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
+  };
+
+  struct PhysicalDeviceDriverProperties
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(
+      VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+      VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : driverID( driverID_ )
+      , driverName( driverName_ )
+      , driverInfo( driverInfo_ )
+      , conformanceVersion( conformanceVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties &
+      operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDriverProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) &&
+             ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) &&
+             ( conformanceVersion == rhs.conformanceVersion );
+    }
+
+    bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverId      driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDriverProperties; + }; + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + + struct PhysicalDeviceExclusiveScissorFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) + VULKAN_HPP_NOEXCEPT : exclusiveScissor( exclusiveScissor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( + PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExclusiveScissorFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & + operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV & + operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExclusiveScissorFeaturesNV & + setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissor = exclusiveScissor_; + return *this; + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor ); + } + + bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + }; + static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + }; + + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT + : extendedDynamicState2( extendedDynamicState2_ ) + , extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ ) + , extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( + PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState2FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2 = extendedDynamicState2_; + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; + return *this; + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; + return *this; + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( extendedDynamicState2 == rhs.extendedDynamicState2 ) && + ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) && + ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints ); + } + + bool operator!=( 
PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {}; + }; + static_assert( sizeof( PhysicalDeviceExtendedDynamicState2FeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; + }; + + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT + : extendedDynamicState( extendedDynamicState_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( + PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicStateFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & + operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & + operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & + setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState = extendedDynamicState_; + return *this; + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState ); + } + + bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; + 
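+    // Illustrative sketch, not generated from vk.xml: a feature struct such as
+    // this one is typically chained into DeviceCreateInfo::pNext to request the
+    // feature at device creation. `gpu` and `deviceCreateInfo` are assumed
+    // names for the example.
+    //
+    //   vk::PhysicalDeviceExtendedDynamicStateFeaturesEXT extendedDynamicStateFeature{ VK_TRUE };
+    //   deviceCreateInfo.pNext = &extendedDynamicStateFeature;
+    //   vk::Device device      = gpu.createDevice( deviceCreateInfo );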
}; + static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; + }; + + struct PhysicalDeviceExternalImageFormatInfo + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalImageFormatInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalImageFormatInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & + operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo & + operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalImageFormatInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalImageFormatInfo; + }; + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + + struct PhysicalDeviceExternalMemoryHostPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( + PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryHostPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryHostPropertiesEXT & + operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryHostPropertiesEXT & + operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); + } + + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == + sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + }; + + struct PhysicalDeviceFloatControlsProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFloatControlsProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT + : denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFloatControlsProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFloatControlsProperties & + operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
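+    // Illustrative sketch, not generated from vk.xml: property structs such as
+    // this one are read-only outputs; they are filled in by chaining them into
+    // PhysicalDeviceProperties2 rather than set by the application. `gpu` is an
+    // assumed vk::PhysicalDevice for the example.
+    //
+    //   vk::PhysicalDeviceFloatControlsProperties floatControls{};
+    //   vk::PhysicalDeviceProperties2             properties2{};
+    //   properties2.pNext = &floatControls;
+    //   gpu.getProperties2( &properties2 );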
PhysicalDeviceFloatControlsProperties & + operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && + ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && + ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && + ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && + ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && + ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); + } + + bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + }; + static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFloatControlsProperties; + }; + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( + PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & + operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & + operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & + setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDeferred = fragmentDensityMapDeferred_; + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, + uint32_t maxSubsampledArrayLayers_ = {}, + uint32_t maxDescriptorSetSubsampledSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : subsampledLoads( subsampledLoads_ ) + , subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ) + , maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ) + , maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( + PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2PropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2PropertiesEXT & + operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & + operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) && + ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) && + ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && + ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; + uint32_t maxSubsampledArrayLayers = {}; + uint32_t maxDescriptorSetSubsampledSamplers = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMap( fragmentDensityMap_ ) + , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) + , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( + PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMap = fragmentDensityMap_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + return *this; + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && + ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && + ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT + : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) + , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) + , fragmentDensityInvocations( fragmentDensityInvocations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( + PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && + ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && + ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); + } + + bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; + 
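+    // Illustrative sketch, not generated from vk.xml: the conversion operators
+    // above let the wrapper be used wherever the corresponding C struct is
+    // expected, without a copy.
+    //
+    //   vk::PhysicalDeviceFragmentDensityMapPropertiesEXT densityProps{};
+    //   const VkPhysicalDeviceFragmentDensityMapPropertiesEXT & cProps = densityProps;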
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; + }; + + struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderBarycentric = fragmentShaderBarycentric_; + return *this; + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + }; + + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) + , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) + , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; + return *this; + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; + return *this; + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && + ( fragmentShaderPixelInterlock == 
rhs.fragmentShaderPixelInterlock ) && + ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); + } + + bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShadingRateEnums( fragmentShadingRateEnums_ ) + , supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ) + , noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShadingRateEnums = fragmentShadingRateEnums_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + supersampleFragmentShadingRates = supersampleFragmentShadingRates_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + 
noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && + ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && + ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT + : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & 
+  struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ =
+        VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT
+      : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
+      PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
+      VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
+          *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
+      operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
+      operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setMaxFragmentShadingRateInvocationCount(
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount =
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) ==
+                   sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
+  {
+    using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+  };
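+  // NOTE (editorial, not part of the generated header): properties structs like the one
+  // above are only read back, never set by the application. A minimal sketch using the
+  // templated getProperties2 overload ("physicalDevice" assumed as before):
+  //
+  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                              vk::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>();
+  //   auto samples = chain.get<vk::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>()
+  //                    .maxFragmentShadingRateInvocationCount;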
+  struct PhysicalDeviceFragmentShadingRateFeaturesKHR
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_   = {},
+      VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_  = {},
+      VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ )
+      , primitiveFragmentShadingRate( primitiveFragmentShadingRate_ )
+      , attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(
+      PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentShadingRateFeaturesKHR(
+          *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
+      operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR &
+      operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR &
+      setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR &
+      setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate(
+      VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) &&
+             ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) &&
+             ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+    void *                       pNext                         = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate   = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate  = {};
+    VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShadingRateFeaturesKHR ) ==
+                   sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; + }; + + struct PhysicalDeviceFragmentShadingRatePropertiesKHR + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, + uint32_t maxFragmentSizeAspectRatio_ = {}, + uint32_t maxFragmentShadingRateCoverageSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {} ) VULKAN_HPP_NOEXCEPT + : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ) + , primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ) + , layeredShadingRateAttachments( layeredShadingRateAttachments_ ) + , fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ) + , maxFragmentSize( maxFragmentSize_ ) + , maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ) + , maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ) + , maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ) + , fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ) + , fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ) + , fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ) + , fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ) + , fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ) + , fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ) + , fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRatePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRatePropertiesKHR & + operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRatePropertiesKHR & + operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == + rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && + ( primitiveFragmentShadingRateWithMultipleViewports == + rhs.primitiveFragmentShadingRateWithMultipleViewports ) && + ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && + ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && + ( maxFragmentSize == rhs.maxFragmentSize ) && + ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && + ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && + ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && + ( fragmentShadingRateWithShaderDepthStencilWrites == + rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && + ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && + ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && + ( fragmentShadingRateWithConservativeRasterization == + rhs.fragmentShadingRateWithConservativeRasterization ) && + ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && + ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && + ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); + } + + bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; + uint32_t maxFragmentSizeAspectRatio = {}; + uint32_t maxFragmentShadingRateCoverageSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; + }; + static_assert( sizeof( PhysicalDeviceFragmentShadingRatePropertiesKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; + }; + + struct PhysicalDeviceGlobalPriorityQueryFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {} ) VULKAN_HPP_NOEXCEPT + : globalPriorityQuery( globalPriorityQuery_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesEXT( + PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGlobalPriorityQueryFeaturesEXT( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGlobalPriorityQueryFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesEXT & + operator=( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGlobalPriorityQueryFeaturesEXT & + operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceGlobalPriorityQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceGlobalPriorityQueryFeaturesEXT & + setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + { + globalPriorityQuery = globalPriorityQuery_; + return *this; + } + + operator VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ); + } + + bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + }; + static_assert( sizeof( PhysicalDeviceGlobalPriorityQueryFeaturesEXT ) == + sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceGlobalPriorityQueryFeaturesEXT; + }; + + struct PhysicalDeviceGroupProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( + uint32_t physicalDeviceCount_ = {}, + std::array const & physicalDevices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ) + , physicalDevices( physicalDevices_ ) + , subsetAllocation( subsetAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGroupProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties & + operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); + } + + bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; + void * pNext = {}; + uint32_t physicalDeviceCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D + physicalDevices = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; + }; + static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceGroupProperties; + }; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + + struct PhysicalDeviceHostQueryResetFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceHostQueryResetFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT + : hostQueryReset( hostQueryReset_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & + operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures & + operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceHostQueryResetFeatures & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } + + operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); + } + + bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + }; + static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceHostQueryResetFeatures; + }; + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + + struct PhysicalDeviceIDProperties + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceUUID( deviceUUID_ ) + , driverUUID( driverUUID_ ) + , deviceLUID( deviceLUID_ ) + , deviceNodeMask( deviceNodeMask_ ) + , deviceLUIDValid( deviceLUIDValid_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties & + operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); + } + + bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + }; + static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceIDProperties; + }; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + + struct PhysicalDeviceImageDrmFormatModifierInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( + uint64_t drmFormatModifier_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( + PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageDrmFormatModifierInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } 
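+    // NOTE (editorial, not part of the generated header): a minimal sketch of the
+    // ArrayProxyNoTemporaries overload above, which fills queueFamilyIndexCount and
+    // pQueueFamilyIndices together from any contiguous range. It assumes enhanced mode
+    // (the default) and hypothetical queue family indices obtained elsewhere:
+    //
+    //   std::vector<uint32_t> families = { graphicsFamily, transferFamily };
+    //   vk::PhysicalDeviceImageDrmFormatModifierInfoEXT info{};
+    //   info.setSharingMode( vk::SharingMode::eConcurrent ).setQueueFamilyIndices( families );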
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + } + + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == + sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + }; + + struct PhysicalDeviceImageRobustnessFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : robustImageAccess( robustImageAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( + PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeaturesEXT( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageRobustnessFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeaturesEXT & + operator=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeaturesEXT & + operator=( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageRobustnessFeaturesEXT & + setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } + + operator VkPhysicalDeviceImageRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageRobustnessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); + } + + bool operator!=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + }; + static_assert( sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageRobustnessFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageRobustnessFeaturesEXT; + }; + + struct PhysicalDeviceImageViewImageFormatInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = + VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT + : imageViewType( imageViewType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( + PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewImageFormatInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageViewImageFormatInfoEXT & + setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT + { + imageViewType = imageViewType_; + return *this; + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); + } + + bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + }; + static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == + sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + }; + + struct PhysicalDeviceImagelessFramebufferFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT + : imagelessFramebuffer( imagelessFramebuffer_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( + PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImagelessFramebufferFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & + operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures & + operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImagelessFramebufferFeatures & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); + } + + bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + }; + static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == + sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceImagelessFramebufferFeatures; + }; + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + + struct PhysicalDeviceIndexTypeUint8FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT + : indexTypeUint8( indexTypeUint8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIndexTypeUint8FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceIndexTypeUint8FeaturesEXT & + setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } + + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); + } + + bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + }; + static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == + sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT; + }; + + struct PhysicalDeviceInheritedViewportScissorFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {} ) VULKAN_HPP_NOEXCEPT + : inheritedViewportScissor2D( inheritedViewportScissor2D_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( + PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInheritedViewportScissorFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & + operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInheritedViewportScissorFeaturesNV & + operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceInheritedViewportScissorFeaturesNV & + setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + inheritedViewportScissor2D = inheritedViewportScissor2D_; + return *this; + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D ); + } + + bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {}; + }; + static_assert( sizeof( PhysicalDeviceInheritedViewportScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; + }; + + struct PhysicalDeviceInlineUniformBlockFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT + : inlineUniformBlock( inlineUniformBlock_ ) + , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( + PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeaturesEXT & + operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeaturesEXT & + operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceInlineUniformBlockFeaturesEXT & + setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + { + inlineUniformBlock = inlineUniformBlock_; + return *this; + } + + PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == + rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); + } + + bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + }; + static_assert( sizeof( 
PhysicalDeviceInlineUniformBlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT; + }; + + struct PhysicalDeviceInlineUniformBlockPropertiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( + uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT + : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) + , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) + , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( + PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockPropertiesEXT & + operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockPropertiesEXT & + operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == + rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == + rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + } + + bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( 
rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
+    void *   pNext                                                   = {};
+    uint32_t maxInlineUniformBlockSize                               = {};
+    uint32_t maxPerStageDescriptorInlineUniformBlocks                = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetInlineUniformBlocks                     = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks      = {};
+  };
+  static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT>
+  {
+    using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT;
+  };
+
+  struct PhysicalDeviceLineRasterizationFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_         = {},
+      VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_           = {},
+      VULKAN_HPP_NAMESPACE::Bool32 smoothLines_              = {},
+      VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_   = {},
+      VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_      = {} ) VULKAN_HPP_NOEXCEPT
+      : rectangularLines( rectangularLines_ )
+      , bresenhamLines( bresenhamLines_ )
+      , smoothLines( smoothLines_ )
+      , stippledRectangularLines( stippledRectangularLines_ )
+      , stippledBresenhamLines( stippledBresenhamLines_ )
+      , stippledSmoothLines( stippledSmoothLines_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(
+      PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceLineRasterizationFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &
+      operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLineRasterizationFeaturesEXT &
+      operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT { rectangularLines = rectangularLines_; return *this; }
+    PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT { bresenhamLines = bresenhamLines_; return *this; }
+    PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT { smoothLines = smoothLines_; return *this; }
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT { stippledRectangularLines = stippledRectangularLines_; return *this; }
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT { stippledBresenhamLines = stippledBresenhamLines_; return *this; }
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT { stippledSmoothLines = stippledSmoothLines_; return *this; }
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>( this ); }
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) &&
+             ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) &&
+             ( stippledRectangularLines == rhs.stippledRectangularLines ) &&
+             ( stippledBresenhamLines == rhs.stippledBresenhamLines ) &&
+             ( stippledSmoothLines == rhs.stippledSmoothLines );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+    void *                       pNext                    = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rectangularLines         = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines           = {};
+    VULKAN_HPP_NAMESPACE::Bool32 smoothLines              = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines   = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines      = {};
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
+  {
+    using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
+  };
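For orientation, a minimal sketch of how an application might query these line-rasterization features through vulkan.hpp's structure-chain API. It assumes a Vulkan 1.1 instance, a valid vk::PhysicalDevice named `physicalDevice` (a placeholder), and a device that advertises VK_EXT_line_rasterization:

    // Sketch: query VK_EXT_line_rasterization feature support via a structure chain.
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceLineRasterizationFeaturesEXT>();
    const auto & lineFeatures = chain.get<vk::PhysicalDeviceLineRasterizationFeaturesEXT>();
    if ( lineFeatures.bresenhamLines )
    {
      // Bresenham line rasterization can be requested by chaining the same struct
      // into VkDeviceCreateInfo::pNext at device creation.
    }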
+
+  struct PhysicalDeviceLineRasterizationPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(
+      PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceLineRasterizationPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationPropertiesEXT &
+      operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceLineRasterizationPropertiesEXT &
+      operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT *>( this ); }
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+    void *   pNext                     = {};
+    uint32_t lineSubPixelPrecisionBits = {};
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
+  {
+    using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
+  };
+
+  struct PhysicalDeviceMaintenance3Properties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMaintenance3Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(
+      uint32_t                         maxPerSetDescriptors_    = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxPerSetDescriptors( maxPerSetDescriptors_ )
+      , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance3Properties &
+      operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMaintenance3Properties &
+      operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>( this ); }
+    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
+             ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceMaintenance3Properties;
+    void *                           pNext                   = {};
+    uint32_t                         maxPerSetDescriptors    = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
+  {
+    using Type = PhysicalDeviceMaintenance3Properties;
+  };
+  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+
+  struct PhysicalDeviceMemoryBudgetPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(
+      std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {},
+      std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapUsage_  = {} ) VULKAN_HPP_NOEXCEPT
+      : heapBudget( heapBudget_ )
+      , heapUsage( heapUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(
+      PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMemoryBudgetPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT &
+      operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT &
+      operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this ); }
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) &&
+             ( heapUsage == rhs.heapUsage );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage  = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
+  };
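As a usage note, a minimal sketch of reading per-heap budgets via VK_EXT_memory_budget, again assuming a placeholder `physicalDevice` and that the extension is available:

    // Sketch: chain the budget struct onto the memory-properties query.
    auto chain = physicalDevice.getMemoryProperties2<vk::PhysicalDeviceMemoryProperties2,
                                                     vk::PhysicalDeviceMemoryBudgetPropertiesEXT>();
    const auto & memProps = chain.get<vk::PhysicalDeviceMemoryProperties2>().memoryProperties;
    const auto & budget   = chain.get<vk::PhysicalDeviceMemoryBudgetPropertiesEXT>();
    for ( uint32_t i = 0; i < memProps.memoryHeapCount; ++i )
    {
      // budget.heapBudget[i]: rough estimate of how much of heap i this process may use;
      // budget.heapUsage[i]:  the process's current usage of heap i.
    }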
+
+  struct PhysicalDeviceMemoryPriorityFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryPriority( memoryPriority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMemoryPriorityFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT &
+      operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT &
+      operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT { memoryPriority = memoryPriority_; return *this; }
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this ); }
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+    void *                       pNext          = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
+  };
+
+  struct PhysicalDeviceMeshShaderFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
+                                          VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
+      : taskShader( taskShader_ )
+      , meshShader( meshShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV &
+      operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderFeaturesNV &
+      operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT { taskShader = taskShader_; return *this; }
+    PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT { meshShader = meshShader_; return *this; }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV *>( this ); }
+    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) &&
+             ( meshShader == rhs.meshShader );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+    void *                       pNext      = {};
+    VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
+  {
+    using Type = PhysicalDeviceMeshShaderFeaturesNV;
+  };
+
+  struct PhysicalDeviceMeshShaderPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceMeshShaderPropertiesNV( uint32_t                        maxDrawMeshTasksCount_             = {},
+                                            uint32_t                        maxTaskWorkGroupInvocations_       = {},
+                                            std::array<uint32_t, 3> const & maxTaskWorkGroupSize_              = {},
+                                            uint32_t                        maxTaskTotalMemorySize_            = {},
+                                            uint32_t                        maxTaskOutputCount_                = {},
+                                            uint32_t                        maxMeshWorkGroupInvocations_       = {},
+                                            std::array<uint32_t, 3> const & maxMeshWorkGroupSize_              = {},
+                                            uint32_t                        maxMeshTotalMemorySize_            = {},
+                                            uint32_t                        maxMeshOutputVertices_             = {},
+                                            uint32_t                        maxMeshOutputPrimitives_           = {},
+                                            uint32_t                        maxMeshMultiviewViewCount_         = {},
+                                            uint32_t                        meshOutputPerVertexGranularity_    = {},
+                                            uint32_t                        meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
+      , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
+      , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ )
+      , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
+      , maxTaskOutputCount( maxTaskOutputCount_ )
+      , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
+      , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ )
+      , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
+      , maxMeshOutputVertices( maxMeshOutputVertices_ )
+      , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
+      , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
+      , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
+      , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV &
+      operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderPropertiesNV &
+      operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV *>( this ); }
+    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) &&
+             ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) &&
+             ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) &&
+             ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) &&
+             ( maxTaskOutputCount == rhs.maxTaskOutputCount ) &&
+             ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) &&
+             ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) &&
+             ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) &&
+             ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) &&
+             ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) &&
+             ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) &&
+             ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) &&
+             ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+    void *   pNext                       = {};
+    uint32_t maxDrawMeshTasksCount       = {};
+    uint32_t maxTaskWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
+    uint32_t maxTaskTotalMemorySize      = {};
+    uint32_t maxTaskOutputCount          = {};
+    uint32_t maxMeshWorkGroupInvocations = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
+    uint32_t maxMeshTotalMemorySize            = {};
+    uint32_t maxMeshOutputVertices             = {};
+    uint32_t maxMeshOutputPrimitives           = {};
+    uint32_t maxMeshMultiviewViewCount         = {};
+    uint32_t meshOutputPerVertexGranularity    = {};
+    uint32_t meshOutputPerPrimitiveGranularity = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
+  {
+    using Type = PhysicalDeviceMeshShaderPropertiesNV;
+  };
+
+  struct PhysicalDeviceMultiviewFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(
+      VULKAN_HPP_NAMESPACE::Bool32 multiview_                   = {},
+      VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_     = {},
+      VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
+      : multiview( multiview_ )
+      , multiviewGeometryShader( multiviewGeometryShader_ )
+      , multiviewTessellationShader( multiviewTessellationShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &
+      operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT { multiview = multiview_; return *this; }
+    PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return *this; }
+    PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return *this; }
+
+    operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>( this ); }
+    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) &&
+             ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
+             ( multiviewTessellationShader == rhs.multiviewTessellationShader );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
+    void *                       pNext                       = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview                   = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader     = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
+  {
+    using Type = PhysicalDeviceMultiviewFeatures;
+  };
+  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
+
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
+      VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
+      : perViewPositionAllComponents( perViewPositionAllComponents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
+      PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
+      VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
+          *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &
+      operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &
+      operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this ); }
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+    void *                       pNext                        = {};
+    VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) ==
+                   sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
+  {
+    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+  };
+
+  struct PhysicalDeviceMultiviewProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMultiviewProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_     = {},
+                                         uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxMultiviewViewCount( maxMultiviewViewCount_ )
+      , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewProperties &
+      operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>( this ); }
+    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
+             ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
+    void *   pNext                     = {};
+    uint32_t maxMultiviewViewCount     = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
+  {
+    using Type = PhysicalDeviceMultiviewProperties;
+  };
+  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+
+  struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
+      VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {} ) VULKAN_HPP_NOEXCEPT
+      : mutableDescriptorType( mutableDescriptorType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
+      PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
+          *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
+      operator=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
+      operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT { mutableDescriptorType = mutableDescriptorType_; return *this; }
+
+    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *>( this ); }
+    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType );
+    }
+
+    bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+    void *                       pNext                 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE ) ==
+                   sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE>
+  {
+    using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+  };
+
+  struct PhysicalDevicePCIBusInfoPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_   = {},
+                                                                uint32_t pciBus_      = {},
+                                                                uint32_t pciDevice_   = {},
+                                                                uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pciDomain( pciDomain_ )
+      , pciBus( pciBus_ )
+      , pciDevice( pciDevice_ )
+      , pciFunction( pciFunction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePCIBusInfoPropertiesEXT(
+          *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePCIBusInfoPropertiesEXT &
+      operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePCIBusInfoPropertiesEXT &
+      operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this ); }
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) &&
+             ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction );
+    }
+
+    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+    void *   pNext       = {};
+    uint32_t pciDomain   = {};
+    uint32_t pciBus      = {};
+    uint32_t pciDevice   = {};
+    uint32_t pciFunction = {};
+  };
+  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
+  {
+    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
+  };
+
+  struct PhysicalDevicePerformanceQueryFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_         = {},
+      VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT
+      : performanceCounterQueryPools( performanceCounterQueryPools_ )
+      , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(
+      PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePerformanceQueryFeaturesKHR(
+          *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
+      operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryFeaturesKHR &
+      operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT { performanceCounterQueryPools = performanceCounterQueryPools_; return *this; }
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT { performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; return *this; }
+
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this ); }
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) &&
+             ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+    void *                       pNext                                = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools         = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
+  };
+  static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) ==
+                   sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
+  {
+    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
+  };
+
+  struct PhysicalDevicePerformanceQueryPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT
+      : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(
+      PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePerformanceQueryPropertiesKHR(
+          *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryPropertiesKHR &
+      operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePerformanceQueryPropertiesKHR &
+      operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this ); }
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+    void *                       pNext                         = {};
+    VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
+  };
+  static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) ==
+                   sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
+  {
+    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
+  };
+
+  struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pipelineCreationCacheControl( pipelineCreationCacheControl_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
+      PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
+      VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
+          *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeaturesEXT &
+      operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT &
+      operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT { pipelineCreationCacheControl = pipelineCreationCacheControl_; return *this; }
+
+    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *>( this ); }
+    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
+    }
+
+    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+    void *                       pNext                        = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
+  };
+  static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) ==
+                   sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePipelineCreationCacheControlFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT>
+  {
+    using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+  };
+
+  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pipelineExecutableInfo( pipelineExecutableInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
+      PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
+      VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
+          *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
+      operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
+      operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT { pipelineExecutableInfo = pipelineExecutableInfo_; return *this; }
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this ); }
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this ); }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
+    }
+
+    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+    void *                       pNext                  = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
+  };
+  static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) ==
+                   sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
+  {
+    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+  };
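As a brief, hedged illustration of how such a feature struct is typically consumed: it is chained into VkDeviceCreateInfo::pNext when creating the device. All names below are placeholders and the surrounding setup (queue infos, extension enumeration) is omitted:

    // Sketch: opt in to VK_KHR_pipeline_executable_properties at device creation.
    vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR execInfoFeature{ VK_TRUE };
    vk::DeviceCreateInfo deviceInfo{};
    deviceInfo.pNext = &execInfoFeature; // chained so the driver enables pipelineExecutableInfo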
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
+  {
+    using Type = PhysicalDevicePointClippingProperties;
+  };
+  using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct PhysicalDevicePortabilitySubsetFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 events_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {} ) VULKAN_HPP_NOEXCEPT
+      : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ )
+      , events( events_ )
+      , imageViewFormatReinterpretation( imageViewFormatReinterpretation_ )
+      , imageViewFormatSwizzle( imageViewFormatSwizzle_ )
+      , imageView2DOn3DImage( imageView2DOn3DImage_ )
+      , multisampleArrayImage( multisampleArrayImage_ )
+      , mutableComparisonSamplers( mutableComparisonSamplers_ )
+      , pointPolygons( pointPolygons_ )
+      , samplerMipLodBias( samplerMipLodBias_ )
+      , separateStencilMaskRef( separateStencilMaskRef_ )
+      , shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ )
+      , tessellationIsolines( tessellationIsolines_ )
+      , tessellationPointMode( tessellationPointMode_ )
+      , triangleFans( triangleFans_ )
+      , vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(
+      PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePortabilitySubsetFeaturesKHR(
+          *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors(
+      VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
+    {
+      events = events_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation(
+      VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewFormatSwizzle = imageViewFormatSwizzle_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView2DOn3DImage = imageView2DOn3DImage_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multisampleArrayImage = multisampleArrayImage_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableComparisonSamplers = mutableComparisonSamplers_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pointPolygons = pointPolygons_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMipLodBias = samplerMipLodBias_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateStencilMaskRef = separateStencilMaskRef_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationIsolines = tessellationIsolines_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationPointMode = tessellationPointMode_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangleFans = triangleFans_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride(
+      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
+      return *this;
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default;
+# else
+    bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && ( events == rhs.events ) &&
+             ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) &&
+             ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) &&
+             ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) &&
+             ( multisampleArrayImage == rhs.multisampleArrayImage ) &&
+             ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && ( pointPolygons == rhs.pointPolygons ) &&
+             ( samplerMipLodBias == rhs.samplerMipLodBias ) &&
+             ( separateStencilMaskRef == rhs.separateStencilMaskRef ) &&
+             ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) &&
+             ( tessellationIsolines == rhs.tessellationIsolines ) &&
+             ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) &&
+             ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
+    }
+
+    bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 events = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
+  };
+  static_assert( sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) ==
+                   sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct PhysicalDevicePortabilitySubsetPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(
+      uint32_t minVertexInputBindingStrideAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(
+      PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePortabilitySubsetPropertiesKHR(
+          *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR &
+      operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR &
+      operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR &
+      setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
+      return *this;
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
+# else
+    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
+    }
+
+    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+    void * pNext = {};
+    uint32_t minVertexInputBindingStrideAlignment = {};
+  };
+  static_assert( sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) ==
+                   sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
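+
+  // Editorial note: illustrative sketch, not generated code. The two
+  // portability-subset wrappers above only exist when the application defines
+  // VK_ENABLE_BETA_EXTENSIONS before including this header. A typical query
+  // chains the features struct behind PhysicalDeviceFeatures2 (`gpu` below is
+  // a placeholder vk::PhysicalDevice):
+  //
+  //   vk::PhysicalDevicePortabilitySubsetFeaturesKHR portability;
+  //   vk::PhysicalDeviceFeatures2 features2;
+  //   features2.pNext = &portability;
+  //   gpu.getFeatures2( &features2 );  // fills both structs in one call
+  //   if ( !portability.triangleFans )
+  //   {
+  //     // e.g. fall back to indexed triangle lists
+  //   }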
+
+  struct PhysicalDevicePrivateDataFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevicePrivateDataFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : privateData( privateData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( PhysicalDevicePrivateDataFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrivateDataFeaturesEXT( *reinterpret_cast<PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeaturesEXT &
+      operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrivateDataFeaturesEXT &
+      operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePrivateDataFeaturesEXT &
+      setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateData = privateData_;
+      return *this;
+    }
+
+    operator VkPhysicalDevicePrivateDataFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData );
+    }
+
+    bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+  };
+  static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeaturesEXT>
+  {
+    using Type = PhysicalDevicePrivateDataFeaturesEXT;
+  };
+
+  struct PhysicalDeviceProtectedMemoryFeatures
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
+      : protectedMemory( protectedMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProtectedMemoryFeatures(
+          *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures &
+      operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryFeatures &
+      operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures &
+      setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
+  {
+    using Type = PhysicalDeviceProtectedMemoryFeatures;
+  };
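+
+  // Editorial note: illustrative sketch, not generated code. Feature structs
+  // like the one above are enabled by chaining them into
+  // DeviceCreateInfo::pNext; the fluent setters keep the chain terse:
+  //
+  //   auto protectedMem =
+  //     vk::PhysicalDeviceProtectedMemoryFeatures{}.setProtectedMemory( VK_TRUE );
+  //   vk::DeviceCreateInfo deviceCreateInfo{};   // queue/extension setup omitted
+  //   deviceCreateInfo.setPNext( &protectedMem );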
+
+  struct PhysicalDeviceProtectedMemoryProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceProtectedMemoryProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
+      : protectedNoFault( protectedNoFault_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProtectedMemoryProperties(
+          *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryProperties &
+      operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryProperties &
+      operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) ==
+                   sizeof( VkPhysicalDeviceProtectedMemoryProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
+  {
+    using Type = PhysicalDeviceProtectedMemoryProperties;
+  };
+
+  struct PhysicalDeviceProvokingVertexFeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : provokingVertexLast( provokingVertexLast_ )
+      , transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(
+      PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProvokingVertexFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &
+      operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexFeaturesEXT &
+      operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceProvokingVertexFeaturesEXT &
+      setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
+    {
+      provokingVertexLast = provokingVertexLast_;
+      return *this;
+    }
+
+    PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex(
+      VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) &&
+             ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex );
+    }
+
+    bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProvokingVertexFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProvokingVertexFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
+  {
+    using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
+  };
+
+  struct PhysicalDeviceProvokingVertexPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : provokingVertexModePerPipeline( provokingVertexModePerPipeline_ )
+      , transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(
+      PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProvokingVertexPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexPropertiesEXT &
+      operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexPropertiesEXT &
+      operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) &&
+             ( transformFeedbackPreservesTriangleFanProvokingVertex ==
+               rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
+    }
+
+    bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProvokingVertexPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProvokingVertexPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
+  {
+    using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
+  };
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxPushDescriptors( maxPushDescriptors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(
+      PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePushDescriptorPropertiesKHR(
+          *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushDescriptorPropertiesKHR &
+      operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushDescriptorPropertiesKHR &
+      operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors );
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    void * pNext = {};
+    uint32_t maxPushDescriptors = {};
+  };
+  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ==
+                   sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
+  {
+    using Type = PhysicalDevicePushDescriptorPropertiesKHR;
+  };
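+
+  // Editorial note: usage sketch, not generated code. Properties structs such
+  // as the two above carry no field setters because the implementation fills
+  // them in; the usual way to read them is a StructureChain query
+  // (`gpu` is a placeholder vk::PhysicalDevice):
+  //
+  //   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                   vk::PhysicalDevicePushDescriptorPropertiesKHR>();
+  //   uint32_t maxPush =
+  //     chain.get<vk::PhysicalDevicePushDescriptorPropertiesKHR>().maxPushDescriptors;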
+
+  struct PhysicalDeviceRayQueryFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {} ) VULKAN_HPP_NOEXCEPT
+      : rayQuery( rayQuery_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR &
+      operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayQuery = rayQuery_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery );
+    }
+
+    bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayQueryFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
+  {
+    using Type = PhysicalDeviceRayQueryFeaturesKHR;
+  };
+
+  struct PhysicalDeviceRayTracingPipelineFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT
+      : rayTracingPipeline( rayTracingPipeline_ )
+      , rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ )
+      , rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ )
+      , rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ )
+      , rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(
+      PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingPipelineFeaturesKHR(
+          *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
+      operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR &
+      operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR &
+      setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipeline = rayTracingPipeline_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay(
+      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed(
+      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect(
+      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPipelineFeaturesKHR &
+      setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) &&
+             ( rayTracingPipelineShaderGroupHandleCaptureReplay ==
+               rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) &&
+             ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed ==
+               rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) &&
+             ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) &&
+             ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPipelineFeaturesKHR ) ==
+                   sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
+  {
+    using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR;
+  };
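+
+  // Editorial note: usage sketch, not generated code (assumes the device
+  // advertises VK_KHR_ray_query and VK_KHR_ray_tracing_pipeline). Both feature
+  // structs above can be queried in one StructureChain and the filled chain
+  // reused verbatim as the DeviceCreateInfo::pNext chain:
+  //
+  //   auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                 vk::PhysicalDeviceRayQueryFeaturesKHR,
+  //                                 vk::PhysicalDeviceRayTracingPipelineFeaturesKHR>();
+  //   bool hasRayQuery = chain.get<vk::PhysicalDeviceRayQueryFeaturesKHR>().rayQuery;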
+
+  struct PhysicalDeviceRayTracingPipelinePropertiesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {},
+                                                     uint32_t maxRayRecursionDepth_ = {},
+                                                     uint32_t maxShaderGroupStride_ = {},
+                                                     uint32_t shaderGroupBaseAlignment_ = {},
+                                                     uint32_t shaderGroupHandleCaptureReplaySize_ = {},
+                                                     uint32_t maxRayDispatchInvocationCount_ = {},
+                                                     uint32_t shaderGroupHandleAlignment_ = {},
+                                                     uint32_t maxRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderGroupHandleSize( shaderGroupHandleSize_ )
+      , maxRayRecursionDepth( maxRayRecursionDepth_ )
+      , maxShaderGroupStride( maxShaderGroupStride_ )
+      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
+      , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ )
+      , maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ )
+      , shaderGroupHandleAlignment( shaderGroupHandleAlignment_ )
+      , maxRayHitAttributeSize( maxRayHitAttributeSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(
+      PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingPipelinePropertiesKHR(
+          *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelinePropertiesKHR &
+      operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPipelinePropertiesKHR &
+      operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
+             ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) &&
+             ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
+             ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) &&
+             ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) &&
+             ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) &&
+             ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) &&
+             ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
+    void * pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRayRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint32_t shaderGroupHandleCaptureReplaySize = {};
+    uint32_t maxRayDispatchInvocationCount = {};
+    uint32_t shaderGroupHandleAlignment = {};
+    uint32_t maxRayHitAttributeSize = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPipelinePropertiesKHR ) ==
+                   sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
+  {
+    using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR;
+  };
+
+  struct PhysicalDeviceRayTracingPropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
+                                            uint32_t maxRecursionDepth_ = {},
+                                            uint32_t maxShaderGroupStride_ = {},
+                                            uint32_t shaderGroupBaseAlignment_ = {},
+                                            uint64_t maxGeometryCount_ = {},
+                                            uint64_t maxInstanceCount_ = {},
+                                            uint64_t maxTriangleCount_ = {},
+                                            uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderGroupHandleSize( shaderGroupHandleSize_ )
+      , maxRecursionDepth( maxRecursionDepth_ )
+      , maxShaderGroupStride( maxShaderGroupStride_ )
+      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
+      , maxGeometryCount( maxGeometryCount_ )
+      , maxInstanceCount( maxInstanceCount_ )
+      , maxTriangleCount( maxTriangleCount_ )
+      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPropertiesNV &
+      operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingPropertiesNV &
+      operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) &&
+             ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
+             ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) &&
+             ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) &&
+             ( maxTriangleCount == rhs.maxTriangleCount ) &&
+             ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+    void * pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxTriangleCount = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
+  {
+    using Type = PhysicalDeviceRayTracingPropertiesNV;
+  };
+
+  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
+      VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT
+      : representativeFragmentTest( representativeFragmentTest_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
+      PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
+      VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
+          *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
+      operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
+      operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
+      setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTest = representativeFragmentTest_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( representativeFragmentTest == rhs.representativeFragmentTest );
+    }
+
+    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ==
+                   sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
+  {
+    using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+  };
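+
+  // Editorial note: worked example, not generated code. The pipeline
+  // properties above drive shader-binding-table layout: each record must be
+  // aligned to shaderGroupHandleAlignment and each table region to
+  // shaderGroupBaseAlignment. The usual power-of-two round-up is:
+  //
+  //   constexpr uint32_t alignUp( uint32_t value, uint32_t alignment )
+  //   {
+  //     return ( value + alignment - 1 ) & ~( alignment - 1 );
+  //   }
+  //   // e.g. handleSize = 32, handleAlignment = 64 -> record stride = alignUp( 32, 64 ) = 64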
+
+  struct PhysicalDeviceRobustness2FeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : robustBufferAccess2( robustBufferAccess2_ )
+      , robustImageAccess2( robustImageAccess2_ )
+      , nullDescriptor( nullDescriptor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &
+      operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2FeaturesEXT &
+      operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT &
+      setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess2 = robustBufferAccess2_;
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT &
+      setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustImageAccess2 = robustImageAccess2_;
+      return *this;
+    }
+
+    PhysicalDeviceRobustness2FeaturesEXT &
+      setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      nullDescriptor = nullDescriptor_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) &&
+             ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor );
+    }
+
+    bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2FeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceRobustness2FeaturesEXT;
+  };
+
+  struct PhysicalDeviceRobustness2PropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(
+      VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+      : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ )
+      , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRobustness2PropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2PropertiesEXT &
+      operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRobustness2PropertiesEXT &
+      operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) &&
+             ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2PropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
+  {
+    using Type = PhysicalDeviceRobustness2PropertiesEXT;
+  };
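+
+  // Editorial note: sketch, not generated code. The two alignments above bound
+  // how buffer ranges are rounded when robustBufferAccess2 is enabled: access
+  // clamping behaves as if the bound range were rounded up to a multiple of the
+  // reported alignment, e.g. a 13-byte storage-buffer range with a 4-byte
+  // robustStorageBufferAccessSizeAlignment is treated as
+  // alignUp( 13, 4 ) = 16 bytes.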
+
+  struct PhysicalDeviceSampleLocationsPropertiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(
+      VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
+      std::array<float, 2> const & sampleLocationCoordinateRange_ = {},
+      uint32_t sampleLocationSubPixelBits_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : sampleLocationSampleCounts( sampleLocationSampleCounts_ )
+      , maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+      , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ )
+      , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
+      , variableSampleLocations( variableSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(
+      PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSampleLocationsPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT &
+      operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSampleLocationsPropertiesEXT &
+      operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) &&
+             ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) &&
+             ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) &&
+             ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) &&
+             ( variableSampleLocations == rhs.variableSampleLocations );
+    }
+
+    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
+    uint32_t sampleLocationSubPixelBits = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
+  };
+  static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
+  {
+    using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
+  };
+
+  struct PhysicalDeviceSamplerFilterMinmaxProperties
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(
+      VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT
+      : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
+      , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(
+      PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSamplerFilterMinmaxProperties(
+          *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerFilterMinmaxProperties &
+      operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSamplerFilterMinmaxProperties &
+      operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
+             ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
+    }
+
+    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+  };
+  static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) ==
+                   sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxProperties>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
+  {
+    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
+  };
+  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
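+
+  // Editorial note: sketch, not generated code. sampleLocationCoordinateRange
+  // above is exposed as ArrayWrapper1D<float, 2>, which indexes like
+  // std::array<float, 2>:
+  //
+  //   // props: a PhysicalDeviceSampleLocationsPropertiesEXT filled via getProperties2
+  //   float rangeMin = props.sampleLocationCoordinateRange[0];
+  //   float rangeMax = props.sampleLocationCoordinateRange[1];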
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerFilterMinmaxProperties; + }; + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + + struct PhysicalDeviceSamplerYcbcrConversionFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT + : samplerYcbcrConversion( samplerYcbcrConversion_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( + PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerYcbcrConversionFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); + } + + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + }; + static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == + sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; + }; + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + + struct PhysicalDeviceScalarBlockLayoutFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) + VULKAN_HPP_NOEXCEPT : scalarBlockLayout( scalarBlockLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceScalarBlockLayoutFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & + operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures & + operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeatures & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout ); + } + + bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + }; + static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == + sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
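+  // [Editorial note, not part of the generated header] Feature structs such as
+  // PhysicalDeviceScalarBlockLayoutFeatures are enabled by chaining them into the
+  // pNext of vk::DeviceCreateInfo before device creation. A hedged sketch; the
+  // queue and extension setup around it is assumed:
+  //
+  //   vk::PhysicalDeviceScalarBlockLayoutFeatures scalarLayout;
+  //   scalarLayout.setScalarBlockLayout( VK_TRUE );
+  //   vk::DeviceCreateInfo deviceCI;
+  //   deviceCI.setPNext( &scalarLayout );
+  //   vk::Device device = physicalDevice.createDevice( deviceCI );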
); + + template <> + struct CppType + { + using Type = PhysicalDeviceScalarBlockLayoutFeatures; + }; + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : separateDepthStencilLayouts( separateDepthStencilLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); + } + + bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + }; + static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == + sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + }; + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ) + , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ) + , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ) + , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ) + , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ) + , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ) + , shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ) + , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ) + , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ) + , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ) + , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ) + , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( + PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloatFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicAdd = 
shaderBufferFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32Atomics = shaderImageFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; + return *this; + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) && + ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && + ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) && + ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && + ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) && + ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && + ( 
shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) && + ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && + ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) && + ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && + ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) && + ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd ); + } + + bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT; + }; + + struct PhysicalDeviceShaderAtomicInt64Features + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderAtomicInt64Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) + , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicInt64Features( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & + operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicInt64Features & + operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderAtomicInt64Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + 
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64Features &
+      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
+             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+  };
+  static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) ==
+                   sizeof( VkPhysicalDeviceShaderAtomicInt64Features ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64Features>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
+  {
+    using Type = PhysicalDeviceShaderAtomicInt64Features;
+  };
+  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+
+  struct PhysicalDeviceShaderClockFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderSubgroupClock( shaderSubgroupClock_ )
+      , shaderDeviceClock( shaderDeviceClock_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &
+      operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderClockFeaturesKHR &
+      operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR &
+      setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupClock = shaderSubgroupClock_;
+      return *this;
+    }
+
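+    // [Editorial note, not part of the generated header] Each setter returns *this,
+    // so feature structs can be configured fluently before being chained, e.g.:
+    //
+    //   auto clockFeatures = vk::PhysicalDeviceShaderClockFeaturesKHR{}
+    //                            .setShaderSubgroupClock( VK_TRUE )
+    //                            .setShaderDeviceClock( VK_TRUE );
+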
PhysicalDeviceShaderClockFeaturesKHR & + setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT + { + shaderDeviceClock = shaderDeviceClock_; + return *this; + } + + operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) && + ( shaderDeviceClock == rhs.shaderDeviceClock ); + } + + bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderClockFeaturesKHR; + }; + + struct PhysicalDeviceShaderCoreProperties2AMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, + uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderCoreFeatures( shaderCoreFeatures_ ) + , activeComputeUnitCount( activeComputeUnitCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreProperties2AMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreProperties2AMD & + operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreProperties2AMD & + operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == 
rhs.shaderCoreFeatures ) && + ( activeComputeUnitCount == rhs.activeComputeUnitCount ); + } + + bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; + uint32_t activeComputeUnitCount = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreProperties2AMD; + }; + + struct PhysicalDeviceShaderCorePropertiesAMD + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, + uint32_t shaderArraysPerEngineCount_ = {}, + uint32_t computeUnitsPerShaderArray_ = {}, + uint32_t simdPerComputeUnit_ = {}, + uint32_t wavefrontsPerSimd_ = {}, + uint32_t wavefrontSize_ = {}, + uint32_t sgprsPerSimd_ = {}, + uint32_t minSgprAllocation_ = {}, + uint32_t maxSgprAllocation_ = {}, + uint32_t sgprAllocationGranularity_ = {}, + uint32_t vgprsPerSimd_ = {}, + uint32_t minVgprAllocation_ = {}, + uint32_t maxVgprAllocation_ = {}, + uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderEngineCount( shaderEngineCount_ ) + , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) + , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) + , simdPerComputeUnit( simdPerComputeUnit_ ) + , wavefrontsPerSimd( wavefrontsPerSimd_ ) + , wavefrontSize( wavefrontSize_ ) + , sgprsPerSimd( sgprsPerSimd_ ) + , minSgprAllocation( minSgprAllocation_ ) + , maxSgprAllocation( maxSgprAllocation_ ) + , sgprAllocationGranularity( sgprAllocationGranularity_ ) + , vgprsPerSimd( vgprsPerSimd_ ) + , minVgprAllocation( minVgprAllocation_ ) + , maxVgprAllocation( maxVgprAllocation_ ) + , vgprAllocationGranularity( vgprAllocationGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCorePropertiesAMD & + operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD & + operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; +#else + bool 
operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && + ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && + ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && + ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && + ( wavefrontSize == rhs.wavefrontSize ) && ( sgprsPerSimd == rhs.sgprsPerSimd ) && + ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && + ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && + ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && + ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void * pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesAMD; + }; + + struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & + operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & + operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( + &rhs ); + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); + } + + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
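+  // [Editorial note, not part of the generated header] The paired static_asserts after
+  // each wrapper guarantee that the C++ struct is size- and layout-compatible with its
+  // C counterpart, which is what makes the reinterpret_casts in the conversion
+  // operators safe in practice. Illustration:
+  //
+  //   VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT cFeatures{};
+  //   auto & hppFeatures =
+  //     reinterpret_cast<vk::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &>( cFeatures );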
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + }; + + struct PhysicalDeviceShaderDrawParametersFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDrawParameters( shaderDrawParameters_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( + PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDrawParametersFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & + operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures & + operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters ); + } + + bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == + sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDrawParametersFeatures; + }; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + struct PhysicalDeviceShaderFloat16Int8Features + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderFloat16Int8Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderFloat16( shaderFloat16_ ) + , shaderInt8( shaderInt8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloat16Int8Features( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & + operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features & + operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderFloat16Int8Features & + setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + PhysicalDeviceShaderFloat16Int8Features & + setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } + + operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && + ( shaderInt8 == rhs.shaderInt8 ); + } + + bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == + sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
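+  // [Editorial note, not part of the generated header] Structs that were promoted to
+  // core Vulkan keep their extension-suffixed names as type aliases (see the KHR
+  // aliases declared just below), so pre-promotion code keeps compiling unchanged:
+  //
+  //   static_assert( std::is_same<vk::PhysicalDeviceShaderFloat16Int8FeaturesKHR,
+  //                               vk::PhysicalDeviceShaderFloat16Int8Features>::value, "" );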
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
+  {
+    using Type = PhysicalDeviceShaderFloat16Int8Features;
+  };
+  using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+
+  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderImageInt64Atomics( shaderImageInt64Atomics_ )
+      , sparseImageInt64Atomics( sparseImageInt64Atomics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
+      PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
+      operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
+      operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
+      setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageInt64Atomics = shaderImageInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
+      setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageInt64Atomics = sparseImageInt64Atomics_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) &&
+             ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
+  };
+  static_assert( sizeof(
PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + }; + + struct PhysicalDeviceShaderImageFootprintFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT : imageFootprint( imageFootprint_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( + PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageFootprintFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & + setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT + { + imageFootprint = imageFootprint_; + return *this; + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); + } + + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; + }; + + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderIntegerFunctions2( shaderIntegerFunctions2_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerFunctions2 = shaderIntegerFunctions2_; + return *this; + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); + } + + bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == + sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
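+  // [Editorial note, not part of the generated header] The CppType specializations map
+  // a StructureType enumerant back to its wrapper type at compile time; the
+  // StructureChain machinery uses this to type-check pNext chains. For example:
+  //
+  //   using T = vk::CppType<vk::StructureType,
+  //                         vk::StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::Type;
+  //   static_assert( std::is_same<T, vk::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "" );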
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + }; + + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) + VULKAN_HPP_NOEXCEPT : shaderSMBuiltins( shaderSMBuiltins_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( + PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & + operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & + operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & + setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT + { + shaderSMBuiltins = shaderSMBuiltins_; + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; + }; + + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, + uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSMCount( shaderSMCount_ ) + , shaderWarpsPerSM( shaderWarpsPerSM_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( + PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsPropertiesNV & + operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsPropertiesNV & + operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && + ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + void * pNext = {}; + uint32_t shaderSMCount = {}; + uint32_t shaderWarpsPerSM = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == + sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
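+  // [Editorial note, not part of the generated header] Several extension structs can be
+  // queried in one call through a StructureChain, which wires up the pNext pointers
+  // automatically. A sketch, assuming `physicalDevice` is a valid vk::PhysicalDevice:
+  //
+  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                              vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV>();
+  //   auto smProps = chain.get<vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV>();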
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; + }; + + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); + } + + bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == + sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
+  {
+    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  };
+  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+  struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+      PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+      VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+          *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
+      operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
+      operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>(
+          &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setShaderSubgroupUniformControlFlow(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType =
+      StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
+  };
+  static_assert( sizeof( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) ==
+                   sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+  };
+
+  struct PhysicalDeviceShaderTerminateInvocationFeaturesKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderTerminateInvocation( shaderTerminateInvocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
+      PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
+      VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
+          *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeaturesKHR &
+      operator=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR &
+      operator=( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderTerminateInvocationFeaturesKHR &
+      setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTerminateInvocation = shaderTerminateInvocation_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
+    }
+
+    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+  };
+  static_assert( sizeof( PhysicalDeviceShaderTerminateInvocationFeaturesKHR ) ==
+                   sizeof( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderTerminateInvocationFeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+  };
+
+  struct PhysicalDeviceShadingRateImageFeaturesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(
+      VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shadingRateImage( shadingRateImage_ )
+      , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(
+      PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShadingRateImageFeaturesNV(
+          *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &
+      operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImageFeaturesNV &
+      operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV &
+      setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImage = shadingRateImage_;
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV &
+      setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) &&
+             ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
+  };
+  static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) ==
+                   sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
+  };
+
+  struct PhysicalDeviceShadingRateImagePropertiesNV
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {},
+                                                  uint32_t shadingRatePaletteSize_ = {},
+                                                  uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shadingRateTexelSize( shadingRateTexelSize_ )
+      , shadingRatePaletteSize( shadingRatePaletteSize_ )
+      , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(
+      PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShadingRateImagePropertiesNV(
+          *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImagePropertiesNV &
+      operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImagePropertiesNV &
+      operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) &&
+             ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) &&
+             ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
+    uint32_t shadingRatePaletteSize = {};
+    uint32_t shadingRateMaxCoarseSamples = {};
+  };
+  static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) ==
+                   sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
+  };
 
   struct PhysicalDeviceSubgroupProperties
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {},
-                                                           VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
-                                                           VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
-                                                           VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(
+      uint32_t subgroupSize_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
+      VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT
       : subgroupSize( subgroupSize_ )
       , supportedStages( supportedStages_ )
       , supportedOperations( supportedOperations_ )
       , quadOperationsInAllStages( quadOperationsInAllStages_ )
     {}
 
-    PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupProperties &
+      operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
       return *this;
     }
-
-    operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>( this );
     }
 
     operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( subgroupSize == rhs.subgroupSize )
-          && ( supportedStages == rhs.supportedStages )
-          && ( supportedOperations == rhs.supportedOperations )
-          && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) &&
+             ( supportedStages == rhs.supportedStages ) && ( supportedOperations == rhs.supportedOperations ) &&
+             ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
     }
 
-    bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
-    void* pNext = {};
-    uint32_t subgroupSize = {};
-    VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
+    VULKAN_HPP_NAMESPACE::StructureType        sType = StructureType::ePhysicalDeviceSubgroupProperties;
+    void *                                     pNext = {};
+    uint32_t                                   subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags     supportedStages = {};
     VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
-    VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
-
+    VULKAN_HPP_NAMESPACE::Bool32               quadOperationsInAllStages = {};
   };
-  static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
@@ -59897,90 +86869,94 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
-                                                                       VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT
       : subgroupSizeControl( subgroupSizeControl_ )
       , computeFullSubgroups( computeFullSubgroups_ )
     {}
 
-    PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(
+      PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupSizeControlFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeaturesEXT &
+      operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT &
+      operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT &
+      setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
     {
       subgroupSizeControl = subgroupSizeControl_;
       return *this;
     }
 
-    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT &
+      setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
     {
       computeFullSubgroups = computeFullSubgroups_;
       return *this;
     }
-
-    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>( this );
     }
 
     operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( subgroupSizeControl == rhs.subgroupSizeControl )
-          && ( computeFullSubgroups == rhs.computeFullSubgroups );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) &&
+             ( computeFullSubgroups == rhs.computeFullSubgroups );
     }
 
-    bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
-    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        subgroupSizeControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32        computeFullSubgroups = {};
   };
-  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT>
@@ -59990,80 +86966,82 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = {},
-                                                                         uint32_t maxSubgroupSize_ = {},
-                                                                         uint32_t maxComputeWorkgroupSubgroups_ = {},
-                                                                         VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(
+      uint32_t minSubgroupSize_ = {},
+      uint32_t maxSubgroupSize_ = {},
+      uint32_t maxComputeWorkgroupSubgroups_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT
       : minSubgroupSize( minSubgroupSize_ )
       , maxSubgroupSize( maxSubgroupSize_ )
      , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
      , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
    {}
 
-    PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(
+      PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupSizeControlPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlPropertiesEXT &
+      operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT &
+      operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      *this = rhs;
-    }
-
-    PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>( this );
     }
 
     operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( minSubgroupSize == rhs.minSubgroupSize )
-          && ( maxSubgroupSize == rhs.maxSubgroupSize )
-          && ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
-          && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) &&
+             ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
+             ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) &&
+             ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
     }
 
-    bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
-    void* pNext = {};
-    uint32_t minSubgroupSize = {};
-    uint32_t maxSubgroupSize = {};
-    uint32_t maxComputeWorkgroupSubgroups = {};
-    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
+    void *                                 pNext = {};
+    uint32_t                               minSubgroupSize = {};
+    uint32_t                               maxSubgroupSize = {};
+    uint32_t                               maxComputeWorkgroupSubgroups = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
   };
-  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT>
@@ -60071,165 +87049,170 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT;
   };
 
-  struct PhysicalDeviceSurfaceInfo2KHR
+  struct PhysicalDeviceSynchronization2FeaturesKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT
-      : surface( surface_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR(
+      VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {} ) VULKAN_HPP_NOEXCEPT : synchronization2( synchronization2_ )
     {}
 
-    PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR(
+      PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSynchronization2FeaturesKHR( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSynchronization2FeaturesKHR(
+          *reinterpret_cast<PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2FeaturesKHR &
+      operator=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSynchronization2FeaturesKHR &
+      operator=( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs );
      return *this;
    }
 
-    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceSynchronization2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceSynchronization2FeaturesKHR &
+      setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
     {
-      surface = surface_;
+      synchronization2 = synchronization2_;
       return *this;
     }
-
-    operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceSynchronization2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceSynchronization2FeaturesKHR *>( this );
    }

-    operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceSynchronization2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceSynchronization2FeaturesKHR *>( this );
    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSynchronization2FeaturesKHR const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( surface == rhs.surface );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 );
     }
 
-    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        synchronization2 = {};
   };
-  static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceSynchronization2FeaturesKHR ) ==
+                   sizeof( VkPhysicalDeviceSynchronization2FeaturesKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSynchronization2FeaturesKHR>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2FeaturesKHR>
   {
-    using Type = PhysicalDeviceSurfaceInfo2KHR;
+    using Type = PhysicalDeviceSynchronization2FeaturesKHR;
   };
 
   struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
       : texelBufferAlignment( texelBufferAlignment_ )
     {}
 
-    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
+      PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
+      operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
+      operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
+      setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
     {
       texelBufferAlignment = texelBufferAlignment_;
       return *this;
     }
-
-    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
     }
 
     operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( texelBufferAlignment == rhs.texelBufferAlignment );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment );
     }
 
-    bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        texelBufferAlignment = {};
   };
-  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
@@ -60239,80 +87222,83 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
   };
 
   struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
-                                                                          VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
-                                                                          VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
-                                                                          VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
+      VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
       : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
       , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
      , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
      , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
    {}
 
-    PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
+      PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentPropertiesEXT &
+      operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT &
+      operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs );
       return *this;
     }
 
-    PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      *this = rhs;
-    }
-
-    PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>( this );
     }
 
     operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
-          && ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
-          && ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
-          && ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
+             ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
+             ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
+             ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
     }
 
-    bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
-    VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
-    VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    storageTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32        storageTexelBufferOffsetSingleTexelAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    uniformTexelBufferOffsetAlignmentBytes = {};
+    VULKAN_HPP_NAMESPACE::Bool32        uniformTexelBufferOffsetSingleTexelAlignment = {};
   };
-  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT>
@@ -60322,80 +87308,85 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
      : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
    {}
 
-    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+      PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+      VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
+      operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
+      operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) );
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs );
      return *this;
    }
 
-    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>(&rhs);
-      return *this;
-    }
-
-    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
+      setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
      return *this;
    }
-
-    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>( this );
    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
    }

-    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        textureCompressionASTC_HDR = {};
   };
-  static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT>
@@ -60405,331 +87396,253 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct PhysicalDeviceTimelineSemaphoreFeatures
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) VULKAN_HPP_NOEXCEPT
-      : timelineSemaphore( timelineSemaphore_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} )
+      VULKAN_HPP_NOEXCEPT : timelineSemaphore( timelineSemaphore_ )
     {}
 
-    PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) - offsetof( PhysicalDeviceTimelineSemaphoreFeatures, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
 
     PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PhysicalDeviceTimelineSemaphoreFeatures(
+          *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    PhysicalDeviceTimelineSemaphoreFeatures& operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures &
+      operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTimelineSemaphoreFeatures &
+      operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
      return *this;
    }
 
-    PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

-    PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceTimelineSemaphoreFeatures &
+      setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      timelineSemaphore = timelineSemaphore_;
      return *this;
    }
-
-    operator VkPhysicalDeviceTimelineSemaphoreFeatures const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( timelineSemaphore == rhs.timelineSemaphore );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore );
    }

-    bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        timelineSemaphore = {};
   };
-  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) ==
+                   sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeatures>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
   {
     using Type = PhysicalDeviceTimelineSemaphoreFeatures;
   };
+  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
 
   struct PhysicalDeviceTimelineSemaphoreProperties
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} )
+      VULKAN_HPP_NOEXCEPT : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
    {}
 
-    PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(
+      PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTimelineSemaphoreProperties(
+          *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreProperties &
+      operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTimelineSemaphoreProperties &
+      operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTimelineSemaphoreProperties ) - offsetof( PhysicalDeviceTimelineSemaphoreProperties, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
      return *this;
    }
   struct PhysicalDeviceTimelineSemaphoreProperties
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTimelineSemaphoreProperties;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) VULKAN_HPP_NOEXCEPT
-      : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} )
+      VULKAN_HPP_NOEXCEPT : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
     {}

-    PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(
+      PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTimelineSemaphoreProperties(
+          *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreProperties &
+      operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTimelineSemaphoreProperties &
+      operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTimelineSemaphoreProperties ) - offsetof( PhysicalDeviceTimelineSemaphoreProperties, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
       return *this;
     }

-    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
     {
-      *this = rhs;
-    }
-
-    PhysicalDeviceTimelineSemaphoreProperties& operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceTimelineSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
     }

     operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
     }

-    bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
-    void* pNext = {};
-    uint64_t maxTimelineSemaphoreValueDifference = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+    void *                              pNext = {};
+    uint64_t                            maxTimelineSemaphoreValueDifference = {};
   };
-  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) ==
+                   sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreProperties>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
   {
     using Type = PhysicalDeviceTimelineSemaphoreProperties;
   };
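// [Editorial aside, not part of the patch] The matching properties struct above reports
// a single limit. A sketch of querying it, under the same assumptions as the previous
// example:
//
//   vk::PhysicalDeviceTimelineSemaphoreProperties timelineProps;
//   vk::PhysicalDeviceProperties2 props2;
//   props2.pNext = &timelineProps;
//   physicalDevice.getProperties2( &props2 );
//   // timelineProps.maxTimelineSemaphoreValueDifference now holds the largest gap the
//   // implementation allows between a semaphore's current value and a pending signal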
-
-  struct PhysicalDeviceToolPropertiesEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT;
-
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& name_ = {},
-                                                             std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& version_ = {},
-                                                             VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {},
-                                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
-                                                             std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = {} ) VULKAN_HPP_NOEXCEPT
-      : name( name_ )
-      , version( version_ )
-      , purposes( purposes_ )
-      , description( description_ )
-      , layer( layer_ )
-    {}
-
-    PhysicalDeviceToolPropertiesEXT & operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceToolPropertiesEXT ) - offsetof( PhysicalDeviceToolPropertiesEXT, pNext ) );
-      return *this;
-    }
-
-    PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PhysicalDeviceToolPropertiesEXT& operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PhysicalDeviceToolPropertiesEXT const& ) const = default;
-#else
-    bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( name == rhs.name )
-          && ( version == rhs.version )
-          && ( purposes == rhs.purposes )
-          && ( description == rhs.description )
-          && ( layer == rhs.layer );
-    }
-
-    bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
-    VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
-    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
-
-  };
-  static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!"
); - - template <> - struct CppType - { - using Type = PhysicalDeviceToolPropertiesEXT; - }; + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; struct PhysicalDeviceTransformFeedbackFeaturesEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT : transformFeedback( transformFeedback_ ) , geometryStreams( geometryStreams_ ) {} - PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( + PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & + setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT { transformFeedback = transformFeedback_; return *this; } - PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & + setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT { geometryStreams = geometryStreams_; return *this; } - - operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator 
VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( transformFeedback == rhs.transformFeedback ) - && ( geometryStreams == rhs.geometryStreams ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && + ( geometryStreams == rhs.geometryStreams ); } - bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; }; - static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -60739,19 +87652,22 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceTransformFeedbackPropertiesEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, - uint32_t maxTransformFeedbackBuffers_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, - uint32_t maxTransformFeedbackStreamDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataStride_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( + uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) @@ -60764,79 +87680,79 @@ namespace VULKAN_HPP_NAMESPACE , transformFeedbackDraw( transformFeedbackDraw_ ) {} - PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( + PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const 
VULKAN_HPP_NOEXCEPT { - *this = rhs; - } - - PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) - && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) - && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) - && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) - && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) - && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) - && ( transformFeedbackQueries == rhs.transformFeedbackQueries ) - && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) - && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) - && ( transformFeedbackDraw == rhs.transformFeedbackDraw ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && + ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && + ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && + ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && + ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && + ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && + ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && + ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && + ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && + ( transformFeedbackDraw == rhs.transformFeedbackDraw ); } - bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; - void* pNext = {}; - uint32_t maxTransformFeedbackStreams = {}; - uint32_t maxTransformFeedbackBuffers = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; - uint32_t maxTransformFeedbackStreamDataSize = {}; - uint32_t maxTransformFeedbackBufferDataSize = {}; - uint32_t maxTransformFeedbackBufferDataStride = {}; - VULKAN_HPP_NAMESPACE::Bool32 
transformFeedbackQueries = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + void * pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; }; - static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -60846,266 +87762,283 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceUniformBufferStandardLayoutFeatures { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; - VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT : uniformBufferStandardLayout( uniformBufferStandardLayout_ ) {} - PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( + PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceUniformBufferStandardLayoutFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) - offsetof( 
PhysicalDeviceUniformBufferStandardLayoutFeatures, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceUniformBufferStandardLayoutFeatures& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { uniformBufferStandardLayout = uniformBufferStandardLayout_; return *this; } - - operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); } - bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; }; - static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == + sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; }; + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; struct PhysicalDeviceVariablePointersFeatures { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVariablePointersFeatures; - VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) , variablePointers( variablePointers_ ) {} - PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceVariablePointersFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & + operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVariablePointersFeatures & + operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT { variablePointersStorageBuffer = variablePointersStorageBuffer_; return *this; } - PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT { variablePointers = variablePointers_; return *this; } - - operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT + operator 
VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) - && ( variablePointers == rhs.variablePointers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ); } - bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; }; - static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType { using Type = PhysicalDeviceVariablePointersFeatures; }; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) {} - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( + PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & + operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) 
VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; return *this; } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; return *this; } - - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) - && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); } - bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; }; - static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -61115,68 +88048,72 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) {} - PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( + PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; - } - - PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext 
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
     }

-    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
-    void* pNext = {};
-    uint32_t maxVertexAttribDivisor = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+    void *                              pNext = {};
+    uint32_t                            maxVertexAttribDivisor = {};
   };
-  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ==
+                   sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );

   template <>
   struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
@@ -61184,23 +88121,112 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
   };
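// [Editorial aside, not part of the patch] The struct added in the hunk below is new in
// this header update and backs VK_EXT_vertex_input_dynamic_state. A hedged sketch of
// enabling it at device creation, assuming the `vk` namespace alias and the usual
// device-creation flow elsewhere:
//
//   vk::PhysicalDeviceVertexInputDynamicStateFeaturesEXT vertexInputDynamicState;
//   vertexInputDynamicState.vertexInputDynamicState = VK_TRUE;
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.pNext = &vertexInputDynamicState;  // chain into device creation
//   // ... fill queue create infos etc., then physicalDevice.createDevice( ... )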
+  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT
+      : vertexInputDynamicState( vertexInputDynamicState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
+      PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
+      operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
+      operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
+      setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexInputDynamicState = vertexInputDynamicState_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( vertexInputDynamicState == rhs.vertexInputDynamicState );
+    }
+
+    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        vertexInputDynamicState = {};
+  };
+  static_assert( sizeof( PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) ==
+                   sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
+  {
+    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+  };
+
   struct PhysicalDeviceVulkan11Features
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;

-    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
-                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
+                                      VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
     : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
     , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
     , storagePushConstant16( storagePushConstant16_ )
@@ -61215,48 +88241,52 @@ namespace VULKAN_HPP_NAMESPACE
     , shaderDrawParameters( shaderDrawParameters_ )
   {}

-    PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan11Features ) - offsetof( PhysicalDeviceVulkan11Features, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVulkan11Features(
PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceVulkan11Features( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan11Features& operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PhysicalDeviceVulkan11Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; } - PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; } - PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return *this; } - PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return *this; @@ -61268,108 +88298,109 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return *this; } - PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return *this; } - PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT { variablePointersStorageBuffer = 
variablePointersStorageBuffer_; return *this; } - PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT { variablePointers = variablePointers_; return *this; } - PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT { protectedMemory = protectedMemory_; return *this; } - PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT { samplerYcbcrConversion = samplerYcbcrConversion_; return *this; } - PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT { shaderDrawParameters = shaderDrawParameters_; return *this; } - - operator VkPhysicalDeviceVulkan11Features const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan11Features const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) - && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) - && ( storagePushConstant16 == rhs.storagePushConstant16 ) - && ( storageInputOutput16 == rhs.storageInputOutput16 ) - && ( multiview == rhs.multiview ) - && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) - && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) - && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) - && ( variablePointers == rhs.variablePointers ) - && ( protectedMemory == rhs.protectedMemory ) - && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) - && ( shaderDrawParameters == rhs.shaderDrawParameters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && + ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == 
rhs.variablePointers ) && ( protectedMemory == rhs.protectedMemory ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) && + ( shaderDrawParameters == rhs.shaderDrawParameters ); } - bool operator!=( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
template <> struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features> @@ -61379,24 +88410,27 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceVulkan11Properties { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( std::array<uint8_t, VK_UUID_SIZE> const& deviceUUID_ = {}, - std::array<uint8_t, VK_UUID_SIZE> const& driverUUID_ = {}, - std::array<uint8_t, VK_LUID_SIZE> const& deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, - uint32_t subgroupSize_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, - uint32_t maxMultiviewViewCount_ = {}, - uint32_t maxMultiviewInstanceIndex_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, - uint32_t maxPerSetDescriptors_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( + std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_ = {}, + std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_ = {}, + std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, + uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT : deviceUUID( deviceUUID_ ) , driverUUID( driverUUID_ ) , deviceLUID( deviceLUID_ ) @@ -61414,89 +88448,81 @@ namespace VULKAN_HPP_NAMESPACE , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) {} - PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan11Properties ) - offsetof( PhysicalDeviceVulkan11Properties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan11Properties& operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties & + operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>(&rhs); + *this = *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ); return *this; }
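Editor's note: the block above replaces the old memcpy-based assignment with defaulted copy operations plus a delegating constructor from the C struct, all guarded by VULKAN_HPP_NO_STRUCT_CONSTRUCTORS. A minimal sketch of the round trip this enables (the placeholder values in MakeCProps are illustrative, not real driver output):

```cpp
#include <vulkan/vulkan.hpp>

// A C-side struct, e.g. produced by code that only uses the C API.
VkPhysicalDeviceVulkan11Properties MakeCProps()
{
    VkPhysicalDeviceVulkan11Properties c_props{};
    c_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES;
    return c_props;
}

// The converting constructor delegates through a reinterpret_cast of &rhs;
// when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is defined it is compiled out and
// only the assignment operator from the C struct remains available.
vk::PhysicalDeviceVulkan11Properties ToHpp( const VkPhysicalDeviceVulkan11Properties & c_props )
{
    return vk::PhysicalDeviceVulkan11Properties( c_props );
}
```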
- - operator VkPhysicalDeviceVulkan11Properties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this ); } operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this ); + return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan11Properties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceUUID == rhs.deviceUUID ) - && ( driverUUID == rhs.driverUUID ) - && ( deviceLUID == rhs.deviceLUID ) - && ( deviceNodeMask == rhs.deviceNodeMask ) - && ( deviceLUIDValid == rhs.deviceLUIDValid ) - && ( subgroupSize == rhs.subgroupSize ) - && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) - && ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) - && ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) - && ( pointClippingBehavior == rhs.pointClippingBehavior ) - && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) - && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) - && ( protectedNoFault == rhs.protectedNoFault ) - && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) - && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) && + ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) && + ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) && + ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && + ( pointClippingBehavior == rhs.pointClippingBehavior ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) && + ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); } - bool operator!=( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif
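Editor's note: the conversion operators above are what make the wrapper usable with plain C entry points. A sketch, assuming a hypothetical C-style sink (log_vulkan11_properties is illustrative, not part of any real API):

```cpp
#include <vulkan/vulkan.hpp>

// Hypothetical C callback that only understands the C API type.
extern "C" void log_vulkan11_properties( const VkPhysicalDeviceVulkan11Properties * props );

void LogProperties( const vk::PhysicalDeviceVulkan11Properties & props )
{
    // The const conversion operator reinterpret_casts *this; the
    // static_asserts emitted after the struct guarantee that the wrapper
    // and the C struct share size and standard layout, so this is safe.
    const VkPhysicalDeviceVulkan11Properties & c_view = props;
    log_vulkan11_properties( &c_view );
}
```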
- - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {}; - uint32_t deviceNodeMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; - uint32_t subgroupSize = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; - uint32_t maxMultiviewViewCount = {}; - uint32_t maxMultiviewInstanceIndex = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; - uint32_t maxPerSetDescriptors = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Properties>::value, + "struct wrapper is not a standard layout!" );
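Editor's note: PhysicalDeviceVulkan11Properties is an output structure, filled by the driver through a pNext chain rather than set by the application. A sketch of the query, assuming a valid vk::PhysicalDevice obtained from a Vulkan 1.1+ instance:

```cpp
#include <vulkan/vulkan.hpp>

uint32_t QuerySubgroupSize( vk::PhysicalDevice physical_device )
{
    vk::PhysicalDeviceVulkan11Properties vk11_props{};
    vk::PhysicalDeviceProperties2 props2{};
    props2.pNext = &vk11_props;  // chain the 1.1 property block into the query
    physical_device.getProperties2( &props2 );
    return vk11_props.subgroupSize;  // now filled in by the driver
}
```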
template <> struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties> @@ -61506,56 +88532,58 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceVulkan12Features { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features; - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, -
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) 
VULKAN_HPP_NOEXCEPT : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ) , drawIndirectCount( drawIndirectCount_ ) , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) @@ -61605,66 +88633,73 @@ namespace VULKAN_HPP_NAMESPACE , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ ) {} - PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan12Features ) - offsetof( PhysicalDeviceVulkan12Features, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan12Features& operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<PhysicalDeviceVulkan12Features const *>(&rhs); + *this = *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ); return *this; } - PhysicalDeviceVulkan12Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT { samplerMirrorClampToEdge = samplerMirrorClampToEdge_; return *this; } - PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT { drawIndirectCount = drawIndirectCount_; return *this; } - PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return *this; } - PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return *this; } - PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return *this; } - PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderBufferInt64Atomics(
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { shaderBufferInt64Atomics = shaderBufferInt64Atomics_; return *this; } - PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { shaderSharedInt64Atomics = shaderSharedInt64Atomics_; return *this; @@ -61682,364 +88717,398 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT { descriptorIndexing = descriptorIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; return *this; } - 
PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return *this; } - PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; } - PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT { runtimeDescriptorArray = runtimeDescriptorArray_; return *this; } - PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT { samplerFilterMinmax = samplerFilterMinmax_; return *this; } - PhysicalDeviceVulkan12Features & setScalarBlockLayout( 
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT { scalarBlockLayout = scalarBlockLayout_; return *this; } - PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT { imagelessFramebuffer = imagelessFramebuffer_; return *this; } - PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { uniformBufferStandardLayout = uniformBufferStandardLayout_; return *this; } - PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT { shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; return *this; } - PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT { separateDepthStencilLayouts = separateDepthStencilLayouts_; return *this; } - PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT { hostQueryReset = hostQueryReset_; return *this; } - PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT { timelineSemaphore = timelineSemaphore_; return *this; } - PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } - PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } - PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } - PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) 
VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModel = vulkanMemoryModel_; return *this; } - PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; return *this; } - PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; return *this; } - PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT { shaderOutputViewportIndex = shaderOutputViewportIndex_; return *this; } - PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT { shaderOutputLayer = shaderOutputLayer_; return *this; } - PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & + setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT { subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; return *this; } - - operator VkPhysicalDeviceVulkan12Features const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this ); } operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this ); + return *reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan12Features const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) - && ( drawIndirectCount == rhs.drawIndirectCount ) - && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) - && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) - && ( storagePushConstant8 == rhs.storagePushConstant8 ) - && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) - && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) - && ( shaderFloat16 == rhs.shaderFloat16 ) - && ( shaderInt8 == rhs.shaderInt8 ) - && (
descriptorIndexing == rhs.descriptorIndexing ) - && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) - && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) - && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) - && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) - && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) - && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) - && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) - && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) - && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) - && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) - && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) - && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) - && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) - && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) - && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) - && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) - && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) - && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) - && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) - && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) - && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) - && ( scalarBlockLayout == rhs.scalarBlockLayout ) - && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) - && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) - && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) - && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) - && ( hostQueryReset == rhs.hostQueryReset ) - && ( timelineSemaphore == rhs.timelineSemaphore ) - && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) - && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) - && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) - && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) - && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) - && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) - && ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) - && ( shaderOutputLayer == rhs.shaderOutputLayer ) - && ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) && + ( drawIndirectCount == rhs.drawIndirectCount ) && + ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && + ( 
storagePushConstant8 == rhs.storagePushConstant8 ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && + ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && ( shaderFloat16 == rhs.shaderFloat16 ) && + ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && + ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && ( scalarBlockLayout == rhs.scalarBlockLayout ) && + ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && + ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && + ( hostQueryReset == rhs.hostQueryReset ) && ( timelineSemaphore == rhs.timelineSemaphore ) && + ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && + ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && + ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && + ( shaderOutputLayer == rhs.shaderOutputLayer ) && + ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); } - bool operator!=( PhysicalDeviceVulkan12Features const& rhs ) 
const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; - + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Features>::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
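Editor's note: feature structs such as PhysicalDeviceVulkan12Features take effect by being chained into DeviceCreateInfo::pNext at device creation. A sketch, assuming queue setup and feature-support checks happen elsewhere:

```cpp
#include <vulkan/vulkan.hpp>

vk::Device CreateDeviceWithVulkan12Features( vk::PhysicalDevice physical_device,
                                             const vk::DeviceQueueCreateInfo & queue_info )
{
    vk::PhysicalDeviceVulkan12Features vk12_features{};
    vk12_features.setTimelineSemaphore( VK_TRUE ).setHostQueryReset( VK_TRUE );

    vk::DeviceCreateInfo device_info{};
    device_info.setQueueCreateInfoCount( 1 )
        .setPQueueCreateInfos( &queue_info )
        .setPNext( &vk12_features );  // features are enabled via the pNext chain
    return physical_device.createDevice( device_info );
}
```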
@@ -62049,61 +89118,65 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceVulkan12Properties { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array<char, VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {}, - std::array<char, VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, - uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, - uint64_t maxTimelineSemaphoreValueDifference_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( + VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {}, + std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, +
uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT : driverID( driverID_ ) , driverName( driverName_ ) , driverInfo( driverInfo_ ) @@ -62158,163 +89231,172 @@ namespace VULKAN_HPP_NAMESPACE , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) {} - PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkan12Properties ) - offsetof( PhysicalDeviceVulkan12Properties, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceVulkan12Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan12Properties& operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties & + operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkPhysicalDeviceVulkan12Properties const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkan12Properties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( driverID == 
rhs.driverID ) - && ( driverName == rhs.driverName ) - && ( driverInfo == rhs.driverInfo ) - && ( conformanceVersion == rhs.conformanceVersion ) - && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) - && ( roundingModeIndependence == rhs.roundingModeIndependence ) - && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) - && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) - && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) - && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) - && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) - && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) - && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) - && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) - && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) - && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) - && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) - && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) - && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) - && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) - && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) - && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) - && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) - && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) - && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) - && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) - && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) - && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) - && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) - && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) - && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) - && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) - && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) - && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) - && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == 
rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) - && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) - && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) - && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) - && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) - && ( independentResolveNone == rhs.independentResolveNone ) - && ( independentResolve == rhs.independentResolve ) - && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) - && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) - && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) - && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && + ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) && + ( conformanceVersion == rhs.conformanceVersion ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && + ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && + ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && + ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && + ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && + ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + 
rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == + rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && + ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); } - bool operator!=( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {}; - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; -
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; - uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; - uint32_t maxPerStageUpdateAfterBindResources = {}; - uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; - uint64_t maxTimelineSemaphoreValueDifference = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32
shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; }; - static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties> @@ -62324,183 +89406,397 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceVulkanMemoryModelFeatures { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT : vulkanMemoryModel( vulkanMemoryModel_ ) , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) {} - PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) - offsetof( PhysicalDeviceVulkanMemoryModelFeatures, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PhysicalDeviceVulkanMemoryModelFeatures( + *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkanMemoryModelFeatures& operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkanMemoryModelFeatures & + operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ); return *this; } - PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModel = vulkanMemoryModel_; return *this; } - PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; return *this; } - PhysicalDeviceVulkanMemoryModelFeatures &
setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; return *this; } - - operator VkPhysicalDeviceVulkanMemoryModelFeatures const&() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this ); + return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this ); } operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this ); + return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) - && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) - && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); } - bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; }; - static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == + sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures> { using Type = PhysicalDeviceVulkanMemoryModelFeatures; }; + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; - struct PhysicalDeviceYcbcrImageArraysFeaturesEXT + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; - VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT - : ycbcrImageArrays( ycbcrImageArrays_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ) + , workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ) + , workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ) + , workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ ) {} - PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) ); + *this = + *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ); return *this; } - PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(&rhs); - return *this; - } - - PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT +
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT { - ycbcrImageArrays = ycbcrImageArrays_; + workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; return *this; } - - operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayoutScalarBlockLayout( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this ); + workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_; + return *this; } - operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this ); + workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_; + return *this; } + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_; + return *this; + } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default; + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this ); + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) && + ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) && + ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) && + ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess ); } - bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {}; + };
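[Editor's usage note, not part of the generated header.] The Bool32 feature wrappers in this hunk are consumed by chaining them into vk::PhysicalDeviceFeatures2 before a getFeatures2 query. A minimal sketch, assuming <vulkan/vulkan.hpp> is included, `physicalDevice` is a valid vk::PhysicalDevice, the instance supports Vulkan 1.1+ (or VK_KHR_get_physical_device_properties2), and the driver advertises VK_KHR_workgroup_memory_explicit_layout:

    vk::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR explicitLayout{}; // sType pre-set by the wrapper's member initializer
    vk::PhysicalDeviceFeatures2 features2{};
    features2.setPNext( &explicitLayout );      // hook the feature struct into the pNext chain
    physicalDevice.getFeatures2( &features2 );  // fills features2 and every chained struct
    if ( explicitLayout.workgroupMemoryExplicitLayout )
    {
      // the feature can now be requested at device creation through the same chain
    }

The same pattern applies to every PhysicalDevice*Features* wrapper defined in this section.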
static_assert( sizeof( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == + sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, + "struct wrapper is not a standard layout!" ); + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR> + { + using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + }; + + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {} ) VULKAN_HPP_NOEXCEPT + : ycbcr2plane444Formats( ycbcr2plane444Formats_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ); + return *this; + } + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT + { + ycbcr2plane444Formats = ycbcr2plane444Formats_; + return *this; + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats ); + } + + bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {}; }; - static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!"
); - static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT> + { + using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + }; + + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT : ycbcrImageArrays( ycbcrImageArrays_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( + PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrImageArraysFeaturesEXT( + *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ); + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrImageArrays = ycbcrImageArrays_; + return *this; + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default; #else + bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); + } + + bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + }; + static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT> @@ -62508,205 +89804,195 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT; }; - struct PipelineCacheCreateInfo + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; - VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, - size_t initialDataSize_ = {}, - const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) {} - PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( + *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & + operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & + operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCacheCreateInfo ) - offsetof( PipelineCacheCreateInfo, pNext ) ); + *this = + *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const *>( &rhs ); return *this; } - PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>(&rhs); - return *this; - } - - PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setShaderZeroInitializeWorkgroupMemory( + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; return *this; } - PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - initialDataSize = initialDataSize_; - return *this; + return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR *>( this ); } - PipelineCacheCreateInfo & setPInitialData( const void*
pInitialData_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { - pInitialData = pInitialData_; - return *this; + return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR *>( this ); } - - operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this ); - } - - operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCacheCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & ) const = default; #else - bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ); } - bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; - size_t initialDataSize = {}; - const void* pInitialData = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; }; - static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ) == + sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR>::value, + "struct wrapper is not a standard layout!"
); template <> - struct CppType + struct CppType { - using Type = PipelineCacheCreateInfo; + using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; }; struct PipelineColorBlendAdvancedStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) + VULKAN_HPP_NOEXCEPT : srcPremultiplied( srcPremultiplied_ ) , dstPremultiplied( dstPremultiplied_ ) , blendOverlap( blendOverlap_ ) {} - PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( + PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAdvancedStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) - offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & + setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT { srcPremultiplied = srcPremultiplied_; return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + 
PipelineColorBlendAdvancedStateCreateInfoEXT & + setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT { dstPremultiplied = dstPremultiplied_; return *this; } - PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & + setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT { blendOverlap = blendOverlap_; return *this; } - - operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcPremultiplied == rhs.srcPremultiplied ) - && ( dstPremultiplied == rhs.dstPremultiplied ) - && ( blendOverlap == rhs.blendOverlap ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && + ( dstPremultiplied == rhs.dstPremultiplied ) && ( blendOverlap == rhs.blendOverlap ); } - bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; - VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; - VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; }; - static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == + sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -62714,82 +90000,192 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; }; - struct PipelineCompilerControlCreateInfoAMD + struct PipelineColorWriteCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : compilerControlFlags( compilerControlFlags_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentCount( attachmentCount_ ) + , pColorWriteEnables( pColorWriteEnables_ ) {} - PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorWriteCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) + : attachmentCount( static_cast( colorWriteEnables_.size() ) ) + , pColorWriteEnables( colorWriteEnables_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & + operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCompilerControlCreateInfoAMD ) - offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineCompilerControlCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT { - compilerControlFlags = compilerControlFlags_; + attachmentCount = attachmentCount_; return *this; } - - operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT + PipelineColorWriteCreateInfoEXT & + setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
pColorWriteEnables = pColorWriteEnables_;
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineColorWriteCreateInfoEXT & setColorWriteEnables(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ )
+      VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
+      attachmentCount    = static_cast<uint32_t>( colorWriteEnables_.size() );
+      pColorWriteEnables = colorWriteEnables_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT *>( this );
     }
 
+    operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
+      return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT *>( this );
+    }
 
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineCompilerControlCreateInfoAMD const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
 #else
-    bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( compilerControlFlags == rhs.compilerControlFlags );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) &&
+             ( pColorWriteEnables == rhs.pColorWriteEnables );
     }
 
-    bool operator!=( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
   public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
+  };
+  static_assert( sizeof( PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorWriteCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
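// ---------------------------------------------------------------------------
// [Editor's note, not part of the upstream patch] A minimal usage sketch of the
// PipelineColorWriteCreateInfoEXT wrapper added above, assuming a device with
// VK_EXT_color_write_enable enabled and the default vulkan.hpp configuration
// (struct constructors and enhanced mode available). The two-attachment layout
// is invented for illustration.
//
//   std::array<vk::Bool32, 2> colorWriteEnables = { VK_TRUE, VK_FALSE };
//   // ArrayProxyNoTemporaries constructor: attachmentCount is derived from the array size
//   vk::PipelineColorWriteCreateInfoEXT colorWriteInfo( colorWriteEnables );
//   vk::PipelineColorBlendStateCreateInfo blendState{};
//   blendState.pNext = &colorWriteInfo; // extends the blend state through the pNext chain
// ---------------------------------------------------------------------------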
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
+  {
+    using Type = PipelineColorWriteCreateInfoEXT;
+  };
+
+  struct PipelineCompilerControlCreateInfoAMD
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineCompilerControlCreateInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(
+      VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : compilerControlFlags( compilerControlFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD &
+      operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCompilerControlCreateInfoAMD &
+      operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+    PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags(
+      VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compilerControlFlags = compilerControlFlags_;
+      return *this;
+    }
+
+    operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD *>( this );
+    }
+
+    operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default;
+#else
+    bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags );
+    }
+
+    bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+    const void * pNext = {};
     VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
-
   };
-  static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value,
+                 "struct wrapper is not a standard layout!" );
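// ---------------------------------------------------------------------------
// [Editor's note, not part of the upstream patch] A hedged sketch of how the
// regenerated PipelineCompilerControlCreateInfoAMD wrapper is typically chained
// into pipeline creation via the fluent setters above. VK_AMD_pipeline_compiler_control
// defines no flag bits at this point, so an empty flags value is passed; the
// pipeline-create-info wiring is illustrative only.
//
//   vk::PipelineCompilerControlCreateInfoAMD compilerControl{};
//   compilerControl.setCompilerControlFlags( {} ); // reserved for future AMD flags
//   vk::GraphicsPipelineCreateInfo pipelineInfo{};
//   pipelineInfo.setPNext( &compilerControl );     // hangs the AMD struct off the pNext chain
// ---------------------------------------------------------------------------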
 
   template <>
   struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
@@ -62799,14 +90195,18 @@ namespace VULKAN_HPP_NAMESPACE
   {
     using Type = PipelineCompilerControlCreateInfoAMD;
   };
 
   struct PipelineCoverageModulationStateCreateInfoNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineCoverageModulationStateCreateInfoNV;
 
-    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {},
-                                                                      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
-                                                                      VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {},
-                                                                      uint32_t coverageModulationTableCount_ = {},
-                                                                      const float* pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {},
+      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ =
+        VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
+      VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {},
+      uint32_t coverageModulationTableCount_ = {},
+      const float * pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT
       : flags( flags_ )
       , coverageModulationMode( coverageModulationMode_ )
       , coverageModulationTableEnable( coverageModulationTableEnable_ )
@@ -62814,105 +90214,134 @@ namespace VULKAN_HPP_NAMESPACE
       , pCoverageModulationTable( pCoverageModulationTable_ )
     {}
 
-    PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(
+      PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageModulationStateCreateInfoNV(
+          *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineCoverageModulationStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_,
+      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_,
+      VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ )
+      : flags( flags_ )
+      , coverageModulationMode( coverageModulationMode_ )
+      , coverageModulationTableEnable( coverageModulationTableEnable_ )
+      , coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) )
+      , pCoverageModulationTable( coverageModulationTable_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+      operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageModulationStateCreateInfoNV &
+      operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageModulationStateCreateInfoNV ) - offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
      return *this;
     }
 
-
PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineCoverageModulationStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT { coverageModulationMode = coverageModulationMode_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT { coverageModulationTableEnable = coverageModulationTableEnable_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT { coverageModulationTableCount = coverageModulationTableCount_; return *this; } - PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float* pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & + setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT { pCoverageModulationTable = pCoverageModulationTable_; return *this; } - - operator VkPipelineCoverageModulationStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + coverageModulationTableCount = static_cast( coverageModulationTable_.size() ); + pCoverageModulationTable = coverageModulationTable_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PipelineCoverageModulationStateCreateInfoNV const & ) const = default;
 #else
-    bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( coverageModulationMode == rhs.coverageModulationMode )
-          && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
-          && ( coverageModulationTableCount == rhs.coverageModulationTableCount )
-          && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( coverageModulationMode == rhs.coverageModulationMode ) &&
+             ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) &&
+             ( coverageModulationTableCount == rhs.coverageModulationTableCount ) &&
+             ( pCoverageModulationTable == rhs.pCoverageModulationTable );
     }
 
-    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+    const void * pNext = {};
     VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
-    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
+    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode =
+      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
     VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
-    uint32_t coverageModulationTableCount = {};
-    const float* pCoverageModulationTable = {};
-
+    uint32_t coverageModulationTableCount = {};
+    const float * pCoverageModulationTable = {};
   };
-  static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) ==
+                   sizeof( VkPipelineCoverageModulationStateCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
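// ---------------------------------------------------------------------------
// [Editor's note, not part of the upstream patch] An illustrative sketch of the
// new ArrayProxyNoTemporaries overloads above, which keep coverageModulationTableCount
// and pCoverageModulationTable in sync automatically. The table values are invented
// and enhanced mode is assumed to be enabled.
//
//   std::array<float, 4> table = { 0.25f, 0.5f, 0.75f, 1.0f };
//   vk::PipelineCoverageModulationStateCreateInfoNV info{};
//   info.setCoverageModulationMode( vk::CoverageModulationModeNV::eRgb )
//       .setCoverageModulationTableEnable( VK_TRUE )
//       .setCoverageModulationTable( table ); // sets count = 4 and the pointer together
// ---------------------------------------------------------------------------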
 
   template <>
   struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
@@ -62922,90 +90351,96 @@ namespace VULKAN_HPP_NAMESPACE
   {
     using Type = PipelineCoverageModulationStateCreateInfoNV;
   };
 
   struct PipelineCoverageReductionStateCreateInfoNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineCoverageReductionStateCreateInfoNV;
 
-    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
-                                                                     VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
+      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ =
+        VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
       : flags( flags_ )
       , coverageReductionMode( coverageReductionMode_ )
     {}
 
-    PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(
+      PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageReductionStateCreateInfoNV(
+          *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
+      operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageReductionStateCreateInfoNV &
+      operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageReductionStateCreateInfoNV ) - offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
      return *this;
    }
 
-    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>(&rhs);
-      return *this;
-    }
-
-    PipelineCoverageReductionStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    PipelineCoverageReductionStateCreateInfoNV &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode(
+      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
     {
      coverageReductionMode =
coverageReductionMode_; return *this; } - - operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( coverageReductionMode == rhs.coverageReductionMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageReductionMode == rhs.coverageReductionMode ); } - bool operator!=( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; - + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; }; - static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == + sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -63015,100 +90450,105 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineCoverageToColorStateCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCoverageToColorStateCreateInfoNV; - VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, - uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, + uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , coverageToColorEnable( coverageToColorEnable_ ) , coverageToColorLocation( coverageToColorLocation_ ) {} - PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( + PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineCoverageToColorStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageToColorStateCreateInfoNV & + operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageToColorStateCreateInfoNV ) - offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineCoverageToColorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT { coverageToColorEnable = coverageToColorEnable_; return *this; } - PipelineCoverageToColorStateCreateInfoNV & 
setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT { coverageToColorLocation = coverageToColorLocation_; return *this; } - - operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( coverageToColorEnable == rhs.coverageToColorEnable ) - && ( coverageToColorLocation == rhs.coverageToColorLocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageToColorEnable == rhs.coverageToColorEnable ) && + ( coverageToColorLocation == rhs.coverageToColorLocation ); } - bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; - uint32_t coverageToColorLocation = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; + uint32_t coverageToColorLocation = {}; }; - static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == + sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -63118,158 +90558,186 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineCreationFeedbackEXT { - - - VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, - uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, + uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , duration( duration_ ) {} - PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR + PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineCreationFeedbackEXT& operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreationFeedbackEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackEXT & + operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreationFeedbackEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCreationFeedbackEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedbackEXT const & ) const = default; #else - bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( flags == rhs.flags ) - && ( duration == rhs.duration ); + return ( flags == rhs.flags ) && ( duration == rhs.duration ); } - bool operator!=( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {}; - uint64_t duration = {}; - + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {}; + uint64_t duration = {}; }; - static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct PipelineCreationFeedbackCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCreationFeedbackCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {}, - uint32_t pipelineStageCreationFeedbackCount_ = {}, - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_ = {}, + uint32_t pipelineStageCreationFeedbackCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ) , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) {} - PipelineCreationFeedbackCreateInfoEXT & operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineCreationFeedbackCreateInfoEXT ) - offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : PipelineCreationFeedbackCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} - PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pipelineStageCreationFeedbacks_ ) + : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) + , pipelineStageCreationFeedbackCount( static_cast( pipelineStageCreationFeedbacks_.size() ) ) + , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfoEXT & + operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedbackCreateInfoEXT & + operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * 
pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT { pPipelineCreationFeedback = pPipelineCreationFeedback_; return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & + setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT { pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; return *this; } - PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT { pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; return *this; } - - operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pipelineStageCreationFeedbackCount = static_cast( pipelineStageCreationFeedbacks_.size() ); + pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) - && ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) - && ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) && + ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) && + ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); } - bool operator!=( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {}; - uint32_t pipelineStageCreationFeedbackCount = {}; - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {}; - + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback = {}; + uint32_t pipelineStageCreationFeedbackCount = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks = {}; }; - static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -63279,110 +90747,140 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineDiscardRectangleStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, - uint32_t discardRectangleCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, + uint32_t discardRectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , discardRectangleMode( discardRectangleMode_ ) , discardRectangleCount( discardRectangleCount_ ) , pDiscardRectangles( pDiscardRectangles_ ) {} - PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineDiscardRectangleStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) + : flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( static_cast( discardRectangles_.size() ) ) + , pDiscardRectangles( discardRectangles_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + 
operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDiscardRectangleStateCreateInfoEXT & + operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) - offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT { discardRectangleMode = discardRectangleMode_; return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT { discardRectangleCount = discardRectangleCount_; return *this; } - PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & + setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT { pDiscardRectangles = pDiscardRectangles_; return *this; } - - operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + discardRectangleCount = static_cast( discardRectangles_.size() ); + pDiscardRectangles = discardRectangles_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default; #else - bool 
operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( discardRectangleMode == rhs.discardRectangleMode ) - && ( discardRectangleCount == rhs.discardRectangleCount ) - && ( pDiscardRectangles == rhs.pDiscardRectangles ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( discardRectangleMode == rhs.discardRectangleMode ) && + ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles ); } - bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; - uint32_t discardRectangleCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {}; - + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; + uint32_t discardRectangleCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {}; }; - static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == + sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -63390,886 +90888,322 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineDiscardRectangleStateCreateInfoEXT; }; - struct PipelineExecutableInfoKHR + struct PipelineFragmentShadingRateEnumStateCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; - VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : pipeline( pipeline_ ) - , executableIndex( executableIndex_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, + VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT + : shadingRateType( shadingRateType_ ) + , shadingRate( shadingRate_ ) + , combinerOps( combinerOps_ ) {} - PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateEnumStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateEnumStateCreateInfoNV & + operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableInfoKHR ) - offsetof( PipelineExecutableInfoKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT { - pipeline = pipeline_; + shadingRateType = shadingRateType_; return *this; } - PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateEnumStateCreateInfoNV & + 
setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT { - executableIndex = executableIndex_; + shadingRate = shadingRate_; return *this; } - - operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps( + std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + combinerOps = combinerOps_; + return *this; } - operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineExecutableInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipeline == rhs.pipeline ) - && ( executableIndex == rhs.executableIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && + ( shadingRate == rhs.shadingRate ) && ( combinerOps == rhs.combinerOps ); } - bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - uint32_t executableIndex = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize; + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; }; - static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineFragmentShadingRateEnumStateCreateInfoNV ) == + sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> - struct CppType + struct CppType { - using Type = PipelineExecutableInfoKHR; + using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV; }; - struct PipelineExecutableInternalRepresentationKHR + struct PipelineFragmentShadingRateStateCreateInfoKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array const& name_ = {}, - std::array const& description_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, - size_t dataSize_ = {}, - void* pData_ = {} ) VULKAN_HPP_NOEXCEPT - : name( name_ ) - , description( description_ ) - , isText( isText_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, + VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT + : fragmentSize( fragmentSize_ ) + , combinerOps( combinerOps_ ) {} - PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableInternalRepresentationKHR ) - offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineExecutableInternalRepresentationKHR const& ) const = default; -#else - bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( name == rhs.name ) - && ( description == rhs.description ) - && ( isText == rhs.isText ) - && ( dataSize == rhs.dataSize ) - && ( pData == rhs.pData ); - } - - bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::Bool32 isText = {}; - size_t dataSize = {}; - void* pData = {}; - - }; - static_assert( sizeof( 
PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PipelineExecutableInternalRepresentationKHR; - }; - - struct PipelineExecutablePropertiesKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; - - VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, - std::array const& name_ = {}, - std::array const& description_ = {}, - uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT - : stages( stages_ ) - , name( name_ ) - , description( description_ ) - , subgroupSize( subgroupSize_ ) + PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateStateCreateInfoKHR( + *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & + operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateStateCreateInfoKHR & + operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineExecutablePropertiesKHR const& ) const = default; -#else - bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stages == rhs.stages ) - && ( name == rhs.name ) - && ( description == rhs.description ) - && ( subgroupSize == rhs.subgroupSize ); - } - - bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - uint32_t subgroupSize = {}; - - }; - static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PipelineExecutablePropertiesKHR; - }; - - union PipelineExecutableStatisticValueKHR - { - PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const& rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); - } - - PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) - : b32( b32_ ) - {} - - PipelineExecutableStatisticValueKHR( int64_t i64_ ) - : i64( i64_ ) - {} - - PipelineExecutableStatisticValueKHR( uint64_t u64_ ) - : u64( u64_ ) - {} - - PipelineExecutableStatisticValueKHR( double f64_ ) - : f64( f64_ ) - {} - - PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT - { - b32 = b32_; - return *this; - } - - PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT - { - i64 = i64_; - return *this; - } - - PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT - { - u64 = u64_; - return *this; - } - - PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT - { - f64 = f64_; - return *this; - } - - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( static_cast(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) ); - return *this; - } - - operator VkPipelineExecutableStatisticValueKHR const&() const - { - return *reinterpret_cast(this); - } - - operator VkPipelineExecutableStatisticValueKHR &() - { - return *reinterpret_cast(this); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - VULKAN_HPP_NAMESPACE::Bool32 b32; - int64_t i64; - uint64_t u64; - double f64; -#else - VkBool32 b32; - int64_t i64; - uint64_t u64; - double f64; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct PipelineExecutableStatisticKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; - - PipelineExecutableStatisticKHR( std::array const& name_ = {}, - std::array const& description_ = {}, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT - : name( name_ ) - , description( description_ ) - , format( format_ ) - , value( value_ ) - {} - - PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) ); - return *this; - } - - PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; - void* 
pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; - - }; - static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PipelineExecutableStatisticKHR; - }; - - struct PipelineInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR; - - VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT - : pipeline( pipeline_ ) - {} - - PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineInfoKHR ) - offsetof( PipelineInfoKHR, pNext ) ); - return *this; - } - - PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateStateCreateInfoKHR & + setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT { - pipeline = pipeline_; + fragmentSize = fragmentSize_; return *this; } - - operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( + std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + combinerOps = combinerOps_; + return *this; } - operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } + operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default; #else - bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipeline == rhs.pipeline ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && + ( combinerOps == rhs.combinerOps ); } - bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; }; - static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineFragmentShadingRateStateCreateInfoKHR ) == + sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = PipelineInfoKHR; + using Type = PipelineFragmentShadingRateStateCreateInfoKHR; }; - struct PushConstantRange - { - - - VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - uint32_t offset_ = {}, - uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : stageFlags( stageFlags_ ) - , offset( offset_ ) - , size( size_ ) - {} - - PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PushConstantRange& operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT - { - stageFlags = stageFlags_; - return *this; - } - - PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT - { - size = size_; - return *this; - } - - - operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PushConstantRange const& ) const = default; -#else - bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( stageFlags == rhs.stageFlags ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); - } - - bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - uint32_t offset = {}; - uint32_t size = {}; - - }; - static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PipelineLayoutCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo; - - VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, - uint32_t setLayoutCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}, - uint32_t pushConstantRangeCount_ = {}, - const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , setLayoutCount( setLayoutCount_ ) - , pSetLayouts( pSetLayouts_ ) - , pushConstantRangeCount( pushConstantRangeCount_ ) - , pPushConstantRanges( pPushConstantRanges_ ) - {} - - PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineLayoutCreateInfo ) - offsetof( PipelineLayoutCreateInfo, pNext ) ); - return *this; - } - - PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = setLayoutCount_; - return *this; - } - - PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayouts = pSetLayouts_; - return *this; - } - - PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = pushConstantRangeCount_; - return *this; - } - - PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pPushConstantRanges = pPushConstantRanges_; - return *this; - } - - - operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineLayoutCreateInfo const& ) const = default; -#else - bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( setLayoutCount == rhs.setLayoutCount ) - && ( pSetLayouts == rhs.pSetLayouts ) - && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) - && ( pPushConstantRanges == rhs.pPushConstantRanges ); - } - - bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; - uint32_t setLayoutCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {}; - uint32_t pushConstantRangeCount = {}; - const VULKAN_HPP_NAMESPACE::PushConstantRange* 
pPushConstantRanges = {}; - - }; - static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = PipelineLayoutCreateInfo; - }; - -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct PipelineLibraryCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, - const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT - : libraryCount( libraryCount_ ) - , pLibraries( pLibraries_ ) - {} - - PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineLibraryCreateInfoKHR ) - offsetof( PipelineLibraryCreateInfoKHR, pNext ) ); - return *this; - } - - PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineLibraryCreateInfoKHR& operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineLibraryCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT - { - libraryCount = libraryCount_; - return *this; - } - - PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ ) VULKAN_HPP_NOEXCEPT - { - pLibraries = pLibraries_; - return *this; - } - - - operator VkPipelineLibraryCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineLibraryCreateInfoKHR const& ) const = default; -#else - bool operator==( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( libraryCount == rhs.libraryCount ) - && ( pLibraries == rhs.pLibraries ); - } - - bool operator!=( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; - const void* pNext = {}; - uint32_t libraryCount = {}; - const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries = {}; - - }; - static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = PipelineLibraryCreateInfoKHR; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PipelineRasterizationConservativeStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, - float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , conservativeRasterizationMode( conservativeRasterizationMode_ ) , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) {} - PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT( + VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationConservativeStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) - offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + 
PipelineRasterizationConservativeStateCreateInfoEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
-    PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode(
+      VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
     {
       conservativeRasterizationMode = conservativeRasterizationMode_;
       return *this;
     }
 
-    PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+    PipelineRasterizationConservativeStateCreateInfoEXT &
+      setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
     {
       extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
       return *this;
     }
 
-
-    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
     }
 
     operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default;
 #else
-    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
-          && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) &&
+             ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
     }
 
-    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+    const void * pNext = {};
     VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
-    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
+    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode =
+      VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
     float extraPrimitiveOverestimationSize = {};
-
   };
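  // [Editorial aside; not part of the upstream patch] The regenerated wrapper above
  // keeps the usual vulkan.hpp conventions: default member initializers, chainable
  // setters, and implicit conversion to the corresponding C struct. A minimal usage
  // sketch, assuming only the setter-chain API visible in this hunk (the variable
  // names are invented for illustration):
  //
  //   vk::PipelineRasterizationConservativeStateCreateInfoEXT conservative{};
  //   conservative.setConservativeRasterizationMode( vk::ConservativeRasterizationModeEXT::eOverestimate )
  //       .setExtraPrimitiveOverestimationSize( 0.5f );
  //
  //   vk::PipelineRasterizationStateCreateInfo raster{};
  //   raster.pNext = &conservative;  // extension structs chain through pNext
  //
  // Because the wrapper is standard-layout and size-checked against
  // VkPipelineRasterizationConservativeStateCreateInfoEXT (see the static_asserts
  // just below), its address can be handed to any C-API entry point that expects
  // the Vk struct.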
- static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -64279,90 +91213,94 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineRasterizationDepthClipStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , depthClipEnable( depthClipEnable_ ) {} - PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( + PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationDepthClipStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) - offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( 
VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT { depthClipEnable = depthClipEnable_; return *this; } - - operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( depthClipEnable == rhs.depthClipEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthClipEnable == rhs.depthClipEnable ); } - bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; }; - static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -64372,110 +91310,118 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineRasterizationLineStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationLineStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, - VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, - uint32_t lineStippleFactor_ = {}, - uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT : lineRasterizationMode( lineRasterizationMode_ ) , stippledLineEnable( stippledLineEnable_ ) , lineStippleFactor( lineStippleFactor_ ) , lineStipplePattern( lineStipplePattern_ ) {} - PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationLineStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & + operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfoEXT & + operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationLineStateCreateInfoEXT ) - offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { lineRasterizationMode = lineRasterizationMode_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & 
setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT { stippledLineEnable = stippledLineEnable_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT { lineStippleFactor = lineStippleFactor_; return *this; } - PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfoEXT & + setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT { lineStipplePattern = lineStipplePattern_; return *this; } - - operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( lineRasterizationMode == rhs.lineRasterizationMode ) - && ( stippledLineEnable == rhs.stippledLineEnable ) - && ( lineStippleFactor == rhs.lineStippleFactor ) - && ( lineStipplePattern == rhs.lineStipplePattern ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( lineRasterizationMode == rhs.lineRasterizationMode ) && + ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && + ( lineStipplePattern == rhs.lineStipplePattern ); } - bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; - uint32_t lineStippleFactor = {}; - uint16_t lineStipplePattern = {}; - + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; }; - static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -64483,82 +91429,176 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineRasterizationLineStateCreateInfoEXT; }; - struct PipelineRasterizationStateRasterizationOrderAMD + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT - : rasterizationOrder( rasterizationOrder_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex ) VULKAN_HPP_NOEXCEPT + : provokingVertexMode( provokingVertexMode_ ) {} - PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationProvokingVertexStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) - offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD 
rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT { - rasterizationOrder = rasterizationOrder_; + provokingVertexMode = provokingVertexMode_; return *this; } - - operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( rasterizationOrder == rhs.rasterizationOrder ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode ); } - bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex; + }; + static_assert( sizeof( PipelineRasterizationProvokingVertexStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
 
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
+  };
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
+      VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ =
+        VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
+      : rasterizationOrder( rasterizationOrder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
+      PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationStateRasterizationOrderAMD(
+          *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &
+      operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineRasterizationStateRasterizationOrderAMD &
+      operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD &
+      setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
 
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+    const void *                        pNext = {};
+    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder =
+      VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
   };
-  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!"
); + static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == + sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -64568,90 +91608,94 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineRasterizationStateStreamCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, - uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, + uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , rasterizationStream( rasterizationStream_ ) {} - PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( + PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateStreamCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateStreamCreateInfoEXT & + operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) - offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT 
& + setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT { rasterizationStream = rasterizationStream_; return *this; } - - operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( rasterizationStream == rhs.rasterizationStream ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rasterizationStream == rhs.rasterizationStream ); } - bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; - uint32_t rasterizationStream = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; }; - static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == + sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -64661,80 +91705,85 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineRepresentativeFragmentTestStateCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; - VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) {} - PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( + PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV( + VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRepresentativeFragmentTestStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) - offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT { representativeFragmentTestEnable = representativeFragmentTestEnable_; return *this; } - - operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator 
VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this ); + return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); } - bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; }; - static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == + sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV> @@ -64744,90 +91793,95 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineSampleLocationsStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineSampleLocationsStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT : sampleLocationsEnable( sampleLocationsEnable_ ) , sampleLocationsInfo( sampleLocationsInfo_ ) {} - PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( + PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineSampleLocationsStateCreateInfoEXT( + *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT & + operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) - offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ); return *this; } - PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>(&rhs); - return *this; - } - - PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsEnable = sampleLocationsEnable_; return *this; } - PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } - - operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + 
operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this ); + return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>( this ); } operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this ); + return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) - && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && + ( sampleLocationsInfo == rhs.sampleLocationsInfo ); } - bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; - static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == + sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value, + "struct wrapper is not a standard layout!" );
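// [Editor's sketch, not part of the upstream patch.] Typical use of the wrapper
// above: enable programmable sample locations on a pipeline, with a
// vk::SampleLocationsInfoEXT (here 'sampleLocationsInfo', built elsewhere and
// purely illustrative) chained through the multisample state's pNext:
//
//   vk::PipelineSampleLocationsStateCreateInfoEXT slState{};
//   slState.setSampleLocationsEnable( VK_TRUE ).setSampleLocationsInfo( sampleLocationsInfo );
//   vk::PipelineMultisampleStateCreateInfo msState{};
//   msState.setPNext( &slState );  // VK_EXT_sample_locations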
template <> struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT> @@ -64837,68 +91891,71 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT : requiredSubgroupSize( requiredSubgroupSize_ ) {} - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( + VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( + *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & + operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & + operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) ); + *this = + *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs ); return *this; } - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; - } - - PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>(&rhs); - return *this; - } - - - operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this ); + return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>( this ); } operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this ); + return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( requiredSubgroupSize ==
rhs.requiredSubgroupSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); } - bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; - void* pNext = {}; - uint32_t requiredSubgroupSize = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + void * pNext = {}; + uint32_t requiredSubgroupSize = {}; }; - static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == + sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT> @@ -64908,105 +91965,117 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineTessellationDomainOriginStateCreateInfo { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineTessellationDomainOriginStateCreateInfo; - VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT - : domainOrigin( domainOrigin_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT : domainOrigin( domainOrigin_ ) {} - PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineTessellationDomainOriginStateCreateInfo( + *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & + operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationDomainOriginStateCreateInfo & + operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) - offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ); return *this; }
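// [Editor's note - illustrative, not part of the upstream patch.] Throughout
// these hunks the old memcpy-from-pNext assignment is replaced by a plain
// cast-and-copy: each wrapper is standard-layout and the same size as its C
// counterpart (the static_asserts above enforce this), so a VkXxx value can be
// reinterpreted as the vk::Xxx wrapper and copied whole. A minimal round trip:
//
//   VkPipelineTessellationDomainOriginStateCreateInfo raw{};            // C struct from elsewhere
//   raw.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
//   vk::PipelineTessellationDomainOriginStateCreateInfo wrapped = raw;  // ctor / operator= above
//   VkPipelineTessellationDomainOriginStateCreateInfo back = wrapped;   // implicit conversion back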
- PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>(&rhs); - return *this; - } - - PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & + setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT { domainOrigin = domainOrigin_; return *this; } - - operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this ); + return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>( this ); } operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this ); + return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default; #else - bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( domainOrigin == rhs.domainOrigin ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin ); } - bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; }; - static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == + sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, + "struct wrapper is not a standard layout!"
); template <> struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo> { using Type = PipelineTessellationDomainOriginStateCreateInfo; }; + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; struct VertexInputBindingDivisorDescriptionEXT { - - +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT : binding( binding_ ) , divisor( divisor_ ) {} - VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDivisorDescriptionEXT( + *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & + operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDivisorDescriptionEXT & + operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>(&rhs); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs ); return *this; } @@ -65022,129 +92091,154 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this ); + return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT *>( this ); } operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this ); + return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT *>( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( VertexInputBindingDivisorDescriptionEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default; #else - bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( binding == rhs.binding ) - && ( divisor == rhs.divisor ); + return ( binding == rhs.binding ) && ( divisor == rhs.divisor ); } - bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: uint32_t binding = {}; uint32_t divisor = {}; - }; - static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == + sizeof( VkVertexInputBindingDivisorDescriptionEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, + "struct wrapper is not a standard layout!" );
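// [Editor's sketch, not part of the upstream patch.] The
// PipelineVertexInputDivisorStateCreateInfoEXT wrapper just below gains an
// ArrayProxyNoTemporaries constructor and setter that fill the count/pointer
// pair from a single range. Assuming enhanced mode (the default), instanced
// attributes advancing once every 4 instances could be described as:
//
//   std::array<vk::VertexInputBindingDivisorDescriptionEXT, 1> divisors{
//       vk::VertexInputBindingDivisorDescriptionEXT{}.setBinding( 1 ).setDivisor( 4 ) };
//   auto divisorState = vk::PipelineVertexInputDivisorStateCreateInfoEXT{}
//                           .setVertexBindingDivisors( divisors );
//   // vertexBindingDivisorCount == 1, pVertexBindingDivisors == divisors.data()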
struct PipelineVertexInputDivisorStateCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; - VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( + uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {} ) + VULKAN_HPP_NOEXCEPT : vertexBindingDivisorCount( vertexBindingDivisorCount_ ) , pVertexBindingDivisors( pVertexBindingDivisors_ ) {} - PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( + PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineVertexInputDivisorStateCreateInfoEXT( + *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) + : vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) ) + , pVertexBindingDivisors( vertexBindingDivisors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) - offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ); return *this; } - PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>(&rhs); - return *this; - } - - PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & + setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT { vertexBindingDivisorCount = vertexBindingDivisorCount_; return *this; } - 
PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT { pVertexBindingDivisors = pVertexBindingDivisors_; return *this; } - - operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); + pVertexBindingDivisors = vertexBindingDivisors_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) - && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && + ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); } - bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; - const void* pNext = {}; - uint32_t vertexBindingDivisorCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {}; }; - static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == + sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -65154,100 +92248,130 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineViewportCoarseSampleOrderStateCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; - VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, - uint32_t customSampleOrderCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT : sampleOrderType( sampleOrderType_ ) , customSampleOrderCount( customSampleOrderCount_ ) , pCustomSampleOrders( pCustomSampleOrders_ ) {} - PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportCoarseSampleOrderStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + customSampleOrders_ ) + : sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( static_cast( customSampleOrders_.size() ) ) + , pCustomSampleOrders( customSampleOrders_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - 
PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT { sampleOrderType = sampleOrderType_; return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT { customSampleOrderCount = customSampleOrderCount_; return *this; } - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT { pCustomSampleOrders = pCustomSampleOrders_; return *this; } - - operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + customSampleOrders_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + customSampleOrderCount = static_cast( customSampleOrders_.size() ); + pCustomSampleOrders = customSampleOrders_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleOrderType == rhs.sampleOrderType ) - && ( customSampleOrderCount == rhs.customSampleOrderCount ) - && ( pCustomSampleOrders == rhs.pCustomSampleOrders ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && + ( customSampleOrderCount == rhs.customSampleOrderCount ) && + ( pCustomSampleOrders == rhs.pCustomSampleOrders ); } - bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; - uint32_t customSampleOrderCount = {}; - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; + uint32_t customSampleOrderCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; }; - static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == + sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -65257,90 +92381,114 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineViewportExclusiveScissorStateCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; - VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( + uint32_t exclusiveScissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT : exclusiveScissorCount( exclusiveScissorCount_ ) , pExclusiveScissors( pExclusiveScissors_ ) {} - PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( + PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportExclusiveScissorStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) + : exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ) + , pExclusiveScissors( exclusiveScissors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) - offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & + setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT { exclusiveScissorCount = exclusiveScissorCount_; return *this; } - PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & + setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT { pExclusiveScissors = pExclusiveScissors_; return *this; } - - operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); + pExclusiveScissors = exclusiveScissors_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default; #else - bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) - && ( pExclusiveScissors == rhs.pExclusiveScissors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && ( pExclusiveScissors == rhs.pExclusiveScissors ); } - bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; - const void* pNext = {}; - uint32_t exclusiveScissorCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + const void * pNext = {}; + uint32_t exclusiveScissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; }; - static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == + sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -65348,113 +92496,62 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; }; - struct ShadingRatePaletteNV - { - - - VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) - , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) - {} - - ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT - { - shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; - return *this; - } - - ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT - { - pShadingRatePaletteEntries = pShadingRatePaletteEntries_; - return *this; - } - - - operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShadingRatePaletteNV const& ) const = default; -#else - bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) - && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); - } - - bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - uint32_t shadingRatePaletteEntryCount = {}; - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {}; - - }; - static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - struct PipelineViewportShadingRateImageStateCreateInfoNV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; - VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT : shadingRateImageEnable( shadingRateImageEnable_ ) , viewportCount( viewportCount_ ) , pShadingRatePalettes( pShadingRatePalettes_ ) {} - PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( + PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportShadingRateImageStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePalettes_ ) + : shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( static_cast( shadingRatePalettes_.size() ) ) + , pShadingRatePalettes( shadingRatePalettes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) - offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) ); + *this = + *reinterpret_cast( &rhs ); return *this; } - PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) 
VULKAN_HPP_NOEXCEPT
+    PipelineViewportShadingRateImageStateCreateInfoNV &
+      setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       shadingRateImageEnable = shadingRateImageEnable_;
       return *this;
@@ -65466,54 +92563,62 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+    PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes(
+      const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
     {
       pShadingRatePalettes = pShadingRatePalettes_;
       return *this;
     }
-
-    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &
+        shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+      viewportCount        = static_cast<uint32_t>( shadingRatePalettes_.size() );
+      pShadingRatePalettes = shadingRatePalettes_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
     }
 
     operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default;
 #else
-    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( shadingRateImageEnable == rhs.shadingRateImageEnable )
-          && ( viewportCount == rhs.viewportCount )
-          && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && ( viewportCount == rhs.viewportCount ) &&
+             ( pShadingRatePalettes == rhs.pShadingRatePalettes );
     }
 
-    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
-    uint32_t viewportCount = {};
-    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {};
   };
-  static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) ==
+                   sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
@@ -65523,26 +92628,35 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct ViewportSwizzleNV
   {
-
-
-    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
-                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
-                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
-                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ =
+                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
       : x( x_ )
      , y( y_ )
      , z( z_ )
      , w( w_ )
    {}
 
-    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &
+                            operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
       return *this;
     }
 
@@ -65570,84 +92684,95 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT
+    operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
+      return *reinterpret_cast<const VkViewportSwizzleNV *>( this );
     }
 
     operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkViewportSwizzleNV*>( this );
+      return *reinterpret_cast<VkViewportSwizzleNV *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ViewportSwizzleNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ViewportSwizzleNV const & ) const = default;
 #else
-    bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( x == rhs.x )
-          && ( y == rhs.y )
-          && ( z == rhs.z )
-          && ( w == rhs.w );
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w );
     }
 
-    bool operator!=( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
     VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
     VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
     VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
     VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
-
   };
-  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
 
   struct PipelineViewportSwizzleStateCreateInfoNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
 
-    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {},
-                                                                   uint32_t viewportCount_ = {},
-                                                                   const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {},
+      uint32_t viewportCount_ = {},
+      const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT
       : flags( flags_ )
      , viewportCount( viewportCount_ )
      , pViewportSwizzles( pViewportSwizzles_ )
    {}
 
-    PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(
+      PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PipelineViewportSwizzleStateCreateInfoNV(
+          *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportSwizzleStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const &
+        viewportSwizzles_ )
+      : flags( flags_ )
+      , viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) )
+      , pViewportSwizzles( viewportSwizzles_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &
+      operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportSwizzleStateCreateInfoNV &
+      operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) - offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
       return *this;
     }
 
-    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>(&rhs);
-      return *this;
-    }
-
-    PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    PipelineViewportSwizzleStateCreateInfoNV &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
@@ -65659,54 +92784,61 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+    PipelineViewportSwizzleStateCreateInfoNV &
+      setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
     {
       pViewportSwizzles = pViewportSwizzles_;
       return *this;
     }
-
-    operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const &
+        viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+      viewportCount     = static_cast<uint32_t>( viewportSwizzles_.size() );
+      pViewportSwizzles = viewportSwizzles_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
     }
 
     operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default;
 #else
-    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( viewportCount == rhs.viewportCount )
-          && ( pViewportSwizzles == rhs.pViewportSwizzles );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( viewportCount == rhs.viewportCount ) && ( pViewportSwizzles == rhs.pViewportSwizzles );
     }
 
-    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
-    uint32_t viewportCount = {};
-    const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {};
   };
-  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) ==
+                   sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
@@ -65714,113 +92846,61 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PipelineViewportSwizzleStateCreateInfoNV;
   };
 
-  struct ViewportWScalingNV
-  {
-
-
-    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {},
-                                             float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
-      : xcoeff( xcoeff_ )
-      , ycoeff( ycoeff_ )
-    {}
-
-    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>(&rhs);
-      return *this;
-    }
-
-    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
-    {
-      xcoeff = xcoeff_;
-      return *this;
-    }
-
-    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
-    {
-      ycoeff = ycoeff_;
-      return *this;
-    }
-
-
-    operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
-    }
-
-    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkViewportWScalingNV*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ViewportWScalingNV const& ) const = default;
-#else
-    bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( xcoeff == rhs.xcoeff )
-          && ( ycoeff == rhs.ycoeff );
-    }
-
-    bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    float xcoeff = {};
-    float ycoeff = {};
-
-  };
-  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
-
   struct PipelineViewportWScalingStateCreateInfoNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineViewportWScalingStateCreateInfoNV;
 
-    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {},
-                                                                    uint32_t viewportCount_ = {},
-                                                                    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {},
+      uint32_t viewportCount_ = {},
+      const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT
      : viewportWScalingEnable( viewportWScalingEnable_ )
      , viewportCount( viewportCount_ )
      , pViewportWScalings( pViewportWScalings_ )
    {}
 
-    PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
+      PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PipelineViewportWScalingStateCreateInfoNV(
+          *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportWScalingStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
+        viewportWScalings_ )
+      : viewportWScalingEnable( viewportWScalingEnable_ )
+      , viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) )
+      , pViewportWScalings( viewportWScalings_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
+      operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportWScalingStateCreateInfoNV &
+      operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportWScalingStateCreateInfoNV ) - offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
      return *this;
    }
 
-    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>(&rhs);
-      return *this;
-    }
-
-    PipelineViewportWScalingStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+    PipelineViewportWScalingStateCreateInfoNV &
+      setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
     {
       viewportWScalingEnable = viewportWScalingEnable_;
       return *this;
@@ -65832,54 +92912,62 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    PipelineViewportWScalingStateCreateInfoNV &
+      setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
     {
       pViewportWScalings = pViewportWScalings_;
       return *this;
     }
-
-    operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
+        viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+      viewportCount      = static_cast<uint32_t>( viewportWScalings_.size() );
+      pViewportWScalings = viewportWScalings_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>( this );
    }
 
     operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default;
 #else
-    bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
-          && ( viewportCount == rhs.viewportCount )
-          && ( pViewportWScalings == rhs.pViewportWScalings );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && ( viewportCount == rhs.viewportCount ) &&
+             ( pViewportWScalings == rhs.pViewportWScalings );
    }
 
-    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
       return !operator==( rhs );
    }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
-    uint32_t viewportCount = {};
-    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {};
   };
-  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) ==
+                   sizeof( VkPipelineViewportWScalingStateCreateInfoNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
@@ -65887,34 +92975,34 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PipelineViewportWScalingStateCreateInfoNV;
   };
 
-#ifdef VK_USE_PLATFORM_GGP
+#if defined( VK_USE_PLATFORM_GGP )
   struct PresentFrameTokenGGP
   {
-    static const bool allowDuplicate = false;
+    static const bool allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
 
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
     VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT
       : frameToken( frameToken_ )
     {}
 
-    PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PresentFrameTokenGGP ) - offsetof( PresentFrameTokenGGP, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP &
+                            operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
       return *this;
     }
 
-    PresentFrameTokenGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -65926,43 +93014,38 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
+      return *reinterpret_cast<const VkPresentFrameTokenGGP *>( this );
     }
 
     operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
+      return *reinterpret_cast<VkPresentFrameTokenGGP *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentFrameTokenGGP const& ) const = default;
-#else
-    bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentFrameTokenGGP const & ) const = default;
+#  else
+    bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
     }
 
-    bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+#  endif
 
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
-    const void* pNext = {};
-    GgpFrameToken frameToken = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
+    const void * pNext = {};
+    GgpFrameToken frameToken = {};
   };
-  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
 
   template <>
@@ -65972,166 +93055,33 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_GGP*/
 
-  struct PresentInfoKHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
-
-    VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {},
-                                         const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
-                                         uint32_t swapchainCount_ = {},
-                                         const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {},
-                                         const uint32_t* pImageIndices_ = {},
-                                         VULKAN_HPP_NAMESPACE::Result* pResults_ = {} ) VULKAN_HPP_NOEXCEPT
-      : waitSemaphoreCount( waitSemaphoreCount_ )
-      , pWaitSemaphores( pWaitSemaphores_ )
-      , swapchainCount( swapchainCount_ )
-      , pSwapchains( pSwapchains_ )
-      , pImageIndices( pImageIndices_ )
-      , pResults( pResults_ )
-    {}
-
-    PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PresentInfoKHR ) - offsetof( PresentInfoKHR, pNext ) );
-      return *this;
-    }
-
-    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>(&rhs);
-      return *this;
-    }
-
-    PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      waitSemaphoreCount = waitSemaphoreCount_;
-      return *this;
-    }
-
-    PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pWaitSemaphores = pWaitSemaphores_;
-      return *this;
-    }
-
-    PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      swapchainCount = swapchainCount_;
-      return *this;
-    }
-
-    PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pSwapchains = pSwapchains_;
-      return *this;
-    }
-
-    PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pImageIndices = pImageIndices_;
-      return *this;
-    }
-
-    PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pResults = pResults_;
-      return *this;
-    }
-
-
-    operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPresentInfoKHR*>( this );
-    }
-
-    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPresentInfoKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentInfoKHR const& ) const = default;
-#else
-    bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
-          && ( pWaitSemaphores == rhs.pWaitSemaphores )
-          && ( swapchainCount == rhs.swapchainCount )
-          && ( pSwapchains == rhs.pSwapchains )
-          && ( pImageIndices == rhs.pImageIndices )
-          && ( pResults == rhs.pResults );
-    }
-
-    bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
-    const void* pNext = {};
-    uint32_t waitSemaphoreCount = {};
-    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
-    uint32_t swapchainCount = {};
-    const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {};
-    const uint32_t* pImageIndices = {};
-    VULKAN_HPP_NAMESPACE::Result* pResults = {};
-
-  };
-  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePresentInfoKHR>
-  {
-    using Type = PresentInfoKHR;
-  };
-
   struct RectLayerKHR
   {
-
-
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
     VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
                                        VULKAN_HPP_NAMESPACE::Extent2D extent_ = {},
-                                       uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
+                                       uint32_t                       layer_  = {} ) VULKAN_HPP_NOEXCEPT
       : offset( offset_ )
      , extent( extent_ )
      , layer( layer_ )
    {}
 
-    explicit RectLayerKHR( Rect2D const& rect2D,
-                           uint32_t layer_ = {} )
-      : offset( rect2D.offset )
-      , extent( rect2D.extent )
-      , layer( layer_ )
-    {}
+    VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) )
+    {}
 
-    RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} )
+      : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
       return *this;
     }
 
@@ -66153,63 +93103,67 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRectLayerKHR*>( this );
+      return *reinterpret_cast<const VkRectLayerKHR *>( this );
     }
 
     operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRectLayerKHR*>( this );
+      return *reinterpret_cast<VkRectLayerKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RectLayerKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RectLayerKHR const & ) const = default;
 #else
-    bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( offset == rhs.offset )
-          && ( extent == rhs.extent )
-          && ( layer == rhs.layer );
+      return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer );
     }
 
-    bool operator!=( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
     VULKAN_HPP_NAMESPACE::Offset2D offset = {};
     VULKAN_HPP_NAMESPACE::Extent2D extent = {};
-    uint32_t layer = {};
-
+    uint32_t                       layer  = {};
   };
   static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
 
   struct PresentRegionKHR
   {
-
-
-    VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {},
-                                           const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PresentRegionKHR( uint32_t rectangleCount_ = {},
+                        const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
      : rectangleCount( rectangleCount_ )
      , pRectangles( pRectangles_ )
    {}
 
-    PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentRegionKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
+      : rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
      return *this;
    }
 
@@ -66219,78 +93173,92 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT
+    PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
     {
       pRectangles = pRectangles_;
       return *this;
     }
-
-    operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentRegionKHR & setRectangles(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
+      VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPresentRegionKHR*>( this );
+      rectangleCount = static_cast<uint32_t>( rectangles_.size() );
+      pRectangles    = rectangles_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionKHR *>( this );
     }
 
     operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPresentRegionKHR*>( this );
+      return *reinterpret_cast<VkPresentRegionKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentRegionKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentRegionKHR const & ) const = default;
 #else
-    bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( rectangleCount == rhs.rectangleCount )
-          && ( pRectangles == rhs.pRectangles );
+      return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles );
     }
 
-    bool operator!=( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    uint32_t rectangleCount = {};
-    const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {};
-
+    uint32_t rectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {};
   };
-  static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
 
   struct PresentRegionsKHR
   {
-    static const bool allowDuplicate = false;
+    static const bool allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
 
-    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {},
-                                            const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PresentRegionsKHR( uint32_t swapchainCount_ = {},
+                         const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
      : swapchainCount( swapchainCount_ )
      , pRegions( pRegions_ )
    {}
 
-    PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PresentRegionsKHR ) - offsetof( PresentRegionsKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) )
+    {}
 
-    PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentRegionsKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
+      : swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR &
+                            operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
      return *this;
    }
 
-    PresentRegionsKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -66302,51 +93270,56 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
       pRegions = pRegions_;
       return *this;
    }
-
-    operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentRegionsKHR & setRegions(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
+      VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
+      swapchainCount = static_cast<uint32_t>( regions_.size() );
+      pRegions       = regions_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionsKHR *>( this );
    }
 
     operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPresentRegionsKHR*>( this );
+      return *reinterpret_cast<VkPresentRegionsKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentRegionsKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentRegionsKHR const & ) const = default;
 #else
-    bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( swapchainCount == rhs.swapchainCount )
-          && ( pRegions == rhs.pRegions );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
+             ( pRegions == rhs.pRegions );
     }
 
-    bool operator!=( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
-    const void* pNext = {};
-    uint32_t swapchainCount = {};
-    const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {};
   };
-  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
@@ -66357,22 +93330,26 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct PresentTimeGOOGLE
   {
-
-
-    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {},
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_          = {},
                                             uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
      : presentID( presentID_ )
      , desiredPresentTime( desiredPresentTime_ )
    {}
 
-    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE &
+                            operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
      return *this;
    }
 
@@ -66388,72 +93365,75 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
+      return *reinterpret_cast<const VkPresentTimeGOOGLE *>( this );
    }
 
     operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
+      return *reinterpret_cast<VkPresentTimeGOOGLE *>( this );
    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentTimeGOOGLE const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentTimeGOOGLE const & ) const = default;
 #else
-    bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( presentID == rhs.presentID )
-          && ( desiredPresentTime == rhs.desiredPresentTime );
+      return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime );
     }
 
-    bool operator!=( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    uint32_t presentID = {};
+    uint32_t presentID          = {};
     uint64_t desiredPresentTime = {};
-
   };
-  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
 
   struct PresentTimesInfoGOOGLE
   {
-    static const bool allowDuplicate = false;
+    static const bool allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
 
-    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {},
-                                                 const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {},
+                              const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {} ) VULKAN_HPP_NOEXCEPT
      : swapchainCount( swapchainCount_ )
      , pTimes( pTimes_ )
    {}
 
-    PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PresentTimesInfoGOOGLE ) - offsetof( PresentTimesInfoGOOGLE, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
+    {}
 
-    PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentTimesInfoGOOGLE(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
+      : swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE &
+                            operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
      return *this;
    }
 
-    PresentTimesInfoGOOGLE & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -66465,51 +93445,56 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT
+    PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
    {
       pTimes = pTimes_;
       return *this;
    }
-
-    operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentTimesInfoGOOGLE & setTimes(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
+      VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
+      swapchainCount = static_cast<uint32_t>( times_.size() );
+      pTimes         = times_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE *>( this );
    }
 
     operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
+      return *reinterpret_cast<VkPresentTimesInfoGOOGLE *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PresentTimesInfoGOOGLE const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default;
 #else
-    bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( swapchainCount == rhs.swapchainCount )
-          && ( pTimes == rhs.pTimes );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
+             ( pTimes == rhs.pTimes );
     }
 
-    bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
-    const void* pNext = {};
-    uint32_t swapchainCount = {};
-    const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {};
   };
-  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
 
   template <>
@@ -66518,116 +93503,33 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = PresentTimesInfoGOOGLE;
   };
 
-  struct PrivateDataSlotCreateInfoEXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT;
-
-    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-    {}
-
-    PrivateDataSlotCreateInfoEXT & operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( PrivateDataSlotCreateInfoEXT ) - offsetof( PrivateDataSlotCreateInfoEXT, pNext ) );
-      return *this;
-    }
-
-    PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    PrivateDataSlotCreateInfoEXT& operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT const *>(&rhs);
-      return *this;
-    }
-
-    PrivateDataSlotCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PrivateDataSlotCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-
-    operator VkPrivateDataSlotCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( this );
-    }
-
-    operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkPrivateDataSlotCreateInfoEXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( PrivateDataSlotCreateInfoEXT const& ) const = default;
-#else
-    bool operator==( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags );
-    }
-
-    bool operator!=( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfoEXT;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {};
-
-  };
-  static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PrivateDataSlotCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfoEXT>
-  {
-    using Type = PrivateDataSlotCreateInfoEXT;
-  };
-
   struct ProtectedSubmitInfo
   {
-    static const bool allowDuplicate = false;
+    static const bool allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
 
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
     VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT
       : protectedSubmit( protectedSubmit_ )
     {}
 
-    ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( ProtectedSubmitInfo ) - offsetof( ProtectedSubmitInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo &
+                            operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
      return *this;
    }
 
-    ProtectedSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -66639,43 +93541,37 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
+      return *reinterpret_cast<const VkProtectedSubmitInfo *>( this );
     }
 
     operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
+      return *reinterpret_cast<VkProtectedSubmitInfo *>( this );
    }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ProtectedSubmitInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ProtectedSubmitInfo const & ) const = default;
 #else
-    bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( protectedSubmit == rhs.protectedSubmit );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit );
     }
 
-    bool operator!=( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
   };
-  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ),
+                 "struct and wrapper have different size!" );
); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -66684,369 +93580,230 @@ namespace VULKAN_HPP_NAMESPACE using Type = ProtectedSubmitInfo; }; - struct QueryPoolCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; - - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, - uint32_t queryCount_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , queryType( queryType_ ) - , queryCount( queryCount_ ) - , pipelineStatistics( pipelineStatistics_ ) - {} - - QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolCreateInfo ) - offsetof( QueryPoolCreateInfo, pNext ) ); - return *this; - } - - QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT - { - queryType = queryType_; - return *this; - } - - QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT - { - queryCount = queryCount_; - return *this; - } - - QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT - { - pipelineStatistics = pipelineStatistics_; - return *this; - } - - - operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolCreateInfo const& ) const = default; -#else - bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queryType == rhs.queryType ) - && ( queryCount == rhs.queryCount ) - && ( pipelineStatistics == rhs.pipelineStatistics ); - } - - bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; - uint32_t queryCount = {}; - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; - - }; - static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = QueryPoolCreateInfo; - }; - - struct QueryPoolPerformanceCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, - uint32_t counterIndexCount_ = {}, - const uint32_t* pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFamilyIndex( queueFamilyIndex_ ) - , counterIndexCount( counterIndexCount_ ) - , pCounterIndices( pCounterIndices_ ) - {} - - QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) ); - return *this; - } - - QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT - { - counterIndexCount = counterIndexCount_; - return *this; - } - - QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT - { - pCounterIndices = pCounterIndices_; - return *this; - } - - - operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default; -#else - bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( counterIndexCount == rhs.counterIndexCount ) - && ( pCounterIndices == rhs.pCounterIndices ); - } - - bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; - const void* pNext = {}; - uint32_t queueFamilyIndex = {}; - uint32_t counterIndexCount = {}; - const uint32_t* pCounterIndices = {}; - - }; - static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = QueryPoolPerformanceCreateInfoKHR; - }; - struct QueryPoolPerformanceQueryCreateInfoINTEL { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT : performanceCountersSampling( performanceCountersSampling_ ) {} - QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceQueryCreateInfoINTEL( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) - offsetof( QueryPoolPerformanceQueryCreateInfoINTEL, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueryPoolPerformanceQueryCreateInfoINTEL& operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT { performanceCountersSampling = performanceCountersSampling_; return *this; } - - operator VkQueryPoolPerformanceQueryCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; #else - bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( performanceCountersSampling == rhs.performanceCountersSampling ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( performanceCountersSampling == rhs.performanceCountersSampling ); } - bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; }; - static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == + sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
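
The pair of reinterpret_cast conversion operators in this hunk is the whole interop story: because the static_asserts guarantee that wrapper and C struct share size and standard layout, either one can be viewed as the other at zero cost. A purely illustrative round trip (default configuration, i.e. VULKAN_HPP_NO_STRUCT_CONSTRUCTORS not defined):

#include <vulkan/vulkan.hpp>

void roundTrip()
{
  vk::QueryPoolPerformanceQueryCreateInfoINTEL wrapped{ vk::QueryPoolSamplingModeINTEL::eManual };

  // wrapper -> C struct: the const conversion operator is just a reinterpret_cast of `this`
  const VkQueryPoolPerformanceQueryCreateInfoINTEL & raw = wrapped;

  // C struct -> wrapper: the converting constructor performs the inverse cast
  vk::QueryPoolPerformanceQueryCreateInfoINTEL back{ raw };
  (void)back;
}
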
); template <> struct CppType { using Type = QueryPoolPerformanceQueryCreateInfoINTEL; }; + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; - struct QueueFamilyCheckpointPropertiesNV + struct QueueFamilyCheckpointProperties2NV { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyCheckpointProperties2NV; - VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) {} - QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointProperties2NV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointProperties2NV & + operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV & + operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); - return *this; + return *reinterpret_cast( this ); } - - operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueueFamilyCheckpointPropertiesNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default; #else - bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); } - bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask = {}; + }; + static_assert( sizeof( QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + template <> + struct CppType + { + using Type = QueueFamilyCheckpointProperties2NV; + }; + + struct QueueFamilyCheckpointPropertiesNV + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyCheckpointPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( + VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT + : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointPropertiesNV & + operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default; +#else + bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); + } + + bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; - void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; + void * pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; - }; - static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
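
Both checkpoint structures are read-only outputs: an application chains them behind QueueFamilyProperties2 and lets the driver fill them in. A hedged sketch of that query (assuming a vk::PhysicalDevice `gpu` with VK_NV_device_diagnostic_checkpoints available):

#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<vk::QueueFamilyCheckpointPropertiesNV> queryCheckpointProperties( vk::PhysicalDevice gpu )
{
  uint32_t count = 0;
  gpu.getQueueFamilyProperties2( &count, nullptr );

  std::vector<vk::QueueFamilyCheckpointPropertiesNV> checkpoints( count );
  std::vector<vk::QueueFamilyProperties2>            props( count );
  for ( uint32_t i = 0; i < count; ++i )
    props[i].pNext = &checkpoints[i];  // hook the NV output struct into the pNext chain

  gpu.getQueueFamilyProperties2( &count, props.data() );
  return checkpoints;
}
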
); template <> struct CppType @@ -67054,937 +93811,156 @@ namespace VULKAN_HPP_NAMESPACE using Type = QueueFamilyCheckpointPropertiesNV; }; - struct QueueFamilyProperties + struct QueueFamilyGlobalPriorityPropertiesEXT { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyGlobalPriorityPropertiesEXT; - - VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, - uint32_t queueCount_ = {}, - uint32_t timestampValidBits_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFlags( queueFlags_ ) - , queueCount( queueCount_ ) - , timestampValidBits( timestampValidBits_ ) - , minImageTransferGranularity( minImageTransferGranularity_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT( + uint32_t priorityCount_ = {}, + std::array const & + priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow } } ) VULKAN_HPP_NOEXCEPT + : priorityCount( priorityCount_ ) + , priorities( priorities_ ) {} - QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - QueueFamilyProperties& operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueueFamilyProperties const& ) const = default; -#else - bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( queueFlags == rhs.queueFlags ) - && ( queueCount == rhs.queueCount ) - && ( timestampValidBits == rhs.timestampValidBits ) - && ( minImageTransferGranularity == rhs.minImageTransferGranularity ); - } - - bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; - uint32_t queueCount = {}; - uint32_t timestampValidBits = {}; - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; - - }; - static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct QueueFamilyProperties2 - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2; - - VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFamilyProperties( queueFamilyProperties_ ) + QueueFamilyGlobalPriorityPropertiesEXT( VkQueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyGlobalPriorityPropertiesEXT( + *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT & + operator=( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyGlobalPriorityPropertiesEXT & + operator=( VkQueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( QueueFamilyProperties2 ) - offsetof( QueueFamilyProperties2, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( QueueFamilyProperties2 const& ) const = default; -#else - bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( queueFamilyProperties == rhs.queueFamilyProperties ); - } - - bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {}; - - }; - static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = QueueFamilyProperties2; - }; - -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct RayTracingShaderGroupCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, - uint32_t generalShader_ = {}, - uint32_t closestHitShader_ = {}, - uint32_t anyHitShader_ = {}, - uint32_t intersectionShader_ = {}, - const void* pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) - , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) - {} - - RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingShaderGroupCreateInfoKHR ) - offsetof( RayTracingShaderGroupCreateInfoKHR, pNext ) ); - return *this; - } - - RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RayTracingShaderGroupCreateInfoKHR& operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RayTracingShaderGroupCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyGlobalPriorityPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyGlobalPriorityPropertiesEXT & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT { - type = type_; + priorityCount = priorityCount_; return *this; } - RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyGlobalPriorityPropertiesEXT & setPriorities( + std::array priorities_ ) + VULKAN_HPP_NOEXCEPT { - generalShader = generalShader_; + priorities = priorities_; return *this; } - RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyGlobalPriorityPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - closestHitShader = closestHitShader_; - return *this; + return *reinterpret_cast( this ); } - RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyGlobalPriorityPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - anyHitShader = anyHitShader_; - return *this; + return *reinterpret_cast( this ); } - RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT - { - intersectionShader = intersectionShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void* pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT - { - pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; - return *this; - } - - - operator VkRayTracingShaderGroupCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingShaderGroupCreateInfoKHR &() 
VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingShaderGroupCreateInfoKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyGlobalPriorityPropertiesEXT const & ) const = default; #else - bool operator==( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( generalShader == rhs.generalShader ) - && ( closestHitShader == rhs.closestHitShader ) - && ( anyHitShader == rhs.anyHitShader ) - && ( intersectionShader == rhs.intersectionShader ) - && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && + ( priorities == rhs.priorities ); } - bool operator!=( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; - uint32_t generalShader = {}; - uint32_t closestHitShader = {}; - uint32_t anyHitShader = {}; - uint32_t intersectionShader = {}; - const void* pShaderGroupCaptureReplayHandle = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesEXT; + void * pNext = {}; + uint32_t priorityCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D + priorities = {}; }; - static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( QueueFamilyGlobalPriorityPropertiesEXT ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
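
QueueFamilyGlobalPriorityPropertiesEXT works the same way: chained behind QueueFamilyProperties2, the driver writes priorityCount and the first priorityCount slots of the fixed 16-entry priorities array (which the constructor above pre-fills with eLow). A sketch, reusing the `gpu` assumption from the checkpoint example:

#include <vulkan/vulkan.hpp>

bool firstFamilySupportsRealtime( vk::PhysicalDevice gpu )
{
  vk::QueueFamilyGlobalPriorityPropertiesEXT prio{};
  vk::QueueFamilyProperties2                 props{};
  props.pNext = &prio;

  uint32_t count = 1;  // only inspect queue family 0, for brevity
  gpu.getQueueFamilyProperties2( &count, &props );

  for ( uint32_t i = 0; i < prio.priorityCount; ++i )
    if ( prio.priorities[i] == vk::QueueGlobalPriorityEXT::eRealtime )  // ArrayWrapper1D indexes like a plain array
      return true;
  return false;
}
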
); template <> - struct CppType + struct CppType { - using Type = RayTracingShaderGroupCreateInfoKHR; + using Type = QueueFamilyGlobalPriorityPropertiesEXT; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct RayTracingPipelineInterfaceCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPayloadSize_ = {}, - uint32_t maxAttributeSize_ = {}, - uint32_t maxCallableSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxPayloadSize( maxPayloadSize_ ) - , maxAttributeSize( maxAttributeSize_ ) - , maxCallableSize( maxCallableSize_ ) - {} - - RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) - offsetof( RayTracingPipelineInterfaceCreateInfoKHR, pNext ) ); - return *this; - } - - RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RayTracingPipelineInterfaceCreateInfoKHR& operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RayTracingPipelineInterfaceCreateInfoKHR & setMaxPayloadSize( uint32_t maxPayloadSize_ ) VULKAN_HPP_NOEXCEPT - { - maxPayloadSize = maxPayloadSize_; - return *this; - } - - RayTracingPipelineInterfaceCreateInfoKHR & setMaxAttributeSize( uint32_t maxAttributeSize_ ) VULKAN_HPP_NOEXCEPT - { - maxAttributeSize = maxAttributeSize_; - return *this; - } - - RayTracingPipelineInterfaceCreateInfoKHR & setMaxCallableSize( uint32_t maxCallableSize_ ) VULKAN_HPP_NOEXCEPT - { - maxCallableSize = maxCallableSize_; - return *this; - } - - - operator VkRayTracingPipelineInterfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const& ) const = default; -#else - bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxPayloadSize == rhs.maxPayloadSize ) - && ( maxAttributeSize == rhs.maxAttributeSize ) - && ( maxCallableSize == rhs.maxCallableSize ); - } - - bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; - const void* pNext = {}; - uint32_t maxPayloadSize = {}; - uint32_t maxAttributeSize = {}; - uint32_t maxCallableSize = {}; - - }; - static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = RayTracingPipelineInterfaceCreateInfoKHR; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct RayTracingPipelineCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ = {}, - uint32_t maxRecursionDepth_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , libraries( libraries_ ) - , pLibraryInterface( pLibraryInterface_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) - {} - - RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineCreateInfoKHR ) - offsetof( RayTracingPipelineCreateInfoKHR, pNext ) ); - return *this; - } - - RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RayTracingPipelineCreateInfoKHR& operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RayTracingPipelineCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT - { - stageCount = stageCount_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT - { - pStages = pStages_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT - { - groupCount = groupCount_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ ) VULKAN_HPP_NOEXCEPT - { - pGroups = pGroups_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT - { - maxRecursionDepth = maxRecursionDepth_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & libraries_ ) VULKAN_HPP_NOEXCEPT - { - libraries = libraries_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT - { - pLibraryInterface = 
pLibraryInterface_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT - { - layout = layout_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT - { - basePipelineHandle = basePipelineHandle_; - return *this; - } - - RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT - { - basePipelineIndex = basePipelineIndex_; - return *this; - } - - - operator VkRayTracingPipelineCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingPipelineCreateInfoKHR const& ) const = default; -#else - bool operator==( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( libraries == rhs.libraries ) - && ( pLibraryInterface == rhs.pLibraryInterface ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); - } - - bool operator!=( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups = {}; - uint32_t maxRecursionDepth = {}; - VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraries = {}; - const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; - - }; - static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = RayTracingPipelineCreateInfoKHR; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - struct RayTracingShaderGroupCreateInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV; - - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, - uint32_t generalShader_ = {}, - uint32_t closestHitShader_ = {}, - uint32_t anyHitShader_ = {}, - uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) - {} - - RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) ); - return *this; - } - - RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT - { - generalShader = generalShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT - { - closestHitShader = closestHitShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT - { - anyHitShader = anyHitShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT - { - intersectionShader = intersectionShader_; - return *this; - } - - - operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingShaderGroupCreateInfoNV const& ) const = default; -#else - bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( generalShader == rhs.generalShader ) - && ( closestHitShader == rhs.closestHitShader ) - && ( anyHitShader == rhs.anyHitShader ) - && ( intersectionShader == rhs.intersectionShader ); - } - - bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = 
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; - uint32_t generalShader = {}; - uint32_t closestHitShader = {}; - uint32_t anyHitShader = {}; - uint32_t intersectionShader = {}; - - }; - static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = RayTracingShaderGroupCreateInfoNV; - }; - - struct RayTracingPipelineCreateInfoNV - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; - - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, - uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {}, - uint32_t maxRecursionDepth_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) - {} - - RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) ); - return *this; - } - - RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT - { - stageCount = stageCount_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT - { - pStages = pStages_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT - { - groupCount = groupCount_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT - { - pGroups = pGroups_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT - { - maxRecursionDepth = maxRecursionDepth_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT - { - layout = layout_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) 
VULKAN_HPP_NOEXCEPT - { - basePipelineHandle = basePipelineHandle_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT - { - basePipelineIndex = basePipelineIndex_; - return *this; - } - - - operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RayTracingPipelineCreateInfoNV const& ) const = default; -#else - bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); - } - - bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {}; - uint32_t maxRecursionDepth = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; - - }; - static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = RayTracingPipelineCreateInfoNV; - }; - - struct RefreshCycleDurationGOOGLE - { - - - VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT - : refreshDuration( refreshDuration_ ) - {} - - RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RefreshCycleDurationGOOGLE const& ) const = default; -#else - bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( refreshDuration == rhs.refreshDuration ); - } - - bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - uint64_t refreshDuration = {}; - - }; - static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct RenderPassAttachmentBeginInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo; - VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT : attachmentCount( attachmentCount_ ) , pAttachments( pAttachments_ ) {} - RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassAttachmentBeginInfo ) - offsetof( RenderPassAttachmentBeginInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : RenderPassAttachmentBeginInfo( *reinterpret_cast( &rhs ) ) + {} - RenderPassAttachmentBeginInfo& operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + : attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & + operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - RenderPassAttachmentBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -67996,1170 +93972,147 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT + RenderPassAttachmentBeginInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } - - operator VkRenderPassAttachmentBeginInfo const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - 
auto operator<=>( RenderPassAttachmentBeginInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default; #else - bool operator==( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ); } - bool operator!=( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; - const void* pNext = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; }; - static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
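
RenderPassAttachmentBeginInfo is the VK_KHR_imageless_framebuffer piece: the actual image views are supplied when the render pass begins rather than at framebuffer creation, and the new setAttachments overload added above (enhanced mode) fills the count/pointer pair from any contiguous range in one call. An illustrative sketch, assuming the framebuffer was created with the eImageless flag:

#include <array>
#include <vulkan/vulkan.hpp>

void beginImagelessPass( vk::CommandBuffer cmd, vk::RenderPass renderPass, vk::Framebuffer framebuffer,
                         vk::ImageView color, vk::ImageView depth, vk::Extent2D extent )
{
  const std::array<vk::ImageView, 2> attachments{ color, depth };

  vk::RenderPassAttachmentBeginInfo attachInfo{};
  attachInfo.setAttachments( attachments );  // sets attachmentCount and pAttachments together

  vk::RenderPassBeginInfo beginInfo{};
  beginInfo.setPNext( &attachInfo )
           .setRenderPass( renderPass )
           .setFramebuffer( framebuffer )
           .setRenderArea( vk::Rect2D{ vk::Offset2D{}, extent } );

  cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
}
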
); template <> struct CppType { using Type = RenderPassAttachmentBeginInfo; }; - - struct RenderPassBeginInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; - - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, - uint32_t clearValueCount_ = {}, - const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT - : renderPass( renderPass_ ) - , framebuffer( framebuffer_ ) - , renderArea( renderArea_ ) - , clearValueCount( clearValueCount_ ) - , pClearValues( pClearValues_ ) - {} - - RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( RenderPassBeginInfo ) - offsetof( RenderPassBeginInfo, pNext ) ); - return *this; - } - - RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT - { - renderPass = renderPass_; - return *this; - } - - RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT - { - framebuffer = framebuffer_; - return *this; - } - - RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT - { - renderArea = renderArea_; - return *this; - } - - RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT - { - clearValueCount = clearValueCount_; - return *this; - } - - RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT - { - pClearValues = pClearValues_; - return *this; - } - - - operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( RenderPassBeginInfo const& ) const = default; -#else - bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( renderPass == rhs.renderPass ) - && ( framebuffer == rhs.framebuffer ) - && ( renderArea == rhs.renderArea ) - && ( clearValueCount == rhs.clearValueCount ) - && ( pClearValues == rhs.pClearValues ); - } - - bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; - VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; - uint32_t clearValueCount = {}; - const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {}; - - }; - static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" 
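
For the non-imageless path the RenderPassBeginInfo fields removed above (this patch only reflows the struct) are filled directly; clearValueCount/pClearValues must supply one entry per attachment index that is cleared on load. A small sketch of the clear-value plumbing:

#include <array>
#include <vulkan/vulkan.hpp>

vk::RenderPassBeginInfo makeBeginInfo( vk::RenderPass renderPass, vk::Framebuffer framebuffer,
                                       vk::Extent2D extent, std::array<vk::ClearValue, 2> const & clears )
{
  // clears[0] = color, e.g. vk::ClearColorValue{ std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } };
  // clears[1] = depth/stencil, e.g. vk::ClearDepthStencilValue{ 1.0f, 0 };
  return vk::RenderPassBeginInfo{}
    .setRenderPass( renderPass )
    .setFramebuffer( framebuffer )
    .setRenderArea( vk::Rect2D{ vk::Offset2D{}, extent } )
    .setClearValueCount( static_cast<uint32_t>( clears.size() ) )
    .setPClearValues( clears.data() );
}
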
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = RenderPassBeginInfo; - }; - - struct SubpassDescription - { - - - VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t inputAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, - uint32_t preserveAttachmentCount_ = {}, - const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) - {} - - SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassDescription& operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - inputAttachmentCount = inputAttachmentCount_; - return *this; - } - - SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pInputAttachments = pInputAttachments_; - return *this; - } - - SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - colorAttachmentCount = colorAttachmentCount_; - return *this; - } - - SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pColorAttachments = pColorAttachments_; - return *this; - } - - SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pResolveAttachments = pResolveAttachments_; - return *this; - } - - SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT - { - pDepthStencilAttachment = pDepthStencilAttachment_; - return *this; - } - - SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - preserveAttachmentCount = preserveAttachmentCount_; - return *this; - } - - SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pPreserveAttachments = pPreserveAttachments_; - return *this; - 
} - - - operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassDescription const& ) const = default; -#else - bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( inputAttachmentCount == rhs.inputAttachmentCount ) - && ( pInputAttachments == rhs.pInputAttachments ) - && ( colorAttachmentCount == rhs.colorAttachmentCount ) - && ( pColorAttachments == rhs.pColorAttachments ) - && ( pResolveAttachments == rhs.pResolveAttachments ) - && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) - && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) - && ( pPreserveAttachments == rhs.pPreserveAttachments ); - } - - bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t inputAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {}; - uint32_t preserveAttachmentCount = {}; - const uint32_t* pPreserveAttachments = {}; - - }; - static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
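
SubpassDescription is pure count/pointer plumbing. For the common one-color-attachment case only three fields matter; everything else keeps its null/zero defaults. A sketch (note that only the address of colorRef is stored, so it must outlive the description):

#include <vulkan/vulkan.hpp>

// colorRef must stay alive for as long as the returned description is used.
vk::SubpassDescription makeColorOnlySubpass( vk::AttachmentReference const & colorRef )
{
  return vk::SubpassDescription{}
    .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
    .setColorAttachmentCount( 1 )
    .setPColorAttachments( &colorRef );
}
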
);
-
-  struct SubpassDependency
-  {
-
-
-    VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {},
-                                            uint32_t dstSubpass_ = {},
-                                            VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
-                                            VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
-                                            VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
-                                            VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
-                                            VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
-      : srcSubpass( srcSubpass_ )
-      , dstSubpass( dstSubpass_ )
-      , srcStageMask( srcStageMask_ )
-      , dstStageMask( dstStageMask_ )
-      , srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-      , dependencyFlags( dependencyFlags_ )
-    {}
-
-    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SubpassDependency& operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>(&rhs);
-      return *this;
-    }
-
-    SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcSubpass = srcSubpass_;
-      return *this;
-    }
-
-    SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstSubpass = dstSubpass_;
-      return *this;
-    }
-
-    SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcStageMask = srcStageMask_;
-      return *this;
-    }
-
-    SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstStageMask = dstStageMask_;
-      return *this;
-    }
-
-    SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dependencyFlags = dependencyFlags_;
-      return *this;
-    }
-
-
-    operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSubpassDependency*>( this );
-    }
-
-    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSubpassDependency*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassDependency const& ) const = default;
-#else
-    bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( srcSubpass == rhs.srcSubpass )
-          && ( dstSubpass == rhs.dstSubpass )
-          && ( srcStageMask == rhs.srcStageMask )
-          && ( dstStageMask == rhs.dstStageMask )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( dependencyFlags == rhs.dependencyFlags );
-    }
-
-    bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    uint32_t srcSubpass = {};
-    uint32_t dstSubpass = {};
-    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
-    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
-    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
-
-  };
-  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!"
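
A typical companion value for the struct above is the eExternal -> 0 dependency that orders swapchain image acquisition against color-attachment writes; it then feeds pDependencies of the RenderPassCreateInfo whose removed form follows. Sketch:

#include <vulkan/vulkan.hpp>

vk::SubpassDependency makeExternalColorDependency()
{
  return vk::SubpassDependency{}
    .setSrcSubpass( VK_SUBPASS_EXTERNAL )
    .setDstSubpass( 0 )
    .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
    .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
    .setSrcAccessMask( {} )
    .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );
}
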
-
-  struct RenderPassCreateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
-
-    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
-                                               uint32_t attachmentCount_ = {},
-                                               const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {},
-                                               uint32_t subpassCount_ = {},
-                                               const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {},
-                                               uint32_t dependencyCount_ = {},
-                                               const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
-      , subpassCount( subpassCount_ )
-      , pSubpasses( pSubpasses_ )
-      , dependencyCount( dependencyCount_ )
-      , pDependencies( pDependencies_ )
-    {}
-
-    RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassCreateInfo ) - offsetof( RenderPassCreateInfo, pNext ) );
-      return *this;
-    }
-
-    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>(&rhs);
-      return *this;
-    }
-
-    RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      attachmentCount = attachmentCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pAttachments = pAttachments_;
-      return *this;
-    }
-
-    RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      subpassCount = subpassCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pSubpasses = pSubpasses_;
-      return *this;
-    }
-
-    RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dependencyCount = dependencyCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pDependencies = pDependencies_;
-      return *this;
-    }
-
-
-    operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
-    }
-
-    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassCreateInfo const& ) const = default;
-#else
-    bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pSubpasses == rhs.pSubpasses )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pDependencies == rhs.pDependencies );
-    }
-
-    bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
-    uint32_t attachmentCount = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {};
-    uint32_t subpassCount = {};
-    const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {};
-    uint32_t dependencyCount = {};
-    const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {};
-
-  };
-  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
-  {
-    using Type = RenderPassCreateInfo;
-  };
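Putting the three structs together: a sketch of building a single-subpass render pass, assuming `device` is a valid vk::Device and `attachment`, `subpass`, and `dependency` are filled as in the sketches above (all names illustrative):

    vk::RenderPassCreateInfo createInfo;
    createInfo.setAttachmentCount( 1 )
        .setPAttachments( &attachment )
        .setSubpassCount( 1 )
        .setPSubpasses( &subpass )
        .setDependencyCount( 1 )
        .setPDependencies( &dependency );

    vk::RenderPass renderPass = device.createRenderPass( createInfo );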
-
-  struct SubpassDescription2
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
-
-    VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
-                                              VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
-                                              uint32_t viewMask_ = {},
-                                              uint32_t inputAttachmentCount_ = {},
-                                              const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {},
-                                              uint32_t colorAttachmentCount_ = {},
-                                              const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {},
-                                              const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {},
-                                              const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {},
-                                              uint32_t preserveAttachmentCount_ = {},
-                                              const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-      , pipelineBindPoint( pipelineBindPoint_ )
-      , viewMask( viewMask_ )
-      , inputAttachmentCount( inputAttachmentCount_ )
-      , pInputAttachments( pInputAttachments_ )
-      , colorAttachmentCount( colorAttachmentCount_ )
-      , pColorAttachments( pColorAttachments_ )
-      , pResolveAttachments( pResolveAttachments_ )
-      , pDepthStencilAttachment( pDepthStencilAttachment_ )
-      , preserveAttachmentCount( preserveAttachmentCount_ )
-      , pPreserveAttachments( pPreserveAttachments_ )
-    {}
-
-    SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( SubpassDescription2 ) - offsetof( SubpassDescription2, pNext ) );
-      return *this;
-    }
-
-    SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SubpassDescription2& operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>(&rhs);
-      return *this;
-    }
-
-    SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pipelineBindPoint = pipelineBindPoint_;
-      return *this;
-    }
-
-    SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      viewMask = viewMask_;
-      return *this;
-    }
-
-    SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      inputAttachmentCount = inputAttachmentCount_;
-      return *this;
-    }
-
-    SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pInputAttachments = pInputAttachments_;
-      return *this;
-    }
-
-    SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      colorAttachmentCount = colorAttachmentCount_;
-      return *this;
-    }
-
-    SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pColorAttachments = pColorAttachments_;
-      return *this;
-    }
-
-    SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pResolveAttachments = pResolveAttachments_;
-      return *this;
-    }
-
-    SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pDepthStencilAttachment = pDepthStencilAttachment_;
-      return *this;
-    }
-
-    SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      preserveAttachmentCount = preserveAttachmentCount_;
-      return *this;
-    }
-
-    SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pPreserveAttachments = pPreserveAttachments_;
-      return *this;
-    }
-
-
-    operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSubpassDescription2*>( this );
-    }
-
-    operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSubpassDescription2*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassDescription2 const& ) const = default;
-#else
-    bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( pipelineBindPoint == rhs.pipelineBindPoint )
-          && ( viewMask == rhs.viewMask )
-          && ( inputAttachmentCount == rhs.inputAttachmentCount )
-          && ( pInputAttachments == rhs.pInputAttachments )
-          && ( colorAttachmentCount == rhs.colorAttachmentCount )
-          && ( pColorAttachments == rhs.pColorAttachments )
-          && ( pResolveAttachments == rhs.pResolveAttachments )
-          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
-          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
-          && ( pPreserveAttachments == rhs.pPreserveAttachments );
-    }
-
-    bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
-    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
-    uint32_t viewMask = {};
-    uint32_t inputAttachmentCount = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {};
-    uint32_t colorAttachmentCount = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {};
-    uint32_t preserveAttachmentCount = {};
-    const uint32_t* pPreserveAttachments = {};
-
-  };
-  static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eSubpassDescription2>
-  {
-    using Type = SubpassDescription2;
-  };
-
-  struct SubpassDependency2
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
-
-    VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {},
-                                             uint32_t dstSubpass_ = {},
-                                             VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
-                                             VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
-                                             VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
-                                             VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
-                                             VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
-                                             int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT
-      : srcSubpass( srcSubpass_ )
-      , dstSubpass( dstSubpass_ )
-      , srcStageMask( srcStageMask_ )
-      , dstStageMask( dstStageMask_ )
-      , srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
-      , dependencyFlags( dependencyFlags_ )
-      , viewOffset( viewOffset_ )
-    {}
-
-    SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( SubpassDependency2 ) - offsetof( SubpassDependency2, pNext ) );
-      return *this;
-    }
-
-    SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SubpassDependency2& operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>(&rhs);
-      return *this;
-    }
-
-    SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcSubpass = srcSubpass_;
-      return *this;
-    }
-
-    SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstSubpass = dstSubpass_;
-      return *this;
-    }
-
-    SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcStageMask = srcStageMask_;
-      return *this;
-    }
-
-    SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstStageMask = dstStageMask_;
-      return *this;
-    }
-
-    SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      srcAccessMask = srcAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstAccessMask = dstAccessMask_;
-      return *this;
-    }
-
-    SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dependencyFlags = dependencyFlags_;
-      return *this;
-    }
-
-    SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      viewOffset = viewOffset_;
-      return *this;
-    }
-
-
-    operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSubpassDependency2*>( this );
-    }
-
-    operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSubpassDependency2*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassDependency2 const& ) const = default;
-#else
-    bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( srcSubpass == rhs.srcSubpass )
-          && ( dstSubpass == rhs.dstSubpass )
-          && ( srcStageMask == rhs.srcStageMask )
-          && ( dstStageMask == rhs.dstStageMask )
-          && ( srcAccessMask == rhs.srcAccessMask )
-          && ( dstAccessMask == rhs.dstAccessMask )
-          && ( dependencyFlags == rhs.dependencyFlags )
-          && ( viewOffset == rhs.viewOffset );
-    }
-
-    bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
-    const void* pNext = {};
-    uint32_t srcSubpass = {};
-    uint32_t dstSubpass = {};
-    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
-    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
-    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
-    int32_t viewOffset = {};
-
-  };
-  static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eSubpassDependency2>
-  {
-    using Type = SubpassDependency2;
-  };
-
-  struct RenderPassCreateInfo2
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
-
-    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
-                                                uint32_t attachmentCount_ = {},
-                                                const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {},
-                                                uint32_t subpassCount_ = {},
-                                                const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {},
-                                                uint32_t dependencyCount_ = {},
-                                                const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {},
-                                                uint32_t correlatedViewMaskCount_ = {},
-                                                const uint32_t* pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT
-      : flags( flags_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
-      , subpassCount( subpassCount_ )
-      , pSubpasses( pSubpasses_ )
-      , dependencyCount( dependencyCount_ )
-      , pDependencies( pDependencies_ )
-      , correlatedViewMaskCount( correlatedViewMaskCount_ )
-      , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
-    {}
-
-    RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassCreateInfo2 ) - offsetof( RenderPassCreateInfo2, pNext ) );
-      return *this;
-    }
-
-    RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    RenderPassCreateInfo2& operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>(&rhs);
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      attachmentCount = attachmentCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pAttachments = pAttachments_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      subpassCount = subpassCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pSubpasses = pSubpasses_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dependencyCount = dependencyCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pDependencies = pDependencies_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      correlatedViewMaskCount = correlatedViewMaskCount_;
-      return *this;
-    }
-
-    RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pCorrelatedViewMasks = pCorrelatedViewMasks_;
-      return *this;
-    }
-
-
-    operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
-    }
-
-    operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassCreateInfo2 const& ) const = default;
-#else
-    bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( attachmentCount == rhs.attachmentCount )
-          && ( pAttachments == rhs.pAttachments )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pSubpasses == rhs.pSubpasses )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pDependencies == rhs.pDependencies )
-          && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
-          && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
-    }
-
-    bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
-    uint32_t attachmentCount = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {};
-    uint32_t subpassCount = {};
-    const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {};
-    uint32_t dependencyCount = {};
-    const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {};
-    uint32_t correlatedViewMaskCount = {};
-    const uint32_t* pCorrelatedViewMasks = {};
-
-  };
-  static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
-  {
-    using Type = RenderPassCreateInfo2;
-  };
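The ...2 variants add the multiview fields (viewMask, viewOffset, correlated view masks) on top of the original structs. A sketch of a two-view subpass, assuming Vulkan 1.2 and a valid `device` (every other name and mask is illustrative):

    vk::SubpassDescription2 subpass2;
    subpass2.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
        .setViewMask( 0b11 );  // render to views 0 and 1

    const uint32_t correlatedMask = 0b11;
    vk::RenderPassCreateInfo2 createInfo2;
    createInfo2.setSubpassCount( 1 )
        .setPSubpasses( &subpass2 )
        .setCorrelatedViewMaskCount( 1 )
        .setPCorrelatedViewMasks( &correlatedMask );

    vk::RenderPass renderPass = device.createRenderPass2( createInfo2 );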
+  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
 
   struct RenderPassFragmentDensityMapCreateInfoEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
 
-    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(
+      VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
       : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
     {}
 
-    RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(
+      RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : RenderPassFragmentDensityMapCreateInfoEXT(
+          *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT &
+      operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassFragmentDensityMapCreateInfoEXT &
+      operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) - offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
       return *this;
     }
 
-    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>(&rhs);
-      return *this;
-    }
-
-    RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment(
+      VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
     {
       fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
       return *this;
     }
 
-
-    operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
     }
 
     operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default;
 #else
-    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
     }
 
-    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType       sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+    const void *                              pNext = {};
     VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
-
   };
-  static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) ==
+                   sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
@@ -69169,108 +94122,134 @@ namespace VULKAN_HPP_NAMESPACE
   struct RenderPassInputAttachmentAspectCreateInfo
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eRenderPassInputAttachmentAspectCreateInfo;
 
-    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {},
-                                                                    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(
+      uint32_t                                                     aspectReferenceCount_ = {},
+      const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT
       : aspectReferenceCount( aspectReferenceCount_ )
       , pAspectReferences( pAspectReferences_ )
     {}
 
-    RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(
+      RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : RenderPassInputAttachmentAspectCreateInfo(
+          *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassInputAttachmentAspectCreateInfo(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const &
+        aspectReferences_ )
+      : aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) )
+      , pAspectReferences( aspectReferences_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &
+      operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassInputAttachmentAspectCreateInfo &
+      operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassInputAttachmentAspectCreateInfo ) - offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
       return *this;
     }
 
-    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>(&rhs);
-      return *this;
-    }
-
-    RenderPassInputAttachmentAspectCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassInputAttachmentAspectCreateInfo &
+      setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
     {
       aspectReferenceCount = aspectReferenceCount_;
       return *this;
     }
 
-    RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences(
+      const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
     {
       pAspectReferences = pAspectReferences_;
      return *this;
    }
 
-
-    operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassInputAttachmentAspectCreateInfo & setAspectReferences(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const &
+        aspectReferences_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+      aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
+      pAspectReferences    = aspectReferences_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>( this );
     }
 
     operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default;
 #else
-    bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( aspectReferenceCount == rhs.aspectReferenceCount )
-          && ( pAspectReferences == rhs.pAspectReferences );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) &&
+             ( pAspectReferences == rhs.pAspectReferences );
     }
 
-    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
-    const void* pNext = {};
-    uint32_t aspectReferenceCount = {};
-    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType                          sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+    const void *                                                 pNext = {};
+    uint32_t                                                     aspectReferenceCount = {};
+    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
   };
-  static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) ==
+                   sizeof( VkRenderPassInputAttachmentAspectCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
   {
     using Type = RenderPassInputAttachmentAspectCreateInfo;
   };
+  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
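The ArrayProxyNoTemporaries constructor added here fills the aspectReferenceCount/pAspectReferences pair from a single container argument; a minimal sketch (enhanced mode enabled, all values illustrative):

    std::array<vk::InputAttachmentAspectReference, 1> refs = {
        vk::InputAttachmentAspectReference( 0, 0, vk::ImageAspectFlagBits::eColor ) };

    // Count and pointer are derived from the container in one step.
    vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( refs );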
 
   struct RenderPassMultiviewCreateInfo
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
 
-    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {},
-                                                        const uint32_t* pViewMasks_ = {},
-                                                        uint32_t dependencyCount_ = {},
-                                                        const int32_t* pViewOffsets_ = {},
-                                                        uint32_t correlationMaskCount_ = {},
-                                                        const uint32_t* pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t         subpassCount_ = {},
+                                                        const uint32_t * pViewMasks_ = {},
+                                                        uint32_t         dependencyCount_ = {},
+                                                        const int32_t *  pViewOffsets_ = {},
+                                                        uint32_t         correlationMaskCount_ = {},
+                                                        const uint32_t * pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT
       : subpassCount( subpassCount_ )
       , pViewMasks( pViewMasks_ )
       , dependencyCount( dependencyCount_ )
@@ -69279,24 +94258,38 @@ namespace VULKAN_HPP_NAMESPACE
       , pCorrelationMasks( pCorrelationMasks_ )
     {}
 
-    RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+    RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
+    {}
 
-    RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassMultiviewCreateInfo(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const &  viewOffsets_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {} )
+      : subpassCount( static_cast<uint32_t>( viewMasks_.size() ) )
+      , pViewMasks( viewMasks_.data() )
+      , dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) )
+      , pViewOffsets( viewOffsets_.data() )
+      , correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) )
+      , pCorrelationMasks( correlationMasks_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
+      operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
       return *this;
     }
 
-    RenderPassMultiviewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -69308,109 +94301,139 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t* pViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
     {
       pViewMasks = pViewMasks_;
       return *this;
     }
 
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassMultiviewCreateInfo & setViewMasks(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = static_cast<uint32_t>( viewMasks_.size() );
+      pViewMasks   = viewMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
     RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
     {
       dependencyCount = dependencyCount_;
       return *this;
     }
 
-    RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t* pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
     {
       pViewOffsets = pViewOffsets_;
       return *this;
     }
 
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassMultiviewCreateInfo & setViewOffsets(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
+      pViewOffsets    = viewOffsets_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
     RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
     {
       correlationMaskCount = correlationMaskCount_;
       return *this;
     }
 
-    RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
     {
       pCorrelationMasks = pCorrelationMasks_;
       return *this;
     }
 
-
-    operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassMultiviewCreateInfo & setCorrelationMasks(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
+      correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
+      pCorrelationMasks    = correlationMasks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>( this );
     }
 
     operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
+      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassMultiviewCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default;
 #else
-    bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( subpassCount == rhs.subpassCount )
-          && ( pViewMasks == rhs.pViewMasks )
-          && ( dependencyCount == rhs.dependencyCount )
-          && ( pViewOffsets == rhs.pViewOffsets )
-          && ( correlationMaskCount == rhs.correlationMaskCount )
-          && ( pCorrelationMasks == rhs.pCorrelationMasks );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) &&
+             ( pViewMasks == rhs.pViewMasks ) && ( dependencyCount == rhs.dependencyCount ) &&
+             ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) &&
+             ( pCorrelationMasks == rhs.pCorrelationMasks );
     }
 
-    bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
-    const void* pNext = {};
-    uint32_t subpassCount = {};
-    const uint32_t* pViewMasks = {};
-    uint32_t dependencyCount = {};
-    const int32_t* pViewOffsets = {};
-    uint32_t correlationMaskCount = {};
-    const uint32_t* pCorrelationMasks = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
+    const void *                        pNext = {};
+    uint32_t                            subpassCount = {};
+    const uint32_t *                    pViewMasks = {};
+    uint32_t                            dependencyCount = {};
+    const int32_t *                     pViewOffsets = {};
+    uint32_t                            correlationMaskCount = {};
+    const uint32_t *                    pCorrelationMasks = {};
   };
-  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
   {
     using Type = RenderPassMultiviewCreateInfo;
   };
+  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
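Likewise, the new setViewMasks/setViewOffsets/setCorrelationMasks overloads added above keep each count/pointer pair in sync automatically; a sketch with illustrative masks (enhanced mode enabled):

    std::array<uint32_t, 2> viewMasks = { 0b11, 0b11 };  // two subpasses, each using views 0 and 1

    vk::RenderPassMultiviewCreateInfo multiview;
    multiview.setViewMasks( viewMasks );  // sets subpassCount and pViewMasks together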
 
   struct SubpassSampleLocationsEXT
   {
-
-
-    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
-                                                    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(
+      uint32_t                                     subpassIndex_ = {},
+      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
       : subpassIndex( subpassIndex_ )
       , sampleLocationsInfo( sampleLocationsInfo_ )
     {}
 
-    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    VULKAN_HPP_CONSTEXPR
+    SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT &
+      operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
       return *this;
     }
 
@@ -69420,155 +94443,193 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    SubpassSampleLocationsEXT & setSampleLocationsInfo(
+      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
     {
       sampleLocationsInfo = sampleLocationsInfo_;
       return *this;
     }
 
-
-    operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
+      return *reinterpret_cast<const VkSubpassSampleLocationsEXT *>( this );
     }
 
     operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
+      return *reinterpret_cast<VkSubpassSampleLocationsEXT *>( this );
    }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassSampleLocationsEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
 #else
-    bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      return ( subpassIndex == rhs.subpassIndex )
-          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+      return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
     }
 
-    bool operator!=( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    uint32_t subpassIndex = {};
+    uint32_t                                     subpassIndex = {};
     VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
-
   };
-  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   struct RenderPassSampleLocationsBeginInfoEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eRenderPassSampleLocationsBeginInfoEXT;
 
-    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {},
-                                                                const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {},
-                                                                uint32_t postSubpassSampleLocationsCount_ = {},
                                                               const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(
+      uint32_t                                                    attachmentInitialSampleLocationsCount_ = {},
+      const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT *  pAttachmentInitialSampleLocations_ = {},
+      uint32_t                                                    postSubpassSampleLocationsCount_ = {},
+      const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT *     pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
      : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
       , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
       , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
       , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
     {}
 
-    RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassSampleLocationsBeginInfoEXT ) - offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
 
     RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : RenderPassSampleLocationsBeginInfoEXT(
+          *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
+    {}
 
-    RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassSampleLocationsBeginInfoEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const &
+        attachmentInitialSampleLocations_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const &
+        postSubpassSampleLocations_ = {} )
+      : attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) )
+      , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() )
+      , postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) )
+      , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
+      operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassSampleLocationsBeginInfoEXT &
+      operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
       return *this;
     }
 
-    RenderPassSampleLocationsBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassSampleLocationsBeginInfoEXT &
+      setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
     {
       attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
       return *this;
     }
 
-    RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations(
+      const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ )
+      VULKAN_HPP_NOEXCEPT
     {
       pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
       return *this;
     }
 
-    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const &
+        attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
+      pAttachmentInitialSampleLocations     = attachmentInitialSampleLocations_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderPassSampleLocationsBeginInfoEXT &
+      setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
     {
       postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
       return *this;
     }
 
-    RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations(
+      const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
     {
       pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
       return *this;
     }
 
-
-    operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const &
+        postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+      postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
+      pPostSubpassSampleLocations     = postSubpassSampleLocations_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>( this );
     }
 
     operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default;
 #else
-    bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
-          && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
-          && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
-          && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) &&
+             ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) &&
+             ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) &&
+             ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
     }
 
-    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
-    const void* pNext = {};
-    uint32_t attachmentInitialSampleLocationsCount = {};
-    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {};
-    uint32_t postSubpassSampleLocationsCount = {};
-    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType                         sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+    const void *                                                pNext = {};
+    uint32_t                                                    attachmentInitialSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT *  pAttachmentInitialSampleLocations = {};
+    uint32_t                                                    postSubpassSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT *     pPostSubpassSampleLocations = {};
   };
-  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
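RenderPassSampleLocationsBeginInfoEXT also gains array-proxy construction here; a sketch that attaches per-subpass sample locations, assuming `sampleLocationsInfo` (a vk::SampleLocationsInfoEXT) was filled elsewhere and enhanced mode is enabled:

    vk::SubpassSampleLocationsEXT subpassLocations;
    subpassLocations.setSubpassIndex( 0 ).setSampleLocationsInfo( sampleLocationsInfo );

    std::array<vk::SubpassSampleLocationsEXT, 1> postSubpass = { subpassLocations };
    vk::RenderPassSampleLocationsBeginInfoEXT beginInfo( {}, postSubpass );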
@@ -69578,80 +94639,79 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct RenderPassTransformBeginInfoQCOM
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
 
-    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT
-      : transform( transform_ )
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(
+      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
+        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT : transform( transform_ )
     {}
 
-    RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassTransformBeginInfoQCOM ) - offsetof( RenderPassTransformBeginInfoQCOM, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+    RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    RenderPassTransformBeginInfoQCOM& operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM &
+      operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
       return *this;
     }
 
-    RenderPassTransformBeginInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    RenderPassTransformBeginInfoQCOM &
+      setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
     {
       transform = transform_;
       return *this;
     }
 
-
-    operator VkRenderPassTransformBeginInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
+    operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
+      return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM *>( this );
    }
 
     operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
+      return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( RenderPassTransformBeginInfoQCOM const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default;
 #else
-    bool operator==( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( transform == rhs.transform );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
     }
 
-    bool operator!=( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
-
+    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eRenderPassTransformBeginInfoQCOM;
+    void *                                            pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
+      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
   };
-  static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassTransformBeginInfoQCOM>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
@@ -69659,274 +94719,47 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = RenderPassTransformBeginInfoQCOM;
   };
compareOp_ ) - , minLod( minLod_ ) - , maxLod( maxLod_ ) - , borderColor( borderColor_ ) - , unnormalizedCoordinates( unnormalizedCoordinates_ ) - {} - - SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerCreateInfo ) - offsetof( SamplerCreateInfo, pNext ) ); - return *this; - } - - SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT - { - magFilter = magFilter_; - return *this; - } - - SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT - { - minFilter = minFilter_; - return *this; - } - - SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT - { - mipmapMode = mipmapMode_; - return *this; - } - - SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT - { - addressModeU = addressModeU_; - return *this; - } - - SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT - { - addressModeV = addressModeV_; - return *this; - } - - SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT - { - addressModeW = addressModeW_; - return *this; - } - - SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT - { - mipLodBias = mipLodBias_; - return *this; - } - - SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT - { - anisotropyEnable = anisotropyEnable_; - return *this; - } - - SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT - { - maxAnisotropy = maxAnisotropy_; - return *this; - } - - SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT - { - compareEnable = compareEnable_; - return *this; - } - - SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT - { - compareOp = compareOp_; - return *this; - } - - SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT - { - minLod = minLod_; - return *this; - } - - SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT - { - maxLod = maxLod_; - return *this; - } - - SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT - { - borderColor = borderColor_; - return *this; - } - - SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT - { - unnormalizedCoordinates = unnormalizedCoordinates_; - return *this; - } - - - operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerCreateInfo const& ) const = 
default; -#else - bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( magFilter == rhs.magFilter ) - && ( minFilter == rhs.minFilter ) - && ( mipmapMode == rhs.mipmapMode ) - && ( addressModeU == rhs.addressModeU ) - && ( addressModeV == rhs.addressModeV ) - && ( addressModeW == rhs.addressModeW ) - && ( mipLodBias == rhs.mipLodBias ) - && ( anisotropyEnable == rhs.anisotropyEnable ) - && ( maxAnisotropy == rhs.maxAnisotropy ) - && ( compareEnable == rhs.compareEnable ) - && ( compareOp == rhs.compareOp ) - && ( minLod == rhs.minLod ) - && ( maxLod == rhs.maxLod ) - && ( borderColor == rhs.borderColor ) - && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); - } - - bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - float mipLodBias = {}; - VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; - float maxAnisotropy = {}; - VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; - VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - float minLod = {}; - float maxLod = {}; - VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; - VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; - - }; - static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = SamplerCreateInfo; - }; - struct SamplerCustomBorderColorCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSamplerCustomBorderColorCreateInfoEXT; - SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + SamplerCustomBorderColorCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT : customBorderColor( customBorderColor_ ) , format( format_ ) {} - SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerCustomBorderColorCreateInfoEXT ) - offsetof( SamplerCustomBorderColorCreateInfoEXT, pNext ) ); - return *this; - } + SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SamplerCustomBorderColorCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SamplerCustomBorderColorCreateInfoEXT& operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerCustomBorderColorCreateInfoEXT & + operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCustomBorderColorCreateInfoEXT & + operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerCustomBorderColorCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT + SamplerCustomBorderColorCreateInfoEXT & + setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT { customBorderColor = customBorderColor_; return *this; @@ -69938,29 +94771,26 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSamplerCustomBorderColorCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {}; - 
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; - static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -69970,787 +94800,376 @@ namespace VULKAN_HPP_NAMESPACE struct SamplerReductionModeCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo; - VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = + VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT : reductionMode( reductionMode_ ) {} - SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerReductionModeCreateInfo ) - offsetof( SamplerReductionModeCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SamplerReductionModeCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SamplerReductionModeCreateInfo& operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & + operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerReductionModeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT + SamplerReductionModeCreateInfo & + setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT { reductionMode = reductionMode_; return *this; } - - operator VkSamplerReductionModeCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SamplerReductionModeCreateInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
 #else
-    bool operator==( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( reductionMode == rhs.reductionMode );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode );
     }
 
-    bool operator!=( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
      return !operator==( rhs );
    }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode =
+      VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
   };
-  static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerReductionModeCreateInfo>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
   {
     using Type = SamplerReductionModeCreateInfo;
   };
-
-  struct SamplerYcbcrConversionCreateInfo
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
-
-    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
-                                                           VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
-                                                           VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
-                                                           VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
-                                                           VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
-                                                           VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
-                                                           VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
-                                                           VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT
-    : format( format_ )
-    , ycbcrModel( ycbcrModel_ )
-    , ycbcrRange( ycbcrRange_ )
-    , components( components_ )
-    , xChromaOffset( xChromaOffset_ )
-    , yChromaOffset( yChromaOffset_ )
-    , chromaFilter( chromaFilter_ )
-    , forceExplicitReconstruction( forceExplicitReconstruction_ )
-    {}
-
-    SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) );
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>(&rhs);
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
-    {
-      format = format_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
-    {
-      ycbcrModel = ycbcrModel_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
-    {
-      ycbcrRange = ycbcrRange_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
-    {
-      components = components_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      xChromaOffset = xChromaOffset_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
-    {
-      yChromaOffset = yChromaOffset_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
-    {
-      chromaFilter = chromaFilter_;
-      return *this;
-    }
-
-    SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
-    {
-
forceExplicitReconstruction = forceExplicitReconstruction_; - return *this; - } - - - operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerYcbcrConversionCreateInfo const& ) const = default; -#else - bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( ycbcrModel == rhs.ycbcrModel ) - && ( ycbcrRange == rhs.ycbcrRange ) - && ( components == rhs.components ) - && ( xChromaOffset == rhs.xChromaOffset ) - && ( yChromaOffset == rhs.yChromaOffset ) - && ( chromaFilter == rhs.chromaFilter ) - && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); - } - - bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; - VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; - VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; - - }; - static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = SamplerYcbcrConversionCreateInfo; - }; + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; struct SamplerYcbcrConversionImageFormatProperties { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSamplerYcbcrConversionImageFormatProperties; - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( + uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) {} - SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( + SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionImageFormatProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionImageFormatProperties & + operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionImageFormatProperties & + operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionImageFormatProperties ) - offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; - } - - SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SamplerYcbcrConversionImageFormatProperties const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default; #else - bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( 
combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); } - bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; - void* pNext = {}; - uint32_t combinedImageSamplerDescriptorCount = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + void * pNext = {}; + uint32_t combinedImageSamplerDescriptorCount = {}; }; - static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == + sizeof( VkSamplerYcbcrConversionImageFormatProperties ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = SamplerYcbcrConversionImageFormatProperties; }; + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; struct SamplerYcbcrConversionInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo; - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT : conversion( conversion_ ) {} - SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SamplerYcbcrConversionInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & + operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SamplerYcbcrConversionInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionInfo & + setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT { conversion = conversion_; 
      return *this;
     }
-
-    operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
+      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this );
     }
 
     operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
+      return *reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this );
    }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SamplerYcbcrConversionInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
 #else
-    bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( conversion == rhs.conversion );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion );
     }
 
-    bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+    const void * pNext = {};
     VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
-
   };
-  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value,
+                 "struct wrapper is not a standard layout!"
); template <> struct CppType { using Type = SamplerYcbcrConversionInfo; }; + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; - struct SemaphoreCreateInfo +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ScreenSurfaceCreateInfoQNX { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX; - VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, + struct _screen_context * context_ = {}, + struct _screen_window * window_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) + , context( context_ ) + , window( window_ ) {} - SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenSurfaceCreateInfoQNX( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & + operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreCreateInfo ) - offsetof( SemaphoreCreateInfo, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + ScreenSurfaceCreateInfoQNX & + setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - - operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT + ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + context = context_; + return *this; } - operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + window = window_; + return *this; } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreCreateInfo const& ) const = default; -#else - bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); + return *reinterpret_cast( this ); } - bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + operator 
VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default; +# else + bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && + ( window == rhs.window ); + } + + bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {}; + struct _screen_context * context = {}; + struct _screen_window * window = {}; }; - static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> - struct CppType + struct CppType { - using Type = SemaphoreCreateInfo; - }; - - struct SemaphoreGetFdInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR; - - VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , handleType( handleType_ ) - {} - - SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreGetFdInfoKHR ) - offsetof( SemaphoreGetFdInfoKHR, pNext ) ); - return *this; - } - - SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - - operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreGetFdInfoKHR const& ) const = default; -#else - bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( 
pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - - }; - static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = SemaphoreGetFdInfoKHR; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SemaphoreGetWin32HandleInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; - - VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , handleType( handleType_ ) - {} - - SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) ); - return *this; - } - - SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - - operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreGetWin32HandleInfoKHR const& ) const = default; -#else - bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - - }; - static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = SemaphoreGetWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct SemaphoreSignalInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; - - VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT - : semaphore( semaphore_ ) - , value( value_ ) - {} - - SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreSignalInfo ) - offsetof( SemaphoreSignalInfo, pNext ) ); - return *this; - } - - SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreSignalInfo& operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT - { - value = value_; - return *this; - } - - - operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreSignalInfo const& ) const = default; -#else - bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( value == rhs.value ); - } - - bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - uint64_t value = {}; - - }; - static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = SemaphoreSignalInfo; + using Type = ScreenSurfaceCreateInfoQNX; }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ struct SemaphoreTypeCreateInfo { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; - VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, - uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, + uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT : semaphoreType( semaphoreType_ ) , initialValue( initialValue_ ) {} - SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreTypeCreateInfo ) - offsetof( SemaphoreTypeCreateInfo, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SemaphoreTypeCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SemaphoreTypeCreateInfo& operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & + operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SemaphoreTypeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -70768,45 +95187,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSemaphoreTypeCreateInfo const&() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreTypeCreateInfo const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default; #else - bool operator==( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphoreType == rhs.semaphoreType ) - && ( initialValue == rhs.initialValue ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && + ( initialValue == rhs.initialValue ); } - bool operator!=( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; - const void* pNext 
= {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; - uint64_t initialValue = {}; - + uint64_t initialValue = {}; }; - static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" ); + static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -70814,136 +95227,27 @@ namespace VULKAN_HPP_NAMESPACE { using Type = SemaphoreTypeCreateInfo; }; - - struct SemaphoreWaitInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo; - - VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, - uint32_t semaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {}, - const uint64_t* pValues_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , semaphoreCount( semaphoreCount_ ) - , pSemaphores( pSemaphores_ ) - , pValues( pValues_ ) - {} - - SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreWaitInfo ) - offsetof( SemaphoreWaitInfo, pNext ) ); - return *this; - } - - SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SemaphoreWaitInfo& operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - semaphoreCount = semaphoreCount_; - return *this; - } - - SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT - { - pSemaphores = pSemaphores_; - return *this; - } - - SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT - { - pValues = pValues_; - return *this; - } - - - operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SemaphoreWaitInfo const& ) const = default; -#else - bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( semaphoreCount == rhs.semaphoreCount ) - && ( pSemaphores == rhs.pSemaphores ) - && ( pValues == rhs.pValues ); - } - - bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; - uint32_t semaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {}; - const uint64_t* pValues = {}; - - }; - 
static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = SemaphoreWaitInfo; - }; + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; struct SetStateFlagsIndirectCommandNV { +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {} - - VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT - : data( data_ ) - {} + VULKAN_HPP_CONSTEXPR + SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SetStateFlagsIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SetStateFlagsIndirectCommandNV& operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & + operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -70953,220 +95257,117 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSetStateFlagsIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SetStateFlagsIndirectCommandNV const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default; #else - bool operator==( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( data == rhs.data ); } - bool operator!=( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: uint32_t data = {}; - - }; - static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ShaderModuleCreateInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; - - VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, - size_t codeSize_ = {}, - const uint32_t* pCode_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , codeSize( codeSize_ ) - , pCode( pCode_ ) - {} - - ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ShaderModuleCreateInfo ) - offsetof( ShaderModuleCreateInfo, pNext ) ); - return *this; - } - - ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT - { - codeSize = codeSize_; - return *this; - } - - ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT - { - pCode = pCode_; - return *this; - } - - - operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderModuleCreateInfo const& ) const = default; -#else - bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( codeSize == rhs.codeSize ) - && ( pCode == rhs.pCode ); - } - - bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; - size_t codeSize = {}; - const uint32_t* pCode = {}; - - }; - static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = ShaderModuleCreateInfo; }; + static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); struct ShaderModuleValidationCacheCreateInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eShaderModuleValidationCacheCreateInfoEXT; - VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT : validationCache( validationCache_ ) {} - ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( + ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : ShaderModuleValidationCacheCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & + operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT & + operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & + setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT { validationCache = validationCache_; return *this; } - - operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; #else - bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( validationCache == rhs.validationCache ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); } - bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; - }; - static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == + sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -71176,13 +95377,12 @@ namespace VULKAN_HPP_NAMESPACE struct ShaderResourceUsageAMD { - - - VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, - uint32_t numUsedSgprs_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, + uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, - size_t ldsUsageSizeInBytes_ = {}, - size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + size_t ldsUsageSizeInBytes_ = {}, + size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT : numUsedVgprs( numUsedVgprs_ ) , numUsedSgprs( numUsedSgprs_ ) , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) @@ -71190,71 +95390,71 @@ namespace VULKAN_HPP_NAMESPACE , scratchMemUsageInBytes( scratchMemUsageInBytes_ ) {} - ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderResourceUsageAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderResourceUsageAMD & + operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderResourceUsageAMD 
const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderResourceUsageAMD const & ) const = default; #else - bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( numUsedVgprs == rhs.numUsedVgprs ) - && ( numUsedSgprs == rhs.numUsedSgprs ) - && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) - && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) - && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); + return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && + ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && + ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && + ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); } - bool operator!=( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t numUsedVgprs = {}; - uint32_t numUsedSgprs = {}; + uint32_t numUsedVgprs = {}; + uint32_t numUsedSgprs = {}; uint32_t ldsSizePerLocalWorkGroup = {}; - size_t ldsUsageSizeInBytes = {}; - size_t scratchMemUsageInBytes = {}; - + size_t ldsUsageSizeInBytes = {}; + size_t scratchMemUsageInBytes = {}; }; - static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" ); + static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); struct ShaderStatisticsInfoAMD { - - - VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, - VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, - uint32_t numPhysicalVgprs_ = {}, - uint32_t numPhysicalSgprs_ = {}, - uint32_t numAvailableVgprs_ = {}, - uint32_t numAvailableSgprs_ = {}, - std::array const& computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, + uint32_t numPhysicalVgprs_ = {}, + uint32_t numPhysicalSgprs_ = {}, + uint32_t numAvailableVgprs_ = {}, + uint32_t numAvailableSgprs_ = {}, + std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT : shaderStageMask( shaderStageMask_ ) , resourceUsage( resourceUsage_ ) , numPhysicalVgprs( numPhysicalVgprs_ ) @@ -71264,128 +95464,127 @@ namespace VULKAN_HPP_NAMESPACE , computeWorkGroupSize( computeWorkGroupSize_ ) {} - ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + VULKAN_HPP_CONSTEXPR_14 + ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderStatisticsInfoAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD & + operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderStatisticsInfoAMD & operator=( 
VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default; #else - bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( shaderStageMask == rhs.shaderStageMask ) - && ( resourceUsage == rhs.resourceUsage ) - && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) - && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) - && ( numAvailableVgprs == rhs.numAvailableVgprs ) - && ( numAvailableSgprs == rhs.numAvailableSgprs ) - && ( computeWorkGroupSize == rhs.computeWorkGroupSize ); + return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && + ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && + ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) && + ( computeWorkGroupSize == rhs.computeWorkGroupSize ); } - bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; - VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; - uint32_t numPhysicalVgprs = {}; - uint32_t numPhysicalSgprs = {}; - uint32_t numAvailableVgprs = {}; - uint32_t numAvailableSgprs = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; + uint32_t numPhysicalVgprs = {}; + uint32_t numPhysicalSgprs = {}; + uint32_t numAvailableVgprs = {}; + uint32_t numAvailableSgprs = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; - }; - static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" ); + static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
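  // ---- Editor's note (illustration, not generated code): ShaderStatisticsInfoAMD is
  // an output struct returned through VK_AMD_shader_info, not filled by the caller.
  // A minimal sketch, assuming a vk::Device `device` and a vk::Pipeline `pipeline`
  // (hypothetical names), the extension enabled, and the enhanced-mode overload that
  // returns the raw bytes as std::vector<uint8_t>.
#if 0
  std::vector<uint8_t> blob = device.getShaderInfoAMD(
    pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eStatistics );
  vk::ShaderStatisticsInfoAMD stats;
  std::memcpy( &stats, blob.data(), sizeof( stats ) );  // the blob begins with this struct
  // stats.numUsedVgprs / stats.numUsedSgprs now report per-stage register pressure.
#endif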
); struct SharedPresentSurfaceCapabilitiesKHR { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSharedPresentSurfaceCapabilitiesKHR; - VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) {} - SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SharedPresentSurfaceCapabilitiesKHR & + operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SharedPresentSurfaceCapabilitiesKHR & + operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; #else - bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); } - bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; - void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSharedPresentSurfaceCapabilitiesKHR; + void * pNext = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; - }; - static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -71393,367 +95592,94 @@ namespace VULKAN_HPP_NAMESPACE using Type = SharedPresentSurfaceCapabilitiesKHR; }; - struct SparseImageFormatProperties - { - - - VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , imageGranularity( imageGranularity_ ) - , flags( flags_ ) - {} - - SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SparseImageFormatProperties& operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageFormatProperties const& ) const = default; -#else - bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( aspectMask == rhs.aspectMask ) - && ( imageGranularity == rhs.imageGranularity ) - && ( flags == rhs.flags ); - } - - bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; - - }; - static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  struct SparseImageFormatProperties2
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
-
-    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
-      : properties( properties_ )
-    {}
-
-    SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) );
-      return *this;
-    }
-
-    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<SparseImageFormatProperties2 const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
-    }
-
-    operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SparseImageFormatProperties2 const& ) const = default;
-#else
-    bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( properties == rhs.properties );
-    }
-
-    bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
-
-  };
-  static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!"
);
-
-  template <>
-  struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
-  {
-    using Type = SparseImageFormatProperties2;
-  };
-
-  struct SparseImageMemoryRequirements
-  {
-
-
-    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {},
-                                                        uint32_t imageMipTailFirstLod_ = {},
-                                                        VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {},
-                                                        VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {},
-                                                        VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT
-      : formatProperties( formatProperties_ )
-      , imageMipTailFirstLod( imageMipTailFirstLod_ )
-      , imageMipTailSize( imageMipTailSize_ )
-      , imageMipTailOffset( imageMipTailOffset_ )
-      , imageMipTailStride( imageMipTailStride_ )
-    {}
-
-    SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SparseImageMemoryRequirements& operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<SparseImageMemoryRequirements const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
-    }
-
-    operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SparseImageMemoryRequirements const& ) const = default;
-#else
-    bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( formatProperties == rhs.formatProperties )
-          && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
-          && ( imageMipTailSize == rhs.imageMipTailSize )
-          && ( imageMipTailOffset == rhs.imageMipTailOffset )
-          && ( imageMipTailStride == rhs.imageMipTailStride );
-    }
-
-    bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
-    uint32_t imageMipTailFirstLod = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
-
-  };
-  static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!"
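  // ---- Editor's note (illustration, not generated code): the sparse-image structs
  // above are output structs queried per image. A minimal sketch, assuming a
  // vk::Device `device` and a vk::Image `sparseImage` created with sparse-residency
  // flags (hypothetical names).
#if 0
  std::vector<vk::SparseImageMemoryRequirements> reqs =
    device.getImageSparseMemoryRequirements( sparseImage );
  for ( vk::SparseImageMemoryRequirements const & r : reqs )
  {
    // r.formatProperties.imageGranularity is the sparse block size for this aspect;
    // mips at or beyond r.imageMipTailFirstLod must be bound through the mip tail.
  }
#endif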
); - - struct SparseImageMemoryRequirements2 - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; - - VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryRequirements( memoryRequirements_ ) - {} - - SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) ); - return *this; - } - - SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SparseImageMemoryRequirements2 const& ) const = default; -#else - bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); - } - - bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; - - }; - static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = SparseImageMemoryRequirements2; - }; - -#ifdef VK_USE_PLATFORM_GGP +#if defined( VK_USE_PLATFORM_GGP ) struct StreamDescriptorSurfaceCreateInfoGGP { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eStreamDescriptorSurfaceCreateInfoGGP; - VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, - GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , streamDescriptor( streamDescriptor_ ) {} - StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( StreamDescriptorSurfaceCreateInfoGGP ) - offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) + VULKAN_HPP_NOEXCEPT = default; StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & + operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StreamDescriptorSurfaceCreateInfoGGP & + operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & + setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & + setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT { streamDescriptor = streamDescriptor_; return *this; } - - operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT + operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( 
StreamDescriptorSurfaceCreateInfoGGP const& ) const = default;
-#else
-    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & ) const = default;
+#  else
+    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
     }

-    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+#  endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
-    GgpStreamDescriptor streamDescriptor = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+    const void *                                                 pNext            = {};
+    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags            = {};
+    GgpStreamDescriptor                                          streamDescriptor = {};
   };
-  static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value,
+                 "struct wrapper is not a standard layout!"
); template <> struct CppType @@ -71762,859 +95688,194 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - struct StridedBufferRegionKHR - { - - - VULKAN_HPP_CONSTEXPR StridedBufferRegionKHR( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) - , stride( stride_ ) - , size( size_ ) - {} - - explicit StridedBufferRegionKHR( IndirectCommandsStreamNV const& indirectCommandsStreamNV, - VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) - : buffer( indirectCommandsStreamNV.buffer ) - , offset( indirectCommandsStreamNV.offset ) - , stride( stride_ ) - , size( size_ ) - {} - - StridedBufferRegionKHR( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - StridedBufferRegionKHR& operator=( VkStridedBufferRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - StridedBufferRegionKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } - - StridedBufferRegionKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - StridedBufferRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT - { - stride = stride_; - return *this; - } - - StridedBufferRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT - { - size = size_; - return *this; - } - - - operator VkStridedBufferRegionKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkStridedBufferRegionKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( StridedBufferRegionKHR const& ) const = default; -#else - bool operator==( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( stride == rhs.stride ) - && ( size == rhs.size ); - } - - bool operator!=( StridedBufferRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - - }; - static_assert( sizeof( StridedBufferRegionKHR ) == sizeof( VkStridedBufferRegionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - struct SubmitInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; - - VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, - uint32_t commandBufferCount_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , pWaitDstStageMask( pWaitDstStageMask_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBuffers( pCommandBuffers_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) - {} - - SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SubmitInfo ) - offsetof( SubmitInfo, pNext ) ); - return *this; - } - - SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT - { - pWaitSemaphores = pWaitSemaphores_; - return *this; - } - - SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT - { - pWaitDstStageMask = pWaitDstStageMask_; - return *this; - } - - SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT - { - commandBufferCount = commandBufferCount_; - return *this; - } - - SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT - { - pCommandBuffers = pCommandBuffers_; - return *this; - } - - SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - signalSemaphoreCount = signalSemaphoreCount_; - return *this; - } - - SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT - { - pSignalSemaphores = pSignalSemaphores_; - return *this; - } - - - operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubmitInfo const& ) const = default; -#else - bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) - && ( commandBufferCount == rhs.commandBufferCount ) - && ( pCommandBuffers == rhs.pCommandBuffers ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphores == rhs.pSignalSemaphores ); - } - - bool operator!=( SubmitInfo 
const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; - const void* pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; - const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {}; - uint32_t commandBufferCount = {}; - const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {}; - uint32_t signalSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; - - }; - static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = SubmitInfo; - }; - - struct SubpassBeginInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; - - VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT - : contents( contents_ ) - {} - - SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SubpassBeginInfo ) - offsetof( SubpassBeginInfo, pNext ) ); - return *this; - } - - SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassBeginInfo& operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT - { - contents = contents_; - return *this; - } - - - operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassBeginInfo const& ) const = default; -#else - bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( contents == rhs.contents ); - } - - bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; - - }; - static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
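  // ---- Editor's note (illustration, not generated code): the SubmitInfo wrapper
  // deleted in this hunk keeps its classic shape in the regenerated header. A minimal
  // sketch of a typical frame submission, assuming a recorded vk::CommandBuffer `cmd`,
  // a vk::Queue `queue`, semaphores `imageAvailable` / `renderFinished`, and a
  // vk::Fence `inFlight` (all hypothetical names).
#if 0
  vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
  vk::SubmitInfo submit;
  submit.waitSemaphoreCount   = 1;
  submit.pWaitSemaphores      = &imageAvailable;
  submit.pWaitDstStageMask    = &waitStage;   // one stage mask per wait semaphore
  submit.commandBufferCount   = 1;
  submit.pCommandBuffers      = &cmd;
  submit.signalSemaphoreCount = 1;
  submit.pSignalSemaphores    = &renderFinished;
  queue.submit( submit, inFlight );           // single-info overload via ArrayProxy
#endif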
); - - template <> - struct CppType - { - using Type = SubpassBeginInfo; - }; - struct SubpassDescriptionDepthStencilResolve { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSubpassDescriptionDepthStencilResolve; - VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT : depthResolveMode( depthResolveMode_ ) , stencilResolveMode( stencilResolveMode_ ) , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) {} - SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SubpassDescriptionDepthStencilResolve ) - offsetof( SubpassDescriptionDepthStencilResolve, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) + VULKAN_HPP_NOEXCEPT = default; SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SubpassDescriptionDepthStencilResolve( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassDescriptionDepthStencilResolve& operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescriptionDepthStencilResolve & + operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SubpassDescriptionDepthStencilResolve & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & + setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT { depthResolveMode = depthResolveMode_; return *this; } - SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & + setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT { 
stencilResolveMode = stencilResolveMode_;
       return *this;
     }

-    SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
+    SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment(
+      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
     {
       pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
       return *this;
     }

-
-    operator VkSubpassDescriptionDepthStencilResolve const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
+      return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve *>( this );
     }

     operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
+      return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve *>( this );
     }

-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SubpassDescriptionDepthStencilResolve const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default;
 #else
-    bool operator==( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( depthResolveMode == rhs.depthResolveMode )
-          && ( stencilResolveMode == rhs.stencilResolveMode )
-          && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) &&
+             ( stencilResolveMode == rhs.stencilResolveMode ) &&
+             ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
     }

-    bool operator!=( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
-    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
+    const void *                              pNext = {};
     VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
     VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
-    const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment = {};
-
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {};
   };
-  static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolve>::value,
+                 "struct wrapper is not a standard layout!"
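  // ---- Editor's note (illustration, not generated code): this struct extends
  // vk::SubpassDescription2 through its pNext chain (Vulkan 1.2 / KHR). A minimal
  // sketch, assuming a vk::AttachmentReference2 `resolveRef` naming the single-sample
  // depth/stencil resolve attachment (hypothetical name).
#if 0
  vk::SubpassDescriptionDepthStencilResolve dsResolve;
  dsResolve.depthResolveMode               = vk::ResolveModeFlagBits::eSampleZero;
  dsResolve.stencilResolveMode             = vk::ResolveModeFlagBits::eSampleZero;
  dsResolve.pDepthStencilResolveAttachment = &resolveRef;

  vk::SubpassDescription2 subpass;
  subpass.pNext = &dsResolve;  // chained when building the render pass
#endif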
); template <> struct CppType { using Type = SubpassDescriptionDepthStencilResolve; }; + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; - struct SubpassEndInfo - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; - - VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT - {} - - SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SubpassEndInfo ) - offsetof( SubpassEndInfo, pNext ) ); - return *this; - } - - SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SubpassEndInfo& operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - - operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SubpassEndInfo const& ) const = default; -#else - bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); - } - - bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; - const void* pNext = {}; - - }; - static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  template <>
-  struct CppType<StructureType, StructureType::eSubpassEndInfo>
-  {
-    using Type = SubpassEndInfo;
-  };
-
-  struct SurfaceCapabilities2EXT
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
-
-    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( uint32_t minImageCount_ = {},
-                                                  uint32_t maxImageCount_ = {},
-                                                  VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
-                                                  VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
-                                                  VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
-                                                  uint32_t maxImageArrayLayers_ = {},
-                                                  VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
-                                                  VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
-                                                  VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
-                                                  VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {},
-                                                  VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
-      : minImageCount( minImageCount_ )
-      , maxImageCount( maxImageCount_ )
-      , currentExtent( currentExtent_ )
-      , minImageExtent( minImageExtent_ )
-      , maxImageExtent( maxImageExtent_ )
-      , maxImageArrayLayers( maxImageArrayLayers_ )
-      , supportedTransforms( supportedTransforms_ )
-      , currentTransform( currentTransform_ )
-      , supportedCompositeAlpha( supportedCompositeAlpha_ )
-      , supportedUsageFlags( supportedUsageFlags_ )
-      , supportedSurfaceCounters( supportedSurfaceCounters_ )
-    {}
-
-    SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceCapabilities2EXT ) - offsetof( SurfaceCapabilities2EXT, pNext ) );
-      return *this;
-    }
-
-    SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
-    }
-
-    operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SurfaceCapabilities2EXT const& ) const = default;
-#else
-    bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( minImageCount == rhs.minImageCount )
-          && ( maxImageCount == rhs.maxImageCount )
-          && ( currentExtent == rhs.currentExtent )
-          && ( minImageExtent == rhs.minImageExtent )
-          && ( maxImageExtent == rhs.maxImageExtent )
-          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
-          && ( supportedTransforms == rhs.supportedTransforms )
-          && ( currentTransform == rhs.currentTransform )
-          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
-          && ( supportedUsageFlags == rhs.supportedUsageFlags )
-          && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
-    }
-
-    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
-    void* pNext = {};
-    uint32_t minImageCount = {};
-    uint32_t maxImageCount = {};
-    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
-    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
-    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
-    uint32_t maxImageArrayLayers = {};
-    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
-    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
-    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
-    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
-    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
-
-  };
-  static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
-  {
-    using Type = SurfaceCapabilities2EXT;
-  };
-
-  struct SurfaceCapabilitiesKHR
-  {
-
-
-    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {},
-                                                 uint32_t maxImageCount_ = {},
-                                                 VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
-                                                 VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
-                                                 VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
-                                                 uint32_t maxImageArrayLayers_ = {},
-                                                 VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
-                                                 VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
-                                                 VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
-                                                 VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
-      : minImageCount( minImageCount_ )
-      , maxImageCount( maxImageCount_ )
-      , currentExtent( currentExtent_ )
-      , minImageExtent( minImageExtent_ )
-      , maxImageExtent( maxImageExtent_ )
-      , maxImageArrayLayers( maxImageArrayLayers_ )
-      , supportedTransforms( supportedTransforms_ )
-      , currentTransform( currentTransform_ )
-      , supportedCompositeAlpha( supportedCompositeAlpha_ )
-      , supportedUsageFlags( supportedUsageFlags_ )
-    {}
-
-    SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SurfaceCapabilitiesKHR& operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
-    }
-
-    operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SurfaceCapabilitiesKHR const& ) const = default;
-#else
-    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( minImageCount == rhs.minImageCount )
-          && ( maxImageCount == rhs.maxImageCount )
-          && ( currentExtent == rhs.currentExtent )
-          && ( minImageExtent == rhs.minImageExtent )
-          && ( maxImageExtent == rhs.maxImageExtent )
-          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
-          && ( supportedTransforms == rhs.supportedTransforms )
-          && ( currentTransform == rhs.currentTransform )
-          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
-          && ( supportedUsageFlags == rhs.supportedUsageFlags );
-    }
-
-    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    uint32_t minImageCount = {};
-    uint32_t maxImageCount = {};
-    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
-    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
-    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
-    uint32_t maxImageArrayLayers = {};
-    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
-    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
-    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
-    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
-
-  };
-  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
-
-  struct SurfaceCapabilities2KHR
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
-
-    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT
-      : surfaceCapabilities( surfaceCapabilities_ )
-    {}
-
-    SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceCapabilities2KHR ) - offsetof( SurfaceCapabilities2KHR, pNext ) );
-      return *this;
-    }
-
-    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
-    }
-
-    operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SurfaceCapabilities2KHR const& ) const = default;
-#else
-    bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( surfaceCapabilities == rhs.surfaceCapabilities );
-    }
-
-    bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
-
-  };
-  static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
-  {
-    using Type = SurfaceCapabilities2KHR;
-  };
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
   struct SurfaceCapabilitiesFullScreenExclusiveEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
 
-    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT
       : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
     {}
 
-    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(
+      SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : SurfaceCapabilitiesFullScreenExclusiveEXT(
+          *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT &
+      operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT &
+      operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) - offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
       return *this;
     }
 
-    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>(&rhs);
-      return *this;
-    }
-
-    SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
    }
 
-    SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
+    SurfaceCapabilitiesFullScreenExclusiveEXT &
+      setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
    {
      fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
      return *this;
    }
 
-
-    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }
 
    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const& ) const = default;
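    // Illustrative usage sketch (editor's addition, not generated header code):
    // this struct is read back through the vkGetPhysicalDeviceSurfaceCapabilities2KHR
    // structure-chain overload. Requires VK_KHR_get_surface_capabilities2 and
    // VK_EXT_full_screen_exclusive; `physicalDevice` and `surface` are hypothetical.
    //
    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
    //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<
    //     VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR,
    //     VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>( surfaceInfo );
    //   VULKAN_HPP_NAMESPACE::Bool32 supported =
    //     chain.get<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>()
    //       .fullScreenExclusiveSupported;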
-#else
-    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default;
+#  else
+    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
     }
 
-    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+#  endif
 
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
-    void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        fullScreenExclusiveSupported = {};
   };
-  static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) ==
+                   sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
@@ -72623,212 +95884,84 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 
-  struct SurfaceFormatKHR
-  {
-
-
-    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
-                                           VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
-      : format( format_ )
-      , colorSpace( colorSpace_ )
-    {}
-
-    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    SurfaceFormatKHR& operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>(&rhs);
-      return *this;
-    }
-
-
-    operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
-    }
-
-    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( SurfaceFormatKHR const& ) const = default;
-#else
-    bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( format == rhs.format )
-          && ( colorSpace == rhs.colorSpace );
-    }
-
-    bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
-    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
-
-  };
-  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!"
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SurfaceFormat2KHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR; - - VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT - : surfaceFormat( surfaceFormat_ ) - {} - - SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFormat2KHR ) - offsetof( SurfaceFormat2KHR, pNext ) ); - return *this; - } - - SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - - operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceFormat2KHR const& ) const = default; -#else - bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceFormat == rhs.surfaceFormat ); - } - - bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {}; - - }; - static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = SurfaceFormat2KHR; - }; - -#ifdef VK_USE_PLATFORM_WIN32_KHR +#if defined( VK_USE_PLATFORM_WIN32_KHR ) struct SurfaceFullScreenExclusiveInfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceFullScreenExclusiveInfoEXT; - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT : fullScreenExclusive( fullScreenExclusive_ ) {} - SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & + operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SurfaceFullScreenExclusiveInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveInfoEXT & + setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT { fullScreenExclusive = fullScreenExclusive_; return *this; } - - operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const& ) const = default; -#else - bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; +# else + bool operator==( 
SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fullScreenExclusive == rhs.fullScreenExclusive ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); } - bool operator!=( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; }; - static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -72837,34 +95970,38 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR +#if defined( VK_USE_PLATFORM_WIN32_KHR ) struct SurfaceFullScreenExclusiveWin32InfoEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT : hmonitor( hmonitor_ ) {} - SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) - offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SurfaceFullScreenExclusiveWin32InfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - 
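    // Illustrative usage sketch (editor's addition, not generated header code):
    // chaining the two full-screen-exclusive structs into a swapchain create info.
    // `monitor` and `swapchainCreateInfo` are hypothetical; the Win32 struct names
    // the HMONITOR that application-controlled exclusivity targets.
    //
    //   VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT win32Info( monitor );
    //   VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT fseInfo(
    //     VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eApplicationControlled );
    //   fseInfo.pNext             = &win32Info;
    //   swapchainCreateInfo.pNext = &fseInfo;
    //   // after creating the swapchain: device.acquireFullScreenExclusiveModeEXT( swapchain );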
SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -72876,44 +96013,39 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const& ) const = default; -#else - bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( hmonitor == rhs.hmonitor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); } - bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; - const void* pNext = {}; - HMONITOR hmonitor = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + const void * pNext = {}; + HMONITOR hmonitor = {}; }; - static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); template <> struct CppType @@ -72924,80 +96056,78 @@ namespace VULKAN_HPP_NAMESPACE struct SurfaceProtectedCapabilitiesKHR { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; - VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT : supportsProtected( supportsProtected_ ) {} - SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & + operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SurfaceProtectedCapabilitiesKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR & + setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT { supportsProtected = supportsProtected_; return *this; } - - operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SurfaceProtectedCapabilitiesKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; #else - bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportsProtected == rhs.supportsProtected ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); } - bool operator!=( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; }; - static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -73007,80 +96137,78 @@ namespace VULKAN_HPP_NAMESPACE struct SwapchainCounterCreateInfoEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; - VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT : surfaceCounters( surfaceCounters_ ) {} - SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR + SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SwapchainCounterCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & + operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SwapchainCounterCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT + SwapchainCounterCreateInfoEXT & + setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT { surfaceCounters = surfaceCounters_; return *this; } - - operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT 
{ - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainCounterCreateInfoEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; #else - bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceCounters == rhs.surfaceCounters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); } - bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; - const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; - }; - static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -73088,315 +96216,82 @@ namespace VULKAN_HPP_NAMESPACE using Type = SwapchainCounterCreateInfoEXT; }; - struct SwapchainCreateInfoKHR - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; - - VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, - uint32_t minImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, - uint32_t imageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t* pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, - VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , surface( surface_ ) - , minImageCount( minImageCount_ ) - , imageFormat( imageFormat_ ) - , imageColorSpace( imageColorSpace_ ) - , imageExtent( imageExtent_ ) - , imageArrayLayers( imageArrayLayers_ ) - , imageUsage( imageUsage_ ) - , 
imageSharingMode( imageSharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , preTransform( preTransform_ ) - , compositeAlpha( compositeAlpha_ ) - , presentMode( presentMode_ ) - , clipped( clipped_ ) - , oldSwapchain( oldSwapchain_ ) - {} - - SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SwapchainCreateInfoKHR ) - offsetof( SwapchainCreateInfoKHR, pNext ) ); - return *this; - } - - SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT - { - surface = surface_; - return *this; - } - - SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT - { - minImageCount = minImageCount_; - return *this; - } - - SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT - { - imageFormat = imageFormat_; - return *this; - } - - SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT - { - imageColorSpace = imageColorSpace_; - return *this; - } - - SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT - { - imageExtent = imageExtent_; - return *this; - } - - SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT - { - imageArrayLayers = imageArrayLayers_; - return *this; - } - - SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT - { - imageUsage = imageUsage_; - return *this; - } - - SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT - { - imageSharingMode = imageSharingMode_; - return *this; - } - - SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; - } - - SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT - { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; - } - - SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT - { - preTransform = preTransform_; - return *this; - } - - SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT - { - compositeAlpha = compositeAlpha_; - return *this; - } - - SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT - { - presentMode = presentMode_; - return *this; - } - - SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT - { - clipped = clipped_; - return *this; - } - - SwapchainCreateInfoKHR & setOldSwapchain( 
VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT - { - oldSwapchain = oldSwapchain_; - return *this; - } - - - operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainCreateInfoKHR const& ) const = default; -#else - bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( surface == rhs.surface ) - && ( minImageCount == rhs.minImageCount ) - && ( imageFormat == rhs.imageFormat ) - && ( imageColorSpace == rhs.imageColorSpace ) - && ( imageExtent == rhs.imageExtent ) - && ( imageArrayLayers == rhs.imageArrayLayers ) - && ( imageUsage == rhs.imageUsage ) - && ( imageSharingMode == rhs.imageSharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) - && ( preTransform == rhs.preTransform ) - && ( compositeAlpha == rhs.compositeAlpha ) - && ( presentMode == rhs.presentMode ) - && ( clipped == rhs.clipped ) - && ( oldSwapchain == rhs.oldSwapchain ); - } - - bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; - uint32_t minImageCount = {}; - VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; - VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; - uint32_t imageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t* pQueueFamilyIndices = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; - VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; - - }; - static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - template <> - struct CppType - { - using Type = SwapchainCreateInfoKHR; - }; - struct SwapchainDisplayNativeHdrCreateInfoAMD { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; - VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) VULKAN_HPP_NOEXCEPT - : localDimmingEnable( localDimmingEnable_ ) +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) + VULKAN_HPP_NOEXCEPT : localDimmingEnable( localDimmingEnable_ ) {} - SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) - offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : SwapchainDisplayNativeHdrCreateInfoAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & + setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT { localDimmingEnable = localDimmingEnable_; return *this; } - - operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; #else - bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { 
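      // Illustrative usage sketch (editor's addition, not generated header code):
      // enabling AMD native-HDR local dimming. Requires VK_AMD_display_native_hdr;
      // `swapchainCreateInfo`, `device`, and `swapchain` are hypothetical.
      //
      //   VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD nativeHdr( VK_TRUE );
      //   nativeHdr.pNext           = swapchainCreateInfo.pNext;
      //   swapchainCreateInfo.pNext = &nativeHdr;
      //   // toggle later without recreating the swapchain:
      //   device.setLocalDimmingAMD( swapchain, VK_FALSE );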
- return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( localDimmingEnable == rhs.localDimmingEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); } - bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; }; - static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType @@ -73406,68 +96301,68 @@ namespace VULKAN_HPP_NAMESPACE struct TextureLODGatherFormatPropertiesAMD { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eTextureLodGatherFormatPropertiesAMD; - VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) {} - TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TextureLODGatherFormatPropertiesAMD & + operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TextureLODGatherFormatPropertiesAMD & + operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - - operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT + operator 
VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( TextureLODGatherFormatPropertiesAMD const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default; #else - bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); } - bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; - void* pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; }; - static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
 
   template <>
   struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
@@ -73477,37 +96372,50 @@ namespace VULKAN_HPP_NAMESPACE
 
   struct TimelineSemaphoreSubmitInfo
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
 
-    VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {},
-                                                      const uint64_t* pWaitSemaphoreValues_ = {},
-                                                      uint32_t signalSemaphoreValueCount_ = {},
-                                                      const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      TimelineSemaphoreSubmitInfo( uint32_t         waitSemaphoreValueCount_   = {},
+                                   const uint64_t * pWaitSemaphoreValues_      = {},
+                                   uint32_t         signalSemaphoreValueCount_ = {},
+                                   const uint64_t * pSignalSemaphoreValues_    = {} ) VULKAN_HPP_NOEXCEPT
       : waitSemaphoreValueCount( waitSemaphoreValueCount_ )
       , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
       , signalSemaphoreValueCount( signalSemaphoreValueCount_ )
       , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
     {}
 
-    TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( TimelineSemaphoreSubmitInfo ) - offsetof( TimelineSemaphoreSubmitInfo, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
+    {}
 
-    TimelineSemaphoreSubmitInfo& operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    TimelineSemaphoreSubmitInfo(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
+      : waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
+      , pWaitSemaphoreValues( waitSemaphoreValues_.data() )
+      , signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
+      , pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &
+      operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
       return *this;
     }
 
-    TimelineSemaphoreSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -73519,103 +96427,123 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
     {
       pWaitSemaphoreValues = pWaitSemaphoreValues_;
       return *this;
     }
 
-    TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+      pWaitSemaphoreValues    = waitSemaphoreValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    TimelineSemaphoreSubmitInfo &
+      setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
     {
       signalSemaphoreValueCount = signalSemaphoreValueCount_;
       return *this;
     }
 
-    TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    TimelineSemaphoreSubmitInfo &
+      setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
     {
       pSignalSemaphoreValues = pSignalSemaphoreValues_;
       return *this;
     }
 
-
-    operator VkTimelineSemaphoreSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
+      signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+      pSignalSemaphoreValues    = signalSemaphoreValues_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo *>( this );
     }
 
     operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
+      return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( TimelineSemaphoreSubmitInfo const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default;
 #else
-    bool operator==( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount )
-          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
-          && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount )
-          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) &&
+             ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) &&
+             ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) &&
+             ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
     }
 
-    bool operator!=( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
-    const void* pNext = {};
-    uint32_t waitSemaphoreValueCount = {};
-    const uint64_t* pWaitSemaphoreValues = {};
-    uint32_t signalSemaphoreValueCount = {};
-    const uint64_t* pSignalSemaphoreValues = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::eTimelineSemaphoreSubmitInfo;
+    const void *                        pNext                     = {};
+    uint32_t                            waitSemaphoreValueCount   = {};
+    const uint64_t *                    pWaitSemaphoreValues      = {};
+    uint32_t                            signalSemaphoreValueCount = {};
+    const uint64_t *                    pSignalSemaphoreValues    = {};
   };
-  static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( 
VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); template <> struct CppType { using Type = TimelineSemaphoreSubmitInfo; }; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; -#ifdef VK_ENABLE_BETA_EXTENSIONS struct TraceRaysIndirectCommandKHR { - - - VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, - uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT : width( width_ ) , height( height_ ) , depth( depth_ ) {} - explicit TraceRaysIndirectCommandKHR( Extent2D const& extent2D, - uint32_t depth_ = {} ) - : width( extent2D.width ) - , height( extent2D.height ) - , depth( depth_ ) - {} + VULKAN_HPP_CONSTEXPR + TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : TraceRaysIndirectCommandKHR( *reinterpret_cast( &rhs ) ) + {} - TraceRaysIndirectCommandKHR& operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & + operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } @@ -73637,254 +96565,180 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - - operator VkTraceRaysIndirectCommandKHR const&() const VULKAN_HPP_NOEXCEPT + operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( TraceRaysIndirectCommandKHR const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default; #else - bool operator==( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( width == rhs.width ) - && ( height == rhs.height ) - && ( depth == rhs.depth ); + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); } - bool operator!=( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif - - public: - uint32_t width = {}; + uint32_t width = {}; uint32_t height = {}; - uint32_t depth = {}; - - }; - static_assert( sizeof( 
TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - struct ValidationCacheCreateInfoEXT - { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT; - - VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, - size_t initialDataSize_ = {}, - const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) - {} - - ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) ); - return *this; - } - - ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } - - ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast(&rhs); - return *this; - } - - ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT - { - initialDataSize = initialDataSize_; - return *this; - } - - ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT - { - pInitialData = pInitialData_; - return *this; - } - - - operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ValidationCacheCreateInfoEXT const& ) const = default; -#else - bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); - } - - bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; - size_t initialDataSize = {}; - const void* pInitialData = {}; - - }; - static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - template <> - struct CppType - { - using Type = ValidationCacheCreateInfoEXT; + uint32_t depth = {}; }; + static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), + "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); struct ValidationFeaturesEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; - VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {}, - uint32_t disabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( + uint32_t enabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, + uint32_t disabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : enabledValidationFeatureCount( enabledValidationFeatureCount_ ) , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) , pDisabledValidationFeatures( pDisabledValidationFeatures_ ) {} - ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ValidationFeaturesEXT ) - offsetof( ValidationFeaturesEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : ValidationFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} - ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + enabledValidationFeatures_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationFeatures_ = {} ) + : enabledValidationFeatureCount( static_cast( enabledValidationFeatures_.size() ) ) + , pEnabledValidationFeatures( enabledValidationFeatures_.data() ) + , disabledValidationFeatureCount( static_cast( disabledValidationFeatures_.size() ) ) + , pDisabledValidationFeatures( disabledValidationFeatures_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & + operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ValidationFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & + setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT { enabledValidationFeatureCount = enabledValidationFeatureCount_; return *this; } - ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* 
pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & setPEnabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT { pEnabledValidationFeatures = pEnabledValidationFeatures_; return *this; } - ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setEnabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = static_cast( enabledValidationFeatures_.size() ); + pEnabledValidationFeatures = enabledValidationFeatures_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ValidationFeaturesEXT & + setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT { disabledValidationFeatureCount = disabledValidationFeatureCount_; return *this; } - ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & setPDisabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT { pDisabledValidationFeatures = pDisabledValidationFeatures_; return *this; } - - operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setDisabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + disabledValidationFeatureCount = static_cast( disabledValidationFeatures_.size() ); + pDisabledValidationFeatures = disabledValidationFeatures_.data(); + return *this; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ValidationFeaturesEXT const& ) const = default; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFeaturesEXT const & ) const = default; #else - bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) - && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) - && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) - && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) && + ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && + ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) && + ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); } - bool operator!=( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif - - public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; - const void* pNext = {}; - uint32_t enabledValidationFeatureCount = {}; - const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {}; - uint32_t disabledValidationFeatureCount = {}; - const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; + const void * pNext = {}; + uint32_t enabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {}; + uint32_t disabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {}; }; - static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -73895,33 +96749,43 @@ namespace VULKAN_HPP_NAMESPACE struct ValidationFlagsEXT { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; - VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( + uint32_t disabledValidationCheckCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT : disabledValidationCheckCount( disabledValidationCheckCount_ ) , pDisabledValidationChecks( pDisabledValidationChecks_ ) {} - ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ValidationFlagsEXT ) - offsetof( ValidationFlagsEXT, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : ValidationFlagsEXT( *reinterpret_cast( &rhs ) ) + {} - ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationChecks_ ) + : disabledValidationCheckCount( static_cast( disabledValidationChecks_.size() ) ) + , pDisabledValidationChecks( disabledValidationChecks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & + operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ValidationFlagsEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -73933,51 +96797,58 @@ namespace 
VULKAN_HPP_NAMESPACE
       return *this;
     }

-    ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+    ValidationFlagsEXT & setPDisabledValidationChecks(
+      const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
     {
       pDisabledValidationChecks = pDisabledValidationChecks_;
       return *this;
     }

-
-    operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ValidationFlagsEXT & setDisabledValidationChecks(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const &
+        disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
+      disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
+      pDisabledValidationChecks    = disabledValidationChecks_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFlagsEXT *>( this );
     }

     operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkValidationFlagsEXT*>( this );
+      return *reinterpret_cast<VkValidationFlagsEXT *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( ValidationFlagsEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ValidationFlagsEXT const & ) const = default;
#else
-    bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
-          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) &&
+             ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
     }

-    bool operator!=( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
#endif
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
-    const void* pNext = {};
-    uint32_t disabledValidationCheckCount = {};
-    const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType              sType                        = StructureType::eValidationFlagsEXT;
+    const void *                                     pNext                        = {};
+    uint32_t                                         disabledValidationCheckCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks    = {};
   };
-  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" 
); template <> @@ -73986,36 +96857,36 @@ namespace VULKAN_HPP_NAMESPACE using Type = ValidationFlagsEXT; }; -#ifdef VK_USE_PLATFORM_VI_NN +#if defined( VK_USE_PLATFORM_VI_NN ) struct ViSurfaceCreateInfoNN { - static const bool allowDuplicate = false; + static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, - void* window_ = {} ) VULKAN_HPP_NOEXCEPT + void * window_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) , window( window_ ) {} - ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT - { - memcpy( &pNext, &rhs.pNext, sizeof( ViSurfaceCreateInfoNN ) - offsetof( ViSurfaceCreateInfoNN, pNext ) ); - return *this; - } + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = rhs; - } + : ViSurfaceCreateInfoNN( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & + operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + *this = *reinterpret_cast( &rhs ); return *this; } - ViSurfaceCreateInfoNN & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -74027,51 +96898,44 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - ViSurfaceCreateInfoNN & setWindow( void* window_ ) VULKAN_HPP_NOEXCEPT + ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } - - operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT + operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - -#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) - auto operator<=>( ViSurfaceCreateInfoNN const& ) const = default; -#else - bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; +# else + bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( window == rhs.window ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); } - bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - +# endif public: - const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; - const void* pNext = {}; - VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; - void* window = {}; - + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eViSurfaceCreateInfoNN; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; + void * window = {}; }; - static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" ); + static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), + "struct and wrapper have different size!" ); static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); template <> @@ -74081,103 +96945,3118 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - struct WaylandSurfaceCreateInfoKHR +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264CapabilitiesEXT { - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT; - VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, - struct wl_display* display_ = {}, - struct wl_surface* surface_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , display( display_ ) - , surface( surface_ ) +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT( + uint32_t maxLevel_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : maxLevel( maxLevel_ ) + , fieldOffsetGranularity( fieldOffsetGranularity_ ) + , stdExtensionVersion( stdExtensionVersion_ ) {} - WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT & + operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - memcpy( &pNext, &rhs.pNext, sizeof( WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) ); + *this = *reinterpret_cast( &rhs ); return *this; } - WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT { - *this = rhs; + return *reinterpret_cast( this ); } - WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast(&rhs); + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264CapabilitiesEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) && + ( fieldOffsetGranularity == 
rhs.fieldOffsetGranularity ) && + ( stdExtensionVersion == rhs.stdExtensionVersion ); + } + + bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesEXT; + void * pNext = {}; + uint32_t maxLevel = {}; + VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {}; + }; + static_assert( sizeof( VideoDecodeH264CapabilitiesEXT ) == sizeof( VkVideoDecodeH264CapabilitiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264DpbSlotInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT( + const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdReferenceInfo( pStdReferenceInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & + operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - WaylandSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264DpbSlotInfoEXT & + setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } + + operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); + } + + bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {}; + }; + static_assert( sizeof( 
VideoDecodeH264DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH264DpbSlotInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH264DpbSlotInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoEXT>
+  {
+    using Type = VideoDecodeH264DpbSlotInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH264MvcEXT
+  {
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( const StdVideoDecodeH264Mvc * pStdMvc_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pStdMvc( pStdMvc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264MvcEXT( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264MvcEXT( *reinterpret_cast<VideoDecodeH264MvcEXT const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT &
+      operator=( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264MvcEXT & operator=( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VideoDecodeH264MvcEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoDecodeH264MvcEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoDecodeH264MvcEXT & setPStdMvc( const StdVideoDecodeH264Mvc * pStdMvc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdMvc = pStdMvc_;
+      return *this;
+    }
+
+    operator VkVideoDecodeH264MvcEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264MvcEXT *>( this );
+    }
+
+    operator VkVideoDecodeH264MvcEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264MvcEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH264MvcEXT const & ) const = default;
+# else
+    bool operator==( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdMvc == rhs.pStdMvc );
+    }
+
+    bool operator!=( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType   = StructureType::eVideoDecodeH264MvcEXT;
+    const void *                        pNext   = {};
+    const StdVideoDecodeH264Mvc *       pStdMvc = {};
+  };
+  static_assert( sizeof( VideoDecodeH264MvcEXT ) == sizeof( VkVideoDecodeH264MvcEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH264MvcEXT>::value, "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH264MvcEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264PictureInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, + uint32_t slicesCount_ = {}, + const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( slicesCount_ ) + , pSlicesDataOffsets( pSlicesDataOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoEXT( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264PictureInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoEXT( + const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( static_cast( slicesDataOffsets_.size() ) ) + , pSlicesDataOffsets( slicesDataOffsets_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & + operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoEXT & operator=( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264PictureInfoEXT & + setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VideoDecodeH264PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = slicesCount_; + return *this; + } + + VideoDecodeH264PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSlicesDataOffsets = pSlicesDataOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoEXT & setSlicesDataOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = static_cast( slicesDataOffsets_.size() ); + pSlicesDataOffsets = slicesDataOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264PictureInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == 
rhs.pSlicesDataOffsets ); + } + + bool operator!=( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {}; + uint32_t slicesCount = {}; + const uint32_t * pSlicesDataOffsets = {}; + }; + static_assert( sizeof( VideoDecodeH264PictureInfoEXT ) == sizeof( VkVideoDecodeH264PictureInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264PictureInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264ProfileEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT( + StdVideoH264ProfileIdc stdProfileIdc_ = {}, + VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + , fieldLayout( fieldLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264ProfileEXT( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileEXT( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & + operator=( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileEXT & operator=( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + VideoDecodeH264ProfileEXT & + setFieldLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout_ ) VULKAN_HPP_NOEXCEPT + { + fieldLayout = fieldLayout_; + return *this; + } + + operator VkVideoDecodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264ProfileEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) && + ( fieldLayout == rhs.fieldLayout ); + } + + bool operator!=( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileEXT; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout = {}; + }; + static_assert( 
sizeof( VideoDecodeH264ProfileEXT ) == sizeof( VkVideoDecodeH264ProfileEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( VideoDecodeH264SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionCreateInfoEXT( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & + operator=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionCreateInfoEXT & + operator=( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display* display_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } + + operator VkVideoDecodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pStdExtensionVersion == rhs.pStdExtensionVersion ); + } + + bool operator!=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + static_assert( sizeof( VideoDecodeH264SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH264SessionCreateInfoEXT ), + "struct and wrapper have different 
size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionParametersAddInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( + uint32_t spsStdCount_ = {}, + const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( + VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & + operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoEXT & + operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + 
VULKAN_HPP_NOEXCEPT + { + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && + ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); + } + + bool operator!=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t spsStdCount = {}; + const StdVideoH264SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH264PictureParameterSet * pPpsStd = {}; + }; + static_assert( sizeof( VideoDecodeH264SessionParametersAddInfoEXT ) == + sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionParametersCreateInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT( + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT( + VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & + operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoEXT & + operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t 
maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPpsStdCount = maxPpsStdCount_; + return *this; + } + + VideoDecodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } + + operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && + ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); + } + + bool operator!=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; + const void * pNext = {}; + uint32_t maxSpsStdCount = {}; + uint32_t maxPpsStdCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {}; + }; + static_assert( sizeof( VideoDecodeH264SessionParametersCreateInfoEXT ) == + sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265CapabilitiesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT( + uint32_t maxLevel_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : maxLevel( maxLevel_ ) + , stdExtensionVersion( stdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265CapabilitiesEXT( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT & + operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265CapabilitiesEXT & operator=( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265CapabilitiesEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) && + ( stdExtensionVersion == rhs.stdExtensionVersion ); + } + + bool operator!=( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesEXT; + void * pNext = {}; + uint32_t maxLevel = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {}; + }; + static_assert( sizeof( VideoDecodeH265CapabilitiesEXT ) == sizeof( VkVideoDecodeH265CapabilitiesEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH265CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265DpbSlotInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdReferenceInfo( pStdReferenceInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoEXT( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & + operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoEXT & operator=( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265DpbSlotInfoEXT & + setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } + + operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); + } + + bool operator!=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + static_assert( sizeof( VideoDecodeH265DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH265DpbSlotInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH265DpbSlotInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265PictureInfoEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, + uint32_t slicesCount_ = {}, + const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( slicesCount_ ) + , pSlicesDataOffsets( pSlicesDataOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoEXT( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265PictureInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoEXT( + StdVideoDecodeH265PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( static_cast( slicesDataOffsets_.size() ) ) + , pSlicesDataOffsets( slicesDataOffsets_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & + operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoEXT & operator=( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265PictureInfoEXT & + setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VideoDecodeH265PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = slicesCount_; + return *this; + } + + VideoDecodeH265PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSlicesDataOffsets = pSlicesDataOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoEXT & setSlicesDataOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = static_cast( slicesDataOffsets_.size() ); + pSlicesDataOffsets = slicesDataOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265PictureInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets ); 
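+      // Editor's note: an illustrative usage sketch, not generated header code. The enhanced-mode
+      // setSlicesDataOffsets() overload above takes an ArrayProxyNoTemporaries<const uint32_t> and
+      // keeps slicesCount and pSlicesDataOffsets in sync, so a caller can write (assuming a
+      // hypothetical `offsets` vector and `stdInfo` picture info; `vk` is VULKAN_HPP_NAMESPACE):
+      //
+      //   std::vector<uint32_t> offsets{ 0u, 256u, 512u };
+      //   StdVideoDecodeH265PictureInfo stdInfo{};
+      //   auto viaProxy = vk::VideoDecodeH265PictureInfoEXT{}
+      //                     .setPStdPictureInfo( &stdInfo )
+      //                     .setSlicesDataOffsets( offsets );  // count and pointer set together
+      //
+      // which is equivalent to the raw-pointer form, where both fields must be kept in sync by hand:
+      //
+      //   auto viaPointer = vk::VideoDecodeH265PictureInfoEXT{}
+      //                       .setPStdPictureInfo( &stdInfo )
+      //                       .setSlicesCount( static_cast<uint32_t>( offsets.size() ) )
+      //                       .setPSlicesDataOffsets( offsets.data() );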
+ } + + bool operator!=( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT; + const void * pNext = {}; + StdVideoDecodeH265PictureInfo * pStdPictureInfo = {}; + uint32_t slicesCount = {}; + const uint32_t * pSlicesDataOffsets = {}; + }; + static_assert( sizeof( VideoDecodeH265PictureInfoEXT ) == sizeof( VkVideoDecodeH265PictureInfoEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265PictureInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265ProfileEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265ProfileEXT( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileEXT( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT & + operator=( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileEXT & operator=( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + VideoDecodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VideoDecodeH265ProfileEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265ProfileEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileEXT; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; + }; + static_assert( sizeof( VideoDecodeH265ProfileEXT ) == sizeof( VkVideoDecodeH265ProfileEXT ), + "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
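+
+  // Illustrative sketch, not generated code: the wrapper above is meant to be
+  // filled through its chained setters; 'stdPictureInfo' and 'offsets' are
+  // hypothetical locals that must outlive the struct pointing at them.
+  //
+  //   StdVideoDecodeH265PictureInfo stdPictureInfo{};
+  //   std::array<uint32_t, 2>       offsets{ 0u, 256u };
+  //   auto pictureInfo = VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT{}
+  //                        .setPStdPictureInfo( &stdPictureInfo )
+  //                        .setSlicesDataOffsets( offsets );  // sets slicesCount and pSlicesDataOffsets together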
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH265ProfileEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stdProfileIdc( stdProfileIdc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoDecodeH265ProfileEXT( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265ProfileEXT( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265ProfileEXT( *reinterpret_cast<VideoDecodeH265ProfileEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT &
+      operator=( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265ProfileEXT & operator=( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoDecodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoDecodeH265ProfileEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdProfileIdc = stdProfileIdc_;
+      return *this;
+    }
+
+    operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265ProfileEXT *>( this );
+    }
+
+    operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265ProfileEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH265ProfileEXT const & ) const = default;
+#  else
+    bool operator==( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileEXT;
+    const void * pNext = {};
+    StdVideoH265ProfileIdc stdProfileIdc = {};
+  };
+  static_assert( sizeof( VideoDecodeH265ProfileEXT ) == sizeof( VkVideoDecodeH265ProfileEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH265ProfileEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileEXT>
+  {
+    using Type = VideoDecodeH265ProfileEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH265SessionCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoDecodeH265SessionCreateInfoEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT(
+      VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ = {},
+      const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pStdExtensionVersion( pStdExtensionVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( VideoDecodeH265SessionCreateInfoEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265SessionCreateInfoEXT( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265SessionCreateInfoEXT( *reinterpret_cast<VideoDecodeH265SessionCreateInfoEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT &
+      operator=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265SessionCreateInfoEXT &
+      operator=( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoDecodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionCreateInfoEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionCreateInfoEXT & setPStdExtensionVersion(
+      const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdExtensionVersion = pStdExtensionVersion_;
+      return *this;
+    }
+
+    operator VkVideoDecodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265SessionCreateInfoEXT *>( this );
+    }
+
+    operator VkVideoDecodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265SessionCreateInfoEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH265SessionCreateInfoEXT const & ) const = default;
+#  else
+    bool operator==( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( pStdExtensionVersion == rhs.pStdExtensionVersion );
+    }
+
+    bool operator!=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags = {};
+    const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
+  };
+  static_assert( sizeof( VideoDecodeH265SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH265SessionCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH265SessionCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionCreateInfoEXT>
+  {
+    using Type = VideoDecodeH265SessionCreateInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH265SessionParametersAddInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(
+      uint32_t spsStdCount_ = {},
+      const StdVideoH265SequenceParameterSet * pSpsStd_ = {},
+      uint32_t ppsStdCount_ = {},
+      const StdVideoH265PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT
+      : spsStdCount( spsStdCount_ )
+      , pSpsStd( pSpsStd_ )
+      , ppsStdCount( ppsStdCount_ )
+      , pPpsStd( pPpsStd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(
+      VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265SessionParametersAddInfoEXT(
+          *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH265SessionParametersAddInfoEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ = {} )
+      : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
+      , pSpsStd( spsStd_.data() )
+      , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
+      , pPpsStd( ppsStd_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &
+      operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265SessionParametersAddInfoEXT &
+      operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      spsStdCount = spsStdCount_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersAddInfoEXT &
+      setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSpsStd = pSpsStd_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH265SessionParametersAddInfoEXT & setSpsStd(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      spsStdCount = static_cast<uint32_t>( spsStd_.size() );
+      pSpsStd = spsStd_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoDecodeH265SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsStdCount = ppsStdCount_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersAddInfoEXT &
+      setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPpsStd = pPpsStd_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH265SessionParametersAddInfoEXT & setPpsStd(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
+      pPpsStd = ppsStd_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoEXT *>( this );
+    }
+
+    operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH265SessionParametersAddInfoEXT const & ) const = default;
+#  else
+    bool operator==( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
+             ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
+    }
+
+    bool operator!=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;
+    const void * pNext = {};
+    uint32_t spsStdCount = {};
+    const StdVideoH265SequenceParameterSet * pSpsStd = {};
+    uint32_t ppsStdCount = {};
+    const StdVideoH265PictureParameterSet * pPpsStd = {};
+  };
+  static_assert( sizeof( VideoDecodeH265SessionParametersAddInfoEXT ) ==
+                   sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH265SessionParametersAddInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersAddInfoEXT>
+  {
+    using Type = VideoDecodeH265SessionParametersAddInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH265SessionParametersCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(
+      uint32_t maxSpsStdCount_ = {},
+      uint32_t maxPpsStdCount_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
+      VULKAN_HPP_NOEXCEPT
+      : maxSpsStdCount( maxSpsStdCount_ )
+      , maxPpsStdCount( maxPpsStdCount_ )
+      , pParametersAddInfo( pParametersAddInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(
+      VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH265SessionParametersCreateInfoEXT(
+          *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &
+      operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH265SessionParametersCreateInfoEXT &
+      operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSpsStdCount = maxSpsStdCount_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPpsStdCount = maxPpsStdCount_;
+      return *this;
+    }
+
+    VideoDecodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo(
+      const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      pParametersAddInfo = pParametersAddInfo_;
+      return *this;
+    }
+
+    operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
+    }
+
+    operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH265SessionParametersCreateInfoEXT const & ) const = default;
+#  else
+    bool operator==( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
+             ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
+    }
+
+    bool operator!=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
+    const void * pNext = {};
+    uint32_t maxSpsStdCount = {};
+    uint32_t maxPpsStdCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};
+  };
+  static_assert( sizeof( VideoDecodeH265SessionParametersCreateInfoEXT ) ==
+                   sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoDecodeH265SessionParametersCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT>
+  {
+    using Type = VideoDecodeH265SessionParametersCreateInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
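+
+  // Illustrative sketch, not generated code: the ArrayProxyNoTemporaries
+  // constructors fill each count/pointer pair from a single argument;
+  // 'spsArray' and 'ppsArray' are hypothetical locals.
+  //
+  //   std::vector<StdVideoH265SequenceParameterSet> spsArray( 1 );
+  //   std::vector<StdVideoH265PictureParameterSet>  ppsArray( 1 );
+  //   VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT addInfo{ spsArray, ppsArray };
+  //   // now addInfo.spsStdCount == spsArray.size() and addInfo.pSpsStd == spsArray.data()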
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264CapabilitiesEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT(
+      VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags_ = {},
+      VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {},
+      VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {},
+      VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs_ = {},
+      VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {},
+      VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment_ = {},
+      uint8_t maxNumL0ReferenceForP_ = {},
+      uint8_t maxNumL0ReferenceForB_ = {},
+      uint8_t maxNumL1Reference_ = {},
+      uint8_t qualityLevelCount_ = {},
+      VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , inputModeFlags( inputModeFlags_ )
+      , outputModeFlags( outputModeFlags_ )
+      , minPictureSizeInMbs( minPictureSizeInMbs_ )
+      , maxPictureSizeInMbs( maxPictureSizeInMbs_ )
+      , inputImageDataAlignment( inputImageDataAlignment_ )
+      , maxNumL0ReferenceForP( maxNumL0ReferenceForP_ )
+      , maxNumL0ReferenceForB( maxNumL0ReferenceForB_ )
+      , maxNumL1Reference( maxNumL1Reference_ )
+      , qualityLevelCount( qualityLevelCount_ )
+      , stdExtensionVersion( stdExtensionVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast<VideoEncodeH264CapabilitiesEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
+      operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT &
+      setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputModeFlags = inputModeFlags_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT & setOutputModeFlags(
+      VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      outputModeFlags = outputModeFlags_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT &
+      setMinPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & minPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minPictureSizeInMbs = minPictureSizeInMbs_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT &
+      setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPictureSizeInMbs = maxPictureSizeInMbs_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT &
+      setInputImageDataAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputImageDataAlignment = inputImageDataAlignment_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT & setMaxNumL0ReferenceForP( uint8_t maxNumL0ReferenceForP_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxNumL0ReferenceForP = maxNumL0ReferenceForP_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT & setMaxNumL0ReferenceForB( uint8_t maxNumL0ReferenceForB_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxNumL0ReferenceForB = maxNumL0ReferenceForB_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT & setMaxNumL1Reference( uint8_t maxNumL1Reference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxNumL1Reference = maxNumL1Reference_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT & setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qualityLevelCount = qualityLevelCount_;
+      return *this;
+    }
+
+    VideoEncodeH264CapabilitiesEXT & setStdExtensionVersion(
+      VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdExtensionVersion = stdExtensionVersion_;
+      return *this;
+    }
+
+    operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264CapabilitiesEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264CapabilitiesEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264CapabilitiesEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( inputModeFlags == rhs.inputModeFlags ) && ( outputModeFlags == rhs.outputModeFlags ) &&
+             ( minPictureSizeInMbs == rhs.minPictureSizeInMbs ) && ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) &&
+             ( inputImageDataAlignment == rhs.inputImageDataAlignment ) &&
+             ( maxNumL0ReferenceForP == rhs.maxNumL0ReferenceForP ) &&
+             ( maxNumL0ReferenceForB == rhs.maxNumL0ReferenceForB ) && ( maxNumL1Reference == rhs.maxNumL1Reference ) &&
+             ( qualityLevelCount == rhs.qualityLevelCount ) && ( stdExtensionVersion == rhs.stdExtensionVersion );
+    }
+
+    bool operator!=( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {};
+    VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment = {};
+    uint8_t maxNumL0ReferenceForP = {};
+    uint8_t maxNumL0ReferenceForB = {};
+    uint8_t maxNumL1Reference = {};
+    uint8_t qualityLevelCount = {};
+    VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
+  };
+  static_assert( sizeof( VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264CapabilitiesEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesEXT>
+  {
+    using Type = VideoEncodeH264CapabilitiesEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264DpbSlotInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeH264DpbSlotInfoEXT( int8_t slotIndex_ = {},
+                                     const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : slotIndex( slotIndex_ )
+      , pStdPictureInfo( pStdPictureInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH264DpbSlotInfoEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT &
+      operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      slotIndex = slotIndex_;
+      return *this;
+    }
+
+    VideoEncodeH264DpbSlotInfoEXT &
+      setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPictureInfo = pStdPictureInfo_;
+      return *this;
+    }
+
+    operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) &&
+             ( pStdPictureInfo == rhs.pStdPictureInfo );
+    }
+
+    bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
+    const void * pNext = {};
+    int8_t slotIndex = {};
+    const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {};
+  };
+  static_assert( sizeof( VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264DpbSlotInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoEXT>
+  {
+    using Type = VideoEncodeH264DpbSlotInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
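+
+  // Illustrative sketch, not generated code: because each wrapper is
+  // layout-compatible with its C counterpart (enforced by the static_asserts
+  // above), the conversion operators are zero-cost reinterpretations;
+  // 'slotInfo' is a hypothetical local.
+  //
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT slotInfo{};
+  //   VkVideoEncodeH264DpbSlotInfoEXT & cSlotInfo = slotInfo;  // view of the same bytes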
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264EmitPictureParametersEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoEncodeH264EmitPictureParametersEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeH264EmitPictureParametersEXT( uint8_t spsId_ = {},
+                                               VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {},
+                                               uint32_t ppsIdEntryCount_ = {},
+                                               const uint8_t * ppsIdEntries_ = {} ) VULKAN_HPP_NOEXCEPT
+      : spsId( spsId_ )
+      , emitSpsEnable( emitSpsEnable_ )
+      , ppsIdEntryCount( ppsIdEntryCount_ )
+      , ppsIdEntries( ppsIdEntries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT( VideoEncodeH264EmitPictureParametersEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264EmitPictureParametersEXT( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264EmitPictureParametersEXT(
+          *reinterpret_cast<VideoEncodeH264EmitPictureParametersEXT const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264EmitPictureParametersEXT(
+      uint8_t spsId_,
+      VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ )
+      : spsId( spsId_ )
+      , emitSpsEnable( emitSpsEnable_ )
+      , ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) )
+      , ppsIdEntries( psIdEntries_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &
+      operator=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264EmitPictureParametersEXT &
+      operator=( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264EmitPictureParametersEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      spsId = spsId_;
+      return *this;
+    }
+
+    VideoEncodeH264EmitPictureParametersEXT &
+      setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      emitSpsEnable = emitSpsEnable_;
+      return *this;
+    }
+
+    VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntryCount = ppsIdEntryCount_;
+      return *this;
+    }
+
+    VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntries = ppsIdEntries_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264EmitPictureParametersEXT & setPsIdEntries(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
+      ppsIdEntries = psIdEntries_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkVideoEncodeH264EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264EmitPictureParametersEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264EmitPictureParametersEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264EmitPictureParametersEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsId == rhs.spsId ) &&
+             ( emitSpsEnable == rhs.emitSpsEnable ) && ( ppsIdEntryCount == rhs.ppsIdEntryCount ) &&
+             ( ppsIdEntries == rhs.ppsIdEntries );
+    }
+
+    bool operator!=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersEXT;
+    const void * pNext = {};
+    uint8_t spsId = {};
+    VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {};
+    uint32_t ppsIdEntryCount = {};
+    const uint8_t * ppsIdEntries = {};
+  };
+  static_assert( sizeof( VideoEncodeH264EmitPictureParametersEXT ) ==
+                   sizeof( VkVideoEncodeH264EmitPictureParametersEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264EmitPictureParametersEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264EmitPictureParametersEXT>
+  {
+    using Type = VideoEncodeH264EmitPictureParametersEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264NaluSliceEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT(
+      const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {},
+      uint32_t mbCount_ = {},
+      uint8_t refFinalList0EntryCount_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ = {},
+      uint8_t refFinalList1EntryCount_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ = {},
+      uint32_t precedingNaluBytes_ = {},
+      uint8_t minQp_ = {},
+      uint8_t maxQp_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pSliceHeaderStd( pSliceHeaderStd_ )
+      , mbCount( mbCount_ )
+      , refFinalList0EntryCount( refFinalList0EntryCount_ )
+      , pRefFinalList0Entries( pRefFinalList0Entries_ )
+      , refFinalList1EntryCount( refFinalList1EntryCount_ )
+      , pRefFinalList1Entries( pRefFinalList1Entries_ )
+      , precedingNaluBytes( precedingNaluBytes_ )
+      , minQp( minQp_ )
+      , maxQp( maxQp_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeH264NaluSliceEXT( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264NaluSliceEXT( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264NaluSliceEXT( *reinterpret_cast<VideoEncodeH264NaluSliceEXT const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264NaluSliceEXT(
+      const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_,
+      uint32_t mbCount_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
+        refFinalList0Entries_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
+        refFinalList1Entries_ = {},
+      uint32_t precedingNaluBytes_ = {},
+      uint8_t minQp_ = {},
+      uint8_t maxQp_ = {} )
+      : pSliceHeaderStd( pSliceHeaderStd_ )
+      , mbCount( mbCount_ )
+      , refFinalList0EntryCount( static_cast<uint8_t>( refFinalList0Entries_.size() ) )
+      , pRefFinalList0Entries( refFinalList0Entries_.data() )
+      , refFinalList1EntryCount( static_cast<uint8_t>( refFinalList1Entries_.size() ) )
+      , pRefFinalList1Entries( refFinalList1Entries_.data() )
+      , precedingNaluBytes( precedingNaluBytes_ )
+      , minQp( minQp_ )
+      , maxQp( maxQp_ )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &
+      operator=( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264NaluSliceEXT & operator=( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT &
+      setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSliceHeaderStd = pSliceHeaderStd_;
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mbCount = mbCount_;
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT & setRefFinalList0EntryCount( uint8_t refFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refFinalList0EntryCount = refFinalList0EntryCount_;
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT & setPRefFinalList0Entries(
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRefFinalList0Entries = pRefFinalList0Entries_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264NaluSliceEXT & setRefFinalList0Entries(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
+        refFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refFinalList0EntryCount = static_cast<uint8_t>( refFinalList0Entries_.size() );
+      pRefFinalList0Entries = refFinalList0Entries_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoEncodeH264NaluSliceEXT & setRefFinalList1EntryCount( uint8_t refFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refFinalList1EntryCount = refFinalList1EntryCount_;
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT & setPRefFinalList1Entries(
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRefFinalList1Entries = pRefFinalList1Entries_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264NaluSliceEXT & setRefFinalList1Entries(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
+        refFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refFinalList1EntryCount = static_cast<uint8_t>( refFinalList1Entries_.size() );
+      pRefFinalList1Entries = refFinalList1Entries_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoEncodeH264NaluSliceEXT & setPrecedingNaluBytes( uint32_t precedingNaluBytes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      precedingNaluBytes = precedingNaluBytes_;
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT & setMinQp( uint8_t minQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minQp = minQp_;
+      return *this;
+    }
+
+    VideoEncodeH264NaluSliceEXT & setMaxQp( uint8_t maxQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxQp = maxQp_;
+      return *this;
+    }
+
+    operator VkVideoEncodeH264NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264NaluSliceEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264NaluSliceEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264NaluSliceEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264NaluSliceEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSliceHeaderStd == rhs.pSliceHeaderStd ) &&
+             ( mbCount == rhs.mbCount ) && ( refFinalList0EntryCount == rhs.refFinalList0EntryCount ) &&
+             ( pRefFinalList0Entries == rhs.pRefFinalList0Entries ) &&
+             ( refFinalList1EntryCount == rhs.refFinalList1EntryCount ) &&
+             ( pRefFinalList1Entries == rhs.pRefFinalList1Entries ) &&
+             ( precedingNaluBytes == rhs.precedingNaluBytes ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp );
+    }
+
+    bool operator!=( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceEXT;
+    const void * pNext = {};
+    const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {};
+    uint32_t mbCount = {};
+    uint8_t refFinalList0EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries = {};
+    uint8_t refFinalList1EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries = {};
+    uint32_t precedingNaluBytes = {};
+    uint8_t minQp = {};
+    uint8_t maxQp = {};
+  };
+  static_assert( sizeof( VideoEncodeH264NaluSliceEXT ) == sizeof( VkVideoEncodeH264NaluSliceEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264NaluSliceEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceEXT>
+  {
+    using Type = VideoEncodeH264NaluSliceEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
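+
+  // Illustrative sketch, not generated code: a NALU slice entry pairs a std
+  // slice header with the reference lists used for that slice; every name
+  // below is a hypothetical local.
+  //
+  //   StdVideoEncodeH264SliceHeader sliceHeader{};
+  //   std::array<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT, 1> list0{};
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT slice{ &sliceHeader, /*mbCount_=*/120, list0 };
+  //   slice.setMinQp( 18 ).setMaxQp( 34 );  // setters return *this, so they chain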
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264ProfileEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stdProfileIdc( stdProfileIdc_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeH264ProfileEXT( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264ProfileEXT( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264ProfileEXT( *reinterpret_cast<VideoEncodeH264ProfileEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT &
+      operator=( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264ProfileEXT & operator=( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdProfileIdc = stdProfileIdc_;
+      return *this;
+    }
+
+    operator VkVideoEncodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264ProfileEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264ProfileEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264ProfileEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileEXT;
+    const void * pNext = {};
+    StdVideoH264ProfileIdc stdProfileIdc = {};
+  };
+  static_assert( sizeof( VideoEncodeH264ProfileEXT ) == sizeof( VkVideoEncodeH264ProfileEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264ProfileEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileEXT>
+  {
+    using Type = VideoEncodeH264ProfileEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264SessionCreateInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoEncodeH264SessionCreateInfoEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT(
+      VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ = {},
+      VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {},
+      const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , maxPictureSizeInMbs( maxPictureSizeInMbs_ )
+      , pStdExtensionVersion( pStdExtensionVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( VideoEncodeH264SessionCreateInfoEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264SessionCreateInfoEXT( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264SessionCreateInfoEXT( *reinterpret_cast<VideoEncodeH264SessionCreateInfoEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT &
+      operator=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264SessionCreateInfoEXT &
+      operator=( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264SessionCreateInfoEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VideoEncodeH264SessionCreateInfoEXT &
+      setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPictureSizeInMbs = maxPictureSizeInMbs_;
+      return *this;
+    }
+
+    VideoEncodeH264SessionCreateInfoEXT & setPStdExtensionVersion(
+      const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdExtensionVersion = pStdExtensionVersion_;
+      return *this;
+    }
+
+    operator VkVideoEncodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264SessionCreateInfoEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264SessionCreateInfoEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) && ( pStdExtensionVersion == rhs.pStdExtensionVersion );
+    }
+
+    bool operator!=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {};
+    const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
+  };
+  static_assert( sizeof( VideoEncodeH264SessionCreateInfoEXT ) == sizeof( VkVideoEncodeH264SessionCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264SessionCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionCreateInfoEXT>
+  {
+    using Type = VideoEncodeH264SessionCreateInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264SessionParametersAddInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(
+      uint32_t spsStdCount_ = {},
+      const StdVideoH264SequenceParameterSet * pSpsStd_ = {},
+      uint32_t ppsStdCount_ = {},
+      const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT
+      : spsStdCount( spsStdCount_ )
+      , pSpsStd( pSpsStd_ )
+      , ppsStdCount( ppsStdCount_ )
+      , pPpsStd( pPpsStd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(
+      VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264SessionParametersAddInfoEXT(
+          *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
+    {}
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264SessionParametersAddInfoEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ = {} )
+      : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
+      , pSpsStd( spsStd_.data() )
+      , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
+      , pPpsStd( ppsStd_.data() )
+    {}
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT &
+      operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264SessionParametersAddInfoEXT &
+      operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      spsStdCount = spsStdCount_;
+      return *this;
+    }
+
+    VideoEncodeH264SessionParametersAddInfoEXT &
+      setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSpsStd = pSpsStd_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264SessionParametersAddInfoEXT & setSpsStd(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      spsStdCount = static_cast<uint32_t>( spsStd_.size() );
+      pSpsStd = spsStd_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoEncodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppsStdCount = ppsStdCount_;
+      return *this;
+    }
+
+    VideoEncodeH264SessionParametersAddInfoEXT &
+      setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPpsStd = pPpsStd_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264SessionParametersAddInfoEXT & setPpsStd(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
+      pPpsStd = ppsStd_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
+    }
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
+             ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
+    }
+
+    bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
+    const void * pNext = {};
+    uint32_t spsStdCount = {};
+    const StdVideoH264SequenceParameterSet * pSpsStd = {};
+    uint32_t ppsStdCount = {};
+    const StdVideoH264PictureParameterSet * pPpsStd = {};
+  };
+  static_assert( sizeof( VideoEncodeH264SessionParametersAddInfoEXT ) ==
+                   sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264SessionParametersAddInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoEXT>
+  {
+    using Type = VideoEncodeH264SessionParametersAddInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
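+
+  // Illustrative sketch, not generated code: parameters are typically staged
+  // through the AddInfo struct and bounded by the CreateInfo struct that
+  // follows; 'spsArray' and 'ppsArray' are hypothetical locals.
+  //
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT addInfo{ spsArray, ppsArray };
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT createInfo{};
+  //   createInfo.setMaxSpsStdCount( 4 ).setMaxPpsStdCount( 4 ).setPParametersAddInfo( &addInfo );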
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
+  };
+  static_assert( sizeof( VideoEncodeH264SessionParametersCreateInfoEXT ) ==
+                   sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264SessionParametersCreateInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT>
+  {
+    using Type = VideoEncodeH264SessionParametersCreateInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeH264VclFrameInfoEXT
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT(
+      uint8_t refDefaultFinalList0EntryCount_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ = {},
+      uint8_t refDefaultFinalList1EntryCount_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ = {},
+      uint32_t naluSliceEntryCount_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : refDefaultFinalList0EntryCount( refDefaultFinalList0EntryCount_ )
+      , pRefDefaultFinalList0Entries( pRefDefaultFinalList0Entries_ )
+      , refDefaultFinalList1EntryCount( refDefaultFinalList1EntryCount_ )
+      , pRefDefaultFinalList1Entries( pRefDefaultFinalList1Entries_ )
+      , naluSliceEntryCount( naluSliceEntryCount_ )
+      , pNaluSliceEntries( pNaluSliceEntries_ )
+      , pCurrentPictureInfo( pCurrentPictureInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH264VclFrameInfoEXT const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264VclFrameInfoEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & refDefaultFinalList0Entries_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & refDefaultFinalList1Entries_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const & naluSliceEntries_ = {},
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} )
+      : refDefaultFinalList0EntryCount( static_cast<uint8_t>( refDefaultFinalList0Entries_.size() ) )
+      , pRefDefaultFinalList0Entries( refDefaultFinalList0Entries_.data() )
+      , refDefaultFinalList1EntryCount( static_cast<uint8_t>( refDefaultFinalList1Entries_.size() ) )
+      , pRefDefaultFinalList1Entries( refDefaultFinalList1Entries_.data() )
+      , naluSliceEntryCount( static_cast<uint32_t>( naluSliceEntries_.size() ) )
+      , pNaluSliceEntries( naluSliceEntries_.data() )
+      , pCurrentPictureInfo( pCurrentPictureInfo_ )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &
+      operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeH264VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeH264VclFrameInfoEXT &
+      setRefDefaultFinalList0EntryCount( uint8_t refDefaultFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refDefaultFinalList0EntryCount = refDefaultFinalList0EntryCount_;
+      return *this;
+    }
+
+    VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList0Entries(
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRefDefaultFinalList0Entries = pRefDefaultFinalList0Entries_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList0Entries(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & refDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refDefaultFinalList0EntryCount = static_cast<uint8_t>( refDefaultFinalList0Entries_.size() );
+      pRefDefaultFinalList0Entries = refDefaultFinalList0Entries_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoEncodeH264VclFrameInfoEXT &
+      setRefDefaultFinalList1EntryCount( uint8_t refDefaultFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refDefaultFinalList1EntryCount = refDefaultFinalList1EntryCount_;
+      return *this;
+    }
+
+    VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList1Entries(
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRefDefaultFinalList1Entries = pRefDefaultFinalList1Entries_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList1Entries(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & refDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refDefaultFinalList1EntryCount = static_cast<uint8_t>( refDefaultFinalList1Entries_.size() );
+      pRefDefaultFinalList1Entries = refDefaultFinalList1Entries_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      naluSliceEntryCount = naluSliceEntryCount_;
+      return *this;
+    }
+
+    VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries(
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNaluSliceEntries = pNaluSliceEntries_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      naluSliceEntryCount = static_cast<uint32_t>( naluSliceEntries_.size() );
+      pNaluSliceEntries = naluSliceEntries_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo(
+      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCurrentPictureInfo = pCurrentPictureInfo_;
+      return *this;
+    }
+
+    operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264VclFrameInfoEXT *>( this );
+    }
+
+    operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264VclFrameInfoEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264VclFrameInfoEXT const & ) const = default;
+# else
+    bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( refDefaultFinalList0EntryCount == rhs.refDefaultFinalList0EntryCount ) &&
+             ( pRefDefaultFinalList0Entries == rhs.pRefDefaultFinalList0Entries ) &&
+             ( refDefaultFinalList1EntryCount == rhs.refDefaultFinalList1EntryCount ) &&
+             ( pRefDefaultFinalList1Entries == rhs.pRefDefaultFinalList1Entries ) &&
+             ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && ( pNaluSliceEntries == rhs.pNaluSliceEntries ) &&
+             ( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
+    }
+
+    bool operator!=( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
+    const void * pNext = {};
+    uint8_t refDefaultFinalList0EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries = {};
+    uint8_t refDefaultFinalList1EntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries = {};
+    uint32_t naluSliceEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo = {};
+  };
+  static_assert( sizeof( VideoEncodeH264VclFrameInfoEXT ) == sizeof( VkVideoEncodeH264VclFrameInfoEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeH264VclFrameInfoEXT>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264VclFrameInfoEXT>
+  {
+    using Type = VideoEncodeH264VclFrameInfoEXT;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
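Usage sketch (illustrative only, not part of the patch): the enhanced-mode constructor above derives the three count members from ArrayProxyNoTemporaries views, so the application never writes the counts by hand. Assumes VK_ENABLE_BETA_EXTENSIONS is defined and the provisional video-encode headers are in use; the array names are hypothetical.

    #include <vulkan/vulkan.hpp>
    #include <array>

    void buildVclFrameInfo()
    {
      std::array<vk::VideoEncodeH264DpbSlotInfoEXT, 2> dpbSlots{};  // reference list 0
      std::array<vk::VideoEncodeH264NaluSliceEXT, 1>   slices{};    // one NALU slice
      // Counts are inferred: refDefaultFinalList0EntryCount == 2, naluSliceEntryCount == 1.
      vk::VideoEncodeH264VclFrameInfoEXT frameInfo( dpbSlots, {}, slices );
    }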
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEncodeRateControlInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {},
+                                     VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ =
+                                       VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone,
+                                     uint32_t averageBitrate_ = {},
+                                     uint16_t peakToAverageBitrateRatio_ = {},
+                                     uint16_t frameRateNumerator_ = {},
+                                     uint16_t frameRateDenominator_ = {},
+                                     uint32_t virtualBufferSizeInMs_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , rateControlMode( rateControlMode_ )
+      , averageBitrate( averageBitrate_ )
+      , peakToAverageBitrateRatio( peakToAverageBitrateRatio_ )
+      , frameRateNumerator( frameRateNumerator_ )
+      , frameRateDenominator( frameRateDenominator_ )
+      , virtualBufferSizeInMs( virtualBufferSizeInMs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeRateControlInfoKHR( *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &
+      operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR &
+      setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR & setRateControlMode(
+      VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rateControlMode = rateControlMode_;
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR & setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      averageBitrate = averageBitrate_;
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR &
+      setPeakToAverageBitrateRatio( uint16_t peakToAverageBitrateRatio_ ) VULKAN_HPP_NOEXCEPT
+    {
+      peakToAverageBitrateRatio = peakToAverageBitrateRatio_;
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR & setFrameRateNumerator( uint16_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameRateNumerator = frameRateNumerator_;
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR & setFrameRateDenominator( uint16_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameRateDenominator = frameRateDenominator_;
+      return *this;
+    }
+
+    VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      virtualBufferSizeInMs = virtualBufferSizeInMs_;
+      return *this;
+    }
+
+    operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeRateControlInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeRateControlInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default;
+# else
+    bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( rateControlMode == rhs.rateControlMode ) && ( averageBitrate == rhs.averageBitrate ) &&
+             ( peakToAverageBitrateRatio == rhs.peakToAverageBitrateRatio ) &&
+             ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ) &&
+             ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs );
+    }
+
+    bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode =
+      VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone;
+    uint32_t averageBitrate = {};
+    uint16_t peakToAverageBitrateRatio = {};
+    uint16_t frameRateNumerator = {};
+    uint16_t frameRateDenominator = {};
+    uint32_t virtualBufferSizeInMs = {};
+  };
+  static_assert( sizeof( VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoEncodeRateControlInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
+  {
+    using Type = VideoEncodeRateControlInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
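Usage sketch (illustrative only, not part of the patch): because every setter returns *this, rate-control parameters can be configured fluently. The concrete values (CBR, 5 Mbit/s, 60/1 fps) are a hypothetical configuration, assuming the beta video-encode extension is enabled.

    auto rateControl = vk::VideoEncodeRateControlInfoKHR{}
                         .setRateControlMode( vk::VideoEncodeRateControlModeFlagBitsKHR::eCbr )
                         .setAverageBitrate( 5000000 )       // bits per second
                         .setFrameRateNumerator( 60 )
                         .setFrameRateDenominator( 1 )
                         .setVirtualBufferSizeInMs( 1000 );  // 1 s leaky-bucket window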
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoQueueFamilyProperties2KHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoQueueFamilyProperties2KHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR(
+      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : videoCodecOperations( videoCodecOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      VideoQueueFamilyProperties2KHR( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoQueueFamilyProperties2KHR( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoQueueFamilyProperties2KHR( *reinterpret_cast<VideoQueueFamilyProperties2KHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR &
+      operator=( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoQueueFamilyProperties2KHR & operator=( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    VideoQueueFamilyProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VideoQueueFamilyProperties2KHR & setVideoCodecOperations(
+      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoCodecOperations = videoCodecOperations_;
+      return *this;
+    }
+
+    operator VkVideoQueueFamilyProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoQueueFamilyProperties2KHR *>( this );
+    }
+
+    operator VkVideoQueueFamilyProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoQueueFamilyProperties2KHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoQueueFamilyProperties2KHR const & ) const = default;
+# else
+    bool operator==( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations );
+    }
+
+    bool operator!=( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoQueueFamilyProperties2KHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
+  };
+  static_assert( sizeof( VideoQueueFamilyProperties2KHR ) == sizeof( VkVideoQueueFamilyProperties2KHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VideoQueueFamilyProperties2KHR>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoQueueFamilyProperties2KHR>
+  {
+    using Type = VideoQueueFamilyProperties2KHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
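Usage sketch (illustrative only, not part of the patch): unlike the create-info wrappers, this is a query-output structure, so it is hung off QueueFamilyProperties2::pNext. 'gpu' is a hypothetical vk::PhysicalDevice, and the loop over all families is reduced to a single family for brevity.

    vk::VideoQueueFamilyProperties2KHR videoProps;
    vk::QueueFamilyProperties2         props;
    props.pNext = &videoProps;  // hand-built pNext chain for the query
    uint32_t count = 1;
    gpu.getQueueFamilyProperties2( &count, &props );
    bool decodesH264 = static_cast<bool>( videoProps.videoCodecOperations &
                                          vk::VideoCodecOperationFlagBitsKHR::eDecodeH264EXT );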
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  struct WaylandSurfaceCreateInfoKHR
+  {
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {},
+                                                      struct wl_display * display_ = {},
+                                                      struct wl_surface * surface_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , display( display_ )
+      , surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR &
+      operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR &
+      setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
     {
       display = display_;
       return *this;
     }
 
-    WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface* surface_ ) VULKAN_HPP_NOEXCEPT
+    WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
     {
       surface = surface_;
       return *this;
     }
-
-    operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( this );
     }
 
     operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( WaylandSurfaceCreateInfoKHR const& ) const = default;
-#else
-    bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default;
+# else
+    bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( display == rhs.display )
-          && ( surface == rhs.surface );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) &&
+             ( surface == rhs.surface );
     }
 
-    bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
-    struct wl_display* display = {};
-    struct wl_surface* surface = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
+    struct wl_display * display = {};
+    struct wl_surface * surface = {};
   };
-  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
@@ -74186,19 +100065,22 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
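Usage sketch (illustrative only, not part of the patch): a typical call site for the struct above. 'instance' is an already-created vk::Instance; the wl_display/wl_surface handles are assumed to come from the application's Wayland setup.

    vk::WaylandSurfaceCreateInfoKHR surfaceInfo( {}, wlDisplay, wlSurface );
    vk::SurfaceKHR surface = instance.createWaylandSurfaceKHR( surfaceInfo );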
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
   struct Win32KeyedMutexAcquireReleaseInfoKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
 
-    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {},
-                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {},
-                                                               const uint64_t* pAcquireKeys_ = {},
-                                                               const uint32_t* pAcquireTimeouts_ = {},
-                                                               uint32_t releaseCount_ = {},
-                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
-                                                               const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {},
+                                            const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
+                                            const uint64_t * pAcquireKeys_ = {},
+                                            const uint32_t * pAcquireTimeouts_ = {},
+                                            uint32_t releaseCount_ = {},
+                                            const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
+                                            const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
     : acquireCount( acquireCount_ )
     , pAcquireSyncs( pAcquireSyncs_ )
     , pAcquireKeys( pAcquireKeys_ )
@@ -74208,24 +100090,79 @@ namespace VULKAN_HPP_NAMESPACE
     , pReleaseKeys( pReleaseKeys_ )
     {}
 
-    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) - offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
 
     Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
+    {}
 
-    Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
+      : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
+      , pAcquireSyncs( acquireSyncs_.data() )
+      , pAcquireKeys( acquireKeys_.data() )
+      , pAcquireTimeouts( acquireTimeouts_.data() )
+      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
+      , pReleaseSyncs( releaseSyncs_.data() )
+      , pReleaseKeys( releaseKeys_.data() )
     {
-      *this = *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>(&rhs);
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
+# else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
+      }
+      if ( acquireSyncs_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+# else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
+      operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -74237,92 +100174,138 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
     {
       pAcquireSyncs = pAcquireSyncs_;
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
     {
       pAcquireKeys = pAcquireKeys_;
      return *this;
    }
 
-    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
     {
       pAcquireTimeouts = pAcquireTimeouts_;
       return *this;
     }
 
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
+      pAcquireTimeouts = acquireTimeouts_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
     Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
     {
       releaseCount = releaseCount_;
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
     {
       pReleaseSyncs = pReleaseSyncs_;
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
     {
       pReleaseKeys = pReleaseKeys_;
       return *this;
     }
-
-    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
     }
 
     operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const& ) const = default;
-#else
-    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default;
+# else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( acquireCount == rhs.acquireCount )
-          && ( pAcquireSyncs == rhs.pAcquireSyncs )
-          && ( pAcquireKeys == rhs.pAcquireKeys )
-          && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
-          && ( releaseCount == rhs.releaseCount )
-          && ( pReleaseSyncs == rhs.pReleaseSyncs )
-          && ( pReleaseKeys == rhs.pReleaseKeys );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) &&
+             ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) &&
+             ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) &&
+             ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
     }
 
-    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
-    const void* pNext = {};
-    uint32_t acquireCount = {};
-    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
-    const uint64_t* pAcquireKeys = {};
-    const uint32_t* pAcquireTimeouts = {};
-    uint32_t releaseCount = {};
-    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
-    const uint64_t* pReleaseKeys = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+    const void * pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
+    const uint64_t * pAcquireKeys = {};
+    const uint32_t * pAcquireTimeouts = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
+    const uint64_t * pReleaseKeys = {};
   };
-  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
@@ -74331,19 +100314,22 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
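Usage sketch (illustrative only, not part of the patch): the struct is consumed through SubmitInfo::pNext. The ArrayProxy constructor above enforces that the acquire arrays stay parallel, throwing LogicError (or asserting under VULKAN_HPP_NO_EXCEPTIONS) on a size mismatch. 'sharedMem', 'cmd' and 'queue' are hypothetical handles from the application's D3D-interop setup.

    std::array<vk::DeviceMemory, 1> syncs    = { sharedMem };
    std::array<uint64_t, 1>         keys     = { 0 };
    std::array<uint32_t, 1>         timeouts = { 0xFFFFFFFF };  // i.e. INFINITE
    vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutex( syncs, keys, timeouts );
    vk::SubmitInfo submitInfo{};
    submitInfo.setPNext( &keyedMutex ).setCommandBufferCount( 1 ).setPCommandBuffers( &cmd );
    queue.submit( submitInfo );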
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
   struct Win32KeyedMutexAcquireReleaseInfoNV
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
 
-    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
-                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {},
-                                                              const uint64_t* pAcquireKeys_ = {},
-                                                              const uint32_t* pAcquireTimeoutMilliseconds_ = {},
-                                                              uint32_t releaseCount_ = {},
-                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
-                                                              const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
+                                           const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
+                                           const uint64_t * pAcquireKeys_ = {},
+                                           const uint32_t * pAcquireTimeoutMilliseconds_ = {},
+                                           uint32_t releaseCount_ = {},
+                                           const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
+                                           const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
     : acquireCount( acquireCount_ )
     , pAcquireSyncs( pAcquireSyncs_ )
     , pAcquireKeys( pAcquireKeys_ )
@@ -74353,24 +100339,79 @@ namespace VULKAN_HPP_NAMESPACE
     , pReleaseKeys( pReleaseKeys_ )
     {}
 
-    Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
 
     Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
+    {}
 
-    Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
+      : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
+      , pAcquireSyncs( acquireSyncs_.data() )
+      , pAcquireKeys( acquireKeys_.data() )
+      , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() )
+      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
+      , pReleaseSyncs( releaseSyncs_.data() )
+      , pReleaseKeys( releaseKeys_.data() )
     {
-      *this = *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>(&rhs);
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
+# else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
+      }
+      if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+# else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
+      operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
@@ -74382,92 +100423,141 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
     {
       pAcquireSyncs = pAcquireSyncs_;
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
     {
       pAcquireKeys = pAcquireKeys_;
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
      return *this;
    }
 
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
+      pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
     Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
     {
       releaseCount = releaseCount_;
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
     {
       pReleaseSyncs = pReleaseSyncs_;
       return *this;
     }
 
-    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
     {
       pReleaseKeys = pReleaseKeys_;
       return *this;
     }
-
-    operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
     }
 
     operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const& ) const = default;
-#else
-    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default;
+# else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( acquireCount == rhs.acquireCount )
-          && ( pAcquireSyncs == rhs.pAcquireSyncs )
-          && ( pAcquireKeys == rhs.pAcquireKeys )
-          && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
-          && ( releaseCount == rhs.releaseCount )
-          && ( pReleaseSyncs == rhs.pReleaseSyncs )
-          && ( pReleaseKeys == rhs.pReleaseKeys );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) &&
+             ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) &&
+             ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) &&
+             ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) &&
+             ( pReleaseKeys == rhs.pReleaseKeys );
     }
 
-    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
-    const void* pNext = {};
-    uint32_t acquireCount = {};
-    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
-    const uint64_t* pAcquireKeys = {};
-    const uint32_t* pAcquireTimeoutMilliseconds = {};
-    uint32_t releaseCount = {};
-    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
-    const uint64_t* pReleaseKeys = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+    const void * pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
+    const uint64_t * pAcquireKeys = {};
+    const uint32_t * pAcquireTimeoutMilliseconds = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
+    const uint64_t * pReleaseKeys = {};
   };
-  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ),
                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
@@ -74476,38 +100566,39 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
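Usage sketch (illustrative only, not part of the patch): the NV variant (VK_NV_win32_keyed_mutex) mirrors the KHR struct but names the timeout member pAcquireTimeoutMilliseconds; the same parallel-array invariant is checked by its ArrayProxy constructor. Reusing the hypothetical arrays from the KHR sketch above:

    vk::Win32KeyedMutexAcquireReleaseInfoNV keyedMutexNV( syncs, keys, timeouts /* ms */ );
    // chains into SubmitInfo::pNext exactly like the KHR version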
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
   struct Win32SurfaceCreateInfoKHR
   {
-    static const bool allowDuplicate = false;
+    static const bool allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
 
-    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {},
-                                                    HINSTANCE hinstance_ = {},
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {},
+                                                    HINSTANCE hinstance_ = {},
                                                     HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT
     : flags( flags_ )
     , hinstance( hinstance_ )
     , hwnd( hwnd_ )
     {}
 
-    Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( Win32SurfaceCreateInfoKHR ) - offsetof( Win32SurfaceCreateInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR
+      Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR &
+      operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
       return *this;
     }
 
-    Win32SurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -74531,48 +100622,42 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( this );
    }
 
     operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>( this );
     }
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( Win32SurfaceCreateInfoKHR const& ) const = default;
-#else
-    bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default;
+# else
+    bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( hinstance == rhs.hinstance )
-          && ( hwnd == rhs.hwnd );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd );
     }
 
-    bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
-    HINSTANCE hinstance = {};
-    HWND hwnd = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
+    HINSTANCE hinstance = {};
+    HWND hwnd = {};
   };
-  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
@@ -74581,245 +100666,117 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
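Usage sketch (illustrative only, not part of the patch): the usual call site for the struct above. 'instance' is a created vk::Instance; hInstance/hWnd come from the application's Win32 window setup.

    vk::Win32SurfaceCreateInfoKHR surfaceInfo( {}, hInstance, hWnd );
    vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceInfo );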
-  struct WriteDescriptorSet
-  {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
-
-    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
-                                             uint32_t dstBinding_ = {},
-                                             uint32_t dstArrayElement_ = {},
-                                             uint32_t descriptorCount_ = {},
-                                             VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
-                                             const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {},
-                                             const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {},
-                                             const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT
-    : dstSet( dstSet_ )
-    , dstBinding( dstBinding_ )
-    , dstArrayElement( dstArrayElement_ )
-    , descriptorCount( descriptorCount_ )
-    , descriptorType( descriptorType_ )
-    , pImageInfo( pImageInfo_ )
-    , pBufferInfo( pBufferInfo_ )
-    , pTexelBufferView( pTexelBufferView_ )
-    {}
-
-    WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSet ) - offsetof( WriteDescriptorSet, pNext ) );
-      return *this;
-    }
-
-    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
-
-    WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<WriteDescriptorSet const *>(&rhs);
-      return *this;
-    }
-
-    WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstSet = dstSet_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstBinding = dstBinding_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
-    {
-      dstArrayElement = dstArrayElement_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorCount = descriptorCount_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
-    {
-      descriptorType = descriptorType_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pImageInfo = pImageInfo_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pBufferInfo = pBufferInfo_;
-      return *this;
-    }
-
-    WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pTexelBufferView = pTexelBufferView_;
-      return *this;
-    }
-
-
-    operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
-    }
-
-    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
-    {
-      return *reinterpret_cast<VkWriteDescriptorSet*>( this );
-    }
-
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( WriteDescriptorSet const& ) const = default;
-#else
-    bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dstSet == rhs.dstSet )
-          && ( dstBinding == rhs.dstBinding )
-          && ( dstArrayElement == rhs.dstArrayElement )
-          && ( descriptorCount == rhs.descriptorCount )
-          && ( descriptorType == rhs.descriptorType )
-          && ( pImageInfo == rhs.pImageInfo )
-          && ( pBufferInfo == rhs.pBufferInfo )
-          && ( pTexelBufferView == rhs.pTexelBufferView );
-    }
-
-    bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return !operator==( rhs );
-    }
-#endif
-
-
-
-  public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
-    uint32_t dstBinding = {};
-    uint32_t dstArrayElement = {};
-    uint32_t descriptorCount = {};
-    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
-    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
-    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
-    const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
-
-  };
-  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
-
-  template <>
-  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
-  {
-    using Type = WriteDescriptorSet;
-  };
-
   struct WriteDescriptorSetAccelerationStructureKHR
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eWriteDescriptorSetAccelerationStructureKHR;
 
-    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {},
-                                                                     const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(
+      uint32_t accelerationStructureCount_ = {},
+      const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
     : accelerationStructureCount( accelerationStructureCount_ )
     , pAccelerationStructures( pAccelerationStructures_ )
     {}
 
-    WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetAccelerationStructureKHR ) - offsetof( WriteDescriptorSetAccelerationStructureKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(
+      WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+    WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSetAccelerationStructureKHR(
+          *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
+    {}
 
-    WriteDescriptorSetAccelerationStructureKHR& operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>(&rhs);
-      return *this;
-    }
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ )
+      : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
+      , pAccelerationStructures( accelerationStructures_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    WriteDescriptorSetAccelerationStructureKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
+      operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetAccelerationStructureKHR &
+      operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetAccelerationStructureKHR &
+      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
     {
       accelerationStructureCount = accelerationStructureCount_;
       return *this;
     }
 
-    WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures(
+      const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
     {
       pAccelerationStructures = pAccelerationStructures_;
       return *this;
     }
 
-
-    operator VkWriteDescriptorSetAccelerationStructureKHR const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
+      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+      pAccelerationStructures = accelerationStructures_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR *>( this );
     }
 
     operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default;
 #else
-    bool operator==( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( accelerationStructureCount == rhs.accelerationStructureCount )
-          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
+             ( pAccelerationStructures == rhs.pAccelerationStructures );
     }
 
-    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
-    const void* pNext = {};
-    uint32_t accelerationStructureCount = {};
-    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+    const void * pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
   };
-  static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) ==
+                   sizeof( VkWriteDescriptorSetAccelerationStructureKHR ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
@@ -74827,35 +100784,164 @@ namespace VULKAN_HPP_NAMESPACE
    using Type = WriteDescriptorSetAccelerationStructureKHR;
  };
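Usage sketch (illustrative only, not part of the patch): binding a top-level acceleration structure for ray tracing. The struct above rides on WriteDescriptorSet::pNext, and descriptorCount must equal accelerationStructureCount. 'device', 'descriptorSet' and 'tlas' are hypothetical application objects.

    vk::WriteDescriptorSetAccelerationStructureKHR accelWrite( tlas );  // count inferred as 1
    vk::WriteDescriptorSet write;
    write.setPNext( &accelWrite )
         .setDstSet( descriptorSet )
         .setDescriptorCount( 1 )
         .setDescriptorType( vk::DescriptorType::eAccelerationStructureKHR );
    device.updateDescriptorSets( write, nullptr );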
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const &
+        accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+      pAccelerationStructures    = accelerationStructures_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
+             ( pAccelerationStructures == rhs.pAccelerationStructures );
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+    const void *                        pNext = {};
+    uint32_t                            accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};
+  };
+  static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) ==
+                   sizeof( VkWriteDescriptorSetAccelerationStructureNV ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureNV>::value,
+                 "struct wrapper is not a standard layout!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
+  {
+    using Type = WriteDescriptorSetAccelerationStructureNV;
+  };
+
   struct WriteDescriptorSetInlineUniformBlockEXT
   {
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
 
-    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {},
-                                                                  const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t     dataSize_ = {},
+                                                                  const void * pData_    = {} ) VULKAN_HPP_NOEXCEPT
      : dataSize( dataSize_ )
      , pData( pData_ )
    {}
 
-    WriteDescriptorSetInlineUniformBlockEXT & operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) - offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
 
     WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : WriteDescriptorSetInlineUniformBlockEXT(
+          *reinterpret_cast<WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs ) )
+    {}
 
-    WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlockEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
+      : dataSize( static_cast<uint32_t>( data_.size() * sizeof( T ) ) ), pData( data_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlockEXT &
+      operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetInlineUniformBlockEXT &
+      operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<WriteDescriptorSetInlineUniformBlockEXT const*>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs );
       return *this;
     }
 
-    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -74867,52 +100953,58 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
     {
       pData = pData_;
       return *this;
     }
 
-
-    operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlockEXT &
+      setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+      dataSize = static_cast<uint32_t>( data_.size() * sizeof( T ) );
+      pData    = data_.data();
+      return *this;
+    }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    operator VkWriteDescriptorSetInlineUniformBlockEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT *>( this );
    }
 
     operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const& ) const = default;
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const & ) const = default;
 #else
-    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dataSize == rhs.dataSize )
-          && ( pData == rhs.pData );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
     }
 
-    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
-
-
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
-    const void* pNext = {};
-    uint32_t dataSize = {};
-    const void* pData = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+    const void *                        pNext    = {};
+    uint32_t                            dataSize = {};
+    const void *                        pData    = {};
   };
-  static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value, "struct wrapper is not a standard layout!" );
+  static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) ==
+                   sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ),
+                 "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value,
+                 "struct wrapper is not a standard layout!" );
 
   template <>
   struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlockEXT>
@@ -74920,38 +101012,38 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = WriteDescriptorSetInlineUniformBlockEXT;
   };
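// ---- Editor's example (not part of the patch) ------------------------------
// Sketch of the templated setData overload generated above: for an inline
// uniform block the byte size is data_.size() * sizeof( T ), so handing it a
// typed range fills dataSize and pData together. T cannot be deduced from a
// container directly, so the proxy is spelled out; values are illustrative.
#include <array>
#include <vulkan/vulkan.hpp>

void fillInlineUniformWrite()
{
  std::array<float, 4> color{ 1.0f, 0.5f, 0.25f, 1.0f };
  vk::WriteDescriptorSetInlineUniformBlockEXT write{};
  write.setData( vk::ArrayProxyNoTemporaries<const float>( color ) );
  // dataSize is now 4 * sizeof( float ) = 16 bytes, pData == color.data()
}
// -----------------------------------------------------------------------------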
 
-#ifdef VK_USE_PLATFORM_XCB_KHR
+#if defined( VK_USE_PLATFORM_XCB_KHR )
   struct XcbSurfaceCreateInfoKHR
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
 
-    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {},
-                                                  xcb_connection_t* connection_ = {},
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {},
+                                                  xcb_connection_t *                             connection_ = {},
                                                   xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT
       : flags( flags_ )
       , connection( connection_ )
       , window( window_ )
     {}
 
-    XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( XcbSurfaceCreateInfoKHR ) - offsetof( XcbSurfaceCreateInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &
+      operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<XcbSurfaceCreateInfoKHR const*>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
       return *this;
     }
 
-    XcbSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -74963,7 +101055,7 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t* connection_ ) VULKAN_HPP_NOEXCEPT
+    XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
     {
       connection = connection_;
       return *this;
@@ -74975,47 +101067,40 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( this );
     }
 
     operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( XcbSurfaceCreateInfoKHR const& ) const = default;
-#else
-    bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( XcbSurfaceCreateInfoKHR const & ) const = default;
+# else
+    bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( connection == rhs.connection )
-          && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( connection == rhs.connection ) && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
     }
 
-    bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
-    xcb_connection_t* connection = {};
-    xcb_window_t window = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType            sType      = StructureType::eXcbSurfaceCreateInfoKHR;
+    const void *                                   pNext      = {};
+    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags      = {};
+    xcb_connection_t *                             connection = {};
+    xcb_window_t                                   window     = {};
   };
-  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ),
+                 "struct and wrapper have different size!" );
   static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
 
   template <>
@@ -75025,38 +101110,38 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_XCB_KHR*/
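// ---- Editor's example (not part of the patch) ------------------------------
// Sketch of filling the XCB create-info wrapper above through its chained
// setters and creating the surface in enhanced mode (throws on failure when
// exceptions are enabled). 'connection' and 'window' are assumed to come from
// the caller's XCB setup; the default dispatcher is assumed initialized.
#if defined( VK_USE_PLATFORM_XCB_KHR )
#include <vulkan/vulkan.hpp>

vk::SurfaceKHR makeXcbSurface( vk::Instance instance, xcb_connection_t * connection, xcb_window_t window )
{
  vk::XcbSurfaceCreateInfoKHR createInfo{};
  createInfo.setConnection( connection ).setWindow( window );
  return instance.createXcbSurfaceKHR( createInfo );
}
#endif
// -----------------------------------------------------------------------------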
 
-#ifdef VK_USE_PLATFORM_XLIB_KHR
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
   struct XlibSurfaceCreateInfoKHR
   {
-    static const bool allowDuplicate = false;
+    static const bool                    allowDuplicate = false;
     static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
 
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
     VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {},
-                                                   Display* dpy_ = {},
+                                                   Display *                                       dpy_   = {},
                                                    Window window_ = {} ) VULKAN_HPP_NOEXCEPT
       : flags( flags_ )
       , dpy( dpy_ )
       , window( window_ )
     {}
 
-    XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      memcpy( &pNext, &rhs.pNext, sizeof( XlibSurfaceCreateInfoKHR ) - offsetof( XlibSurfaceCreateInfoKHR, pNext ) );
-      return *this;
-    }
+    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
     XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-    {
-      *this = rhs;
-    }
+      : XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
+    {}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
 
-    XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR &
+      operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<XlibSurfaceCreateInfoKHR const*>(&rhs);
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }
 
-    XlibSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
@@ -75068,7 +101153,7 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-    XlibSurfaceCreateInfoKHR & setDpy( Display* dpy_ ) VULKAN_HPP_NOEXCEPT
+    XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT
     {
       dpy = dpy_;
       return *this;
@@ -75080,47 +101165,40 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-
-    operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( this );
     }
 
     operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
+      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR *>( this );
     }
 
-
-#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
-    auto operator<=>( XlibSurfaceCreateInfoKHR const& ) const = default;
-#else
-    bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( XlibSurfaceCreateInfoKHR const & ) const = default;
+# else
+    bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( dpy == rhs.dpy )
-          && ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) &&
+             ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
     }
 
-    bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
-
-
+# endif
   public:
-    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
-    const void* pNext = {};
-    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
-    Display* dpy = {};
-    Window window = {};
-
+    VULKAN_HPP_NAMESPACE::StructureType             sType  = StructureType::eXlibSurfaceCreateInfoKHR;
+    const void *                                    pNext  = {};
+    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags  = {};
+    Display *                                       dpy    = {};
+    Window                                          window = {};
  };
-  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ),
+                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!"
); template <> @@ -75130,10121 +101208,22589 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ - template - VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT + class DebugReportCallbackEXT { - return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pInstance ) ) ); - } + public: + using CType = VkDebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default; + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( debugReportCallbackEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = debugReportCallbackEXT; + return *this; + } +#endif + + DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugReportCallbackEXT const & ) const = default; +#else + bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; + } + + bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; + } + + bool operator<( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == VK_NULL_HANDLE; + } + + private: + VkDebugReportCallbackEXT m_debugReportCallbackEXT = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default; + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; + } +#endif + + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerEXT const & ) const = default; +#else + bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; + } + + bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; + } + + bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; + } + + private: + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Instance; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDebugReportCallbackEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSurfaceKHR = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Instance + { + public: + using CType = VkInstance; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eInstance; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + + public: + VULKAN_HPP_CONSTEXPR Instance() = default; + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT + { + m_instance = instance; + return *this; + } +#endif + + Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_instance = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Instance const & ) const = default; +#else + bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance == rhs.m_instance; + } + + bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance != rhs.m_instance; + } + + bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance < rhs.m_instance; + } +#endif + + //=== VK_VERSION_1_0 === + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + 
enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction + getProcAddr( const char * pName, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction + getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_surface === + + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( + const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
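// ---- Editor's example (not part of the patch) ------------------------------
// The Instance declarations above come in pairs: a raw count/pointer overload
// and an enhanced-mode overload that returns a std::vector. A sketch of the
// vector-returning form, assuming default dispatch and exceptions enabled:
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<vk::PhysicalDevice> listGpus( vk::Instance instance )
{
  // Internally performs the usual two calls to vkEnumeratePhysicalDevices:
  // once for the count, once for the data.
  return instance.enumeratePhysicalDevices();
}
// -----------------------------------------------------------------------------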
const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + VULKAN_HPP_NODISCARD Result + createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + VULKAN_HPP_NODISCARD Result + createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + VULKAN_HPP_NODISCARD Result + createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional 
allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + template + VULKAN_HPP_NODISCARD Result + createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + VULKAN_HPP_NODISCARD Result + createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + template + VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( + const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDebugReportCallbackEXT( + const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugReportCallbackEXT( + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + template + VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + template + VULKAN_HPP_NODISCARD Result + createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + template + VULKAN_HPP_NODISCARD Result + createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + template + VULKAN_HPP_NODISCARD Result + createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + template + VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type createDebugUtilsMessengerEXT( + const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugUtilsMessengerEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif 
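// ---- Editor's example (not part of the patch) ------------------------------
// Sketch of the VK_EXT_debug_utils entry points declared above, using the
// unique-handle variant so the messenger destroys itself on scope exit. The
// callback and the severity/type masks are illustrative only, and the default
// dispatcher is assumed to have been initialized with the instance.
#include <vulkan/vulkan.hpp>

vk::UniqueDebugUtilsMessengerEXT makeMessenger( vk::Instance instance,
                                                PFN_vkDebugUtilsMessengerCallbackEXT callback )
{
  vk::DebugUtilsMessengerCreateInfoEXT createInfo{};
  createInfo
    .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
                         vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
    .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
                     vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation )
    .setPfnUserCallback( callback );
  return instance.createDebugUtilsMessengerEXTUnique( createInfo );
}
// -----------------------------------------------------------------------------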
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + template + VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + template + VULKAN_HPP_NODISCARD Result + createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
VULKAN_HPP_INLINE + typename ResultValueType>::type + createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_headless_surface === + + template + VULKAN_HPP_NODISCARD Result + createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + template + VULKAN_HPP_NODISCARD Result + createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + template + VULKAN_HPP_NODISCARD Result + createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_instance != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_instance == VK_NULL_HANDLE; + } + + private: + VkInstance m_instance = {}; + }; + static_assert( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), + "handle and wrapper have different size!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + //=== VK_VERSION_1_0 === + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueInstance = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + template + VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createInstance( const InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createInstanceUnique( const InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( + const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ExtensionPropertiesAllocator, + typename std::enable_if::value, 
int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = LayerPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( + uint32_t * pApiVersion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pInstance ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + createInstance( const InstanceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) { VULKAN_HPP_NAMESPACE::Instance instance; - Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); - return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" ); + Result result = static_cast( + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createInstanceUnique( const InstanceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) { VULKAN_HPP_NAMESPACE::Instance 
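// Usage sketch (illustrative, not part of the patch): createInstanceUnique()
// pairs the new instance with the ObjectDestroy deleter declared above, so the
// returned vk::UniqueInstance calls vkDestroyInstance when it leaves scope:
#include <vulkan/vulkan.hpp>

vk::UniqueInstance makeInstance()
{
  vk::ApplicationInfo appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );
  return vk::createInstanceUnique( createInfo );
}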
instance; - Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); - - ObjectDestroy deleter( allocator, d ); - return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const &d ) - { - std::vector properties; - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? 
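// Usage sketch (illustrative, not part of the patch): the removed implementation
// above is the standard Vulkan two-call idiom, i.e. query the count, size the
// vector, fetch, and retry while the driver reports VK_INCOMPLETE because the
// set can change between the two calls. The same idiom against the C API:
#include <vector>
#include <vulkan/vulkan.h>

std::vector<VkExtensionProperties> listInstanceExtensions()
{
  std::vector<VkExtensionProperties> properties;
  uint32_t count = 0;
  VkResult result;
  do
  {
    result = vkEnumerateInstanceExtensionProperties( nullptr, &count, nullptr );
    if ( result == VK_SUCCESS && count != 0 )
    {
      properties.resize( count );
      result = vkEnumerateInstanceExtensionProperties( nullptr, &count, properties.data() );
    }
  } while ( result == VK_INCOMPLETE );
  properties.resize( count );  // the count may shrink on the second call
  return properties;
}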
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d ) - { - std::vector properties; - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d) VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion(Dispatch const &d ) - { - uint32_t apiVersion; - Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); - return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type 
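// Usage sketch (illustrative, not part of the patch): enumerateInstanceVersion()
// wraps vkEnumerateInstanceVersion and yields the packed uint32_t version, which
// decodes with the standard VK_VERSION_* macros:
#include <cstdio>
#include <vulkan/vulkan.hpp>

void printLoaderVersion()
{
  uint32_t v = vk::enumerateInstanceVersion();  // throws on failure in enhanced mode
  std::printf( "Vulkan %u.%u.%u\n",
               VK_VERSION_MAJOR( v ), VK_VERSION_MINOR( v ), VK_VERSION_PATCH( v ) );
}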
CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin 
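// Usage sketch (illustrative, not part of the patch): the enhanced begin()
// overload takes the begin info by reference and routes the VkResult through
// createResultValue, so recording code stays linear and exception-checked:
#include <vulkan/vulkan.hpp>

void recordEmpty( vk::CommandBuffer cmd )
{
  vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  cmd.begin( beginInfo );  // throws vk::SystemError if vkBeginCommandBuffer fails
  cmd.end();               // likewise wraps vkEndCommandBuffer
}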
), static_cast( contents ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); -#else - if ( counterBuffers.size() != counterBufferOffsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, 
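// Usage sketch (illustrative, not part of the patch): beginRenderPass2 (core as
// of Vulkan 1.2) and beginRenderPass2KHR take identical arguments and differ only
// in the entry point they dispatch to; prefer the core spelling on 1.2+ devices:
#include <vulkan/vulkan.hpp>

void beginPass( vk::CommandBuffer cmd, const vk::RenderPassBeginInfo & renderPassBegin )
{
  vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
  cmd.beginRenderPass2( renderPassBegin, subpassBegin );
}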
VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
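// Usage sketch (illustrative, not part of the patch): the ArrayProxy overloads
// accept a single element, an initializer list, or any contiguous container
// wherever the C API expects a count plus pointer pair:
#include <vulkan/vulkan.hpp>

void bindOneSet( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::DescriptorSet set )
{
  // One descriptor set and no dynamic offsets; ArrayProxy supplies size()/data().
  cmd.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, layout,
                          /*firstSet=*/0, set, nullptr );
}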
template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); -#else - if ( buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); -#else - if ( offsets.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); - } -#endif 
/*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ), reinterpret_cast( pStrides ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, ArrayProxy strides, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); -#else - if ( buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == strides.size() ); -#else - if ( buffers.size() != strides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); -#else - if ( offsets.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( offsets.size() == strides.size() ); -#else - if ( offsets.size() != strides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != strides.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( sizes.size() == strides.size() ); -#else - if ( sizes.size() != strides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: sizes.size() != strides.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ), reinterpret_cast( strides.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const 
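// Usage sketch (illustrative, not part of the patch): the removed guards above
// enforce that parallel ArrayProxy arguments have equal lengths, raising
// vk::LogicError (or VULKAN_HPP_ASSERT under VULKAN_HPP_NO_EXCEPTIONS) otherwise.
// A well-formed call keeps the spans matched one-to-one:
#include <array>
#include <vulkan/vulkan.hpp>

void bindTwoVertexStreams( vk::CommandBuffer cmd, vk::Buffer a, vk::Buffer b )
{
  std::array<vk::Buffer, 2>     buffers{ a, b };
  std::array<vk::DeviceSize, 2> offsets{ 0, 0 };  // exactly one offset per buffer
  cmd.bindVertexBuffers( /*firstBinding=*/0, buffers, offsets );
}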
VULKAN_HPP_NOEXCEPT - { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( &info ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); -#else - if ( infos.size() != pOffsetInfos.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, 
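// Note (illustrative, not part of the patch): at this header's vintage the KHR
// acceleration-structure commands removed here were still provisional, so
// consumers had to opt in before including the header:
#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>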
VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size() , reinterpret_cast( ranges.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const 
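// Usage sketch (illustrative, not part of the patch): clearColorImage takes the
// clear value by reference and the subresource ranges as an ArrayProxy:
#include <array>
#include <vulkan/vulkan.hpp>

void clearToOpaqueBlack( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ClearColorValue black( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
  vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, black, range );
}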
VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyBuffer( m_commandBuffer, 
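// Usage sketch (illustrative, not part of the patch): the enhanced copy overloads
// take their regions as an ArrayProxy, so a single vk::BufferCopy passes directly:
#include <vulkan/vulkan.hpp>

void copyWholeBuffer( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst,
                      vk::DeviceSize size )
{
  vk::BufferCopy region( /*srcOffset=*/0, /*dstOffset=*/0, size );
  cmd.copyBuffer( src, dst, region );
}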
static_cast( srcBuffer ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const 
VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDebugMarkerInsertEXT( 
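// Usage sketch (illustrative, not part of the patch): the debug-marker commands
// are inert without a tool attached; bracketing a region makes captures in
// debuggers and validation layers readable. Assumes VK_EXT_debug_marker was
// enabled on the device:
#include <vulkan/vulkan.hpp>

void markedRegion( vk::CommandBuffer cmd )
{
  vk::DebugMarkerMarkerInfoEXT marker( "shadow pass" );
  cmd.debugMarkerBeginEXT( marker );
  // ... commands belonging to the region ...
  cmd.debugMarkerEndEXT();
}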
m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance,
Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, 
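// Usage sketch (illustrative, not part of the patch): the indirect variants read
// VkDrawIndexedIndirectCommand records from a buffer, and the *Count variants
// additionally fetch the draw count from a second buffer, clamped to maxDrawCount.
// Host-side construction of one such record:
#include <vulkan/vulkan.hpp>

vk::DrawIndexedIndirectCommand makeDrawRecord( uint32_t indexCount )
{
  // indexCount, instanceCount, firstIndex, vertexOffset, firstInstance
  return vk::DrawIndexedIndirectCommand( indexCount, 1, 0, 0, 0 );
}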
Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
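The removed definitions above all follow the same two-mode pattern: with VULKAN_HPP_DISABLE_ENHANCED_MODE defined only the raw forwarding overload is compiled, otherwise the enhanced overload is, and both forward unchanged to the dispatcher's function pointer. A minimal usage sketch of the indirect-count draw path, assuming hypothetical handles `cmd`, `drawBuf` and `countBuf` and that VK_KHR_draw_indirect_count is enabled on the device:

#include <vulkan/vulkan.hpp>

// Sketch: record an indexed indirect-count draw through the C++ wrapper;
// it forwards straight to vkCmdDrawIndexedIndirectCountKHR.
void recordDraw( vk::CommandBuffer cmd, vk::Buffer drawBuf, vk::Buffer countBuf )
{
  cmd.drawIndexedIndirectCountKHR( drawBuf, 0, countBuf, 0,
                                   /*maxDrawCount=*/16,
                                   /*stride=*/sizeof( vk::DrawIndexedIndirectCommand ) );
}
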
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass( m_commandBuffer );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass( m_commandBuffer );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
-#else
-    if ( counterBuffers.size() != counterBufferOffsets.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
-    }
-#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize*>( counterBufferOffsets.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV*>( pGeneratedCommandsInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV*>( &generatedCommandsInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
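Of the wrappers above, endTransformFeedbackEXT is the one whose enhanced overload validates its arguments: the two ArrayProxy spans must be the same length, otherwise it throws vk::LogicError (or trips VULKAN_HPP_ASSERT when VULKAN_HPP_NO_EXCEPTIONS is defined). A sketch with hypothetical handles:

#include <vulkan/vulkan.hpp>

// Sketch: single-element ArrayProxy construction keeps the two spans the
// same length, so the wrapper's size check passes.
void endXfb( vk::CommandBuffer cmd, vk::Buffer counterBuffer )
{
  const vk::DeviceSize counterOffset = 0;
  cmd.endTransformFeedbackEXT( /*firstCounterBuffer=*/0, counterBuffer, counterOffset );
}
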
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV*>( pGeneratedCommandsInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV*>( &generatedCommandsInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
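The enhanced pushConstants overload above derives the byte count as values.size() * sizeof( T ), so pushing a single struct writes exactly sizeof(T) bytes. A sketch with a hypothetical push-constant block:

#include <vulkan/vulkan.hpp>

struct PushData  // hypothetical 16-byte push-constant layout
{
  float scale[4];
};

// Sketch: one element of type PushData becomes 1 * sizeof( PushData ) bytes.
void pushScale( vk::CommandBuffer cmd, vk::PipelineLayout layout, PushData const & data )
{
  cmd.pushConstants<PushData>( layout, vk::ShaderStageFlagBits::eVertex, /*offset=*/0, data );
}
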
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( pCustomSampleOrders ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size() , reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( customSampleOrders.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D*>( pExclusiveScissors ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast<const VkRect2D*>( exclusiveScissors.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( pMarkerInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( &markerInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( pOverrideInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( &overrideInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( pMarkerInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( &markerInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
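Unlike the void wrappers, the three INTEL marker commands above return a VkResult, so their enhanced overloads go through createResultValue: with exceptions enabled they return void and throw on a non-success code. A sketch:

#include <vulkan/vulkan.hpp>

// Sketch: the enhanced overload throws on a non-success VkResult, so the
// call site needs no manual result check when exceptions are enabled.
void emitMarker( vk::CommandBuffer cmd, uint64_t value )
{
  vk::PerformanceMarkerInfoINTEL info{};
  info.marker = value;
  cmd.setPerformanceMarkerINTEL( info );
}
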
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( pSampleLocationsInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( &sampleLocationsInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV*>( pShadingRatePalettes ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast<const VkShadingRatePaletteNV*>( shadingRatePalettes.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pCallableShaderBindingTable ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &missShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &callableShaderBindingTable ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( pCallableShaderBindingTable ), width, height, depth );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedBufferRegionKHR*>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &missShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedBufferRegionKHR*>( &callableShaderBindingTable ), width, height, depth );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
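updateBuffer's enhanced overload, like pushConstants, computes dataSize as data.size() * sizeof( T ). A sketch (the usual 4-byte offset and size alignment rules of vkCmdUpdateBuffer still apply):

#include <vulkan/vulkan.hpp>
#include <array>

// Sketch: four floats become 16 bytes handed to vkCmdUpdateBuffer.
void writeConstants( vk::CommandBuffer cmd, vk::Buffer dst )
{
  std::array<float, 4> values{ { 0.0f, 1.0f, 2.0f, 3.0f } };
  cmd.updateBuffer<float>( dst, /*dstOffset=*/0, values );
}
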
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureKHR*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureKHR*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
-  {
-    uint32_t imageIndex;
-    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
-    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
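acquireNextImage2KHR is one of the few wrappers here that returns ResultValue<uint32_t> rather than throwing on every non-eSuccess code: eTimeout, eNotReady and eSuboptimalKHR are listed as success codes for the caller to inspect. A sketch:

#include <vulkan/vulkan.hpp>

// Sketch: .result carries the success code, .value the acquired image index.
uint32_t acquireImage( vk::Device device, vk::AcquireNextImageInfoKHR const & info )
{
  vk::ResultValue<uint32_t> rv = device.acquireNextImage2KHR( info );
  if ( rv.result == vk::Result::eSuboptimalKHR )
  {
    // the swapchain still works, but recreating it soon would be advisable
  }
  return rv.value;
}
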
Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const - { - uint32_t imageIndex; - Result result = static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); - return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( pAcquireInfo ), reinterpret_cast( pConfiguration ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; - Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( &acquireInfo ), reinterpret_cast( &configuration ) ) ); - return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING"::Device::acquirePerformanceConfigurationINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const - { - Result result = static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireProfilingLockKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const - { - std::vector commandBuffers( allocateInfo.commandBufferCount ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, 
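
  // Usage sketch for context, not from vulkan.hpp itself: calling the enhanced
  // acquireNextImageKHR overload above. It returns ResultValue<uint32_t> rather
  // than a plain value, because eTimeout, eNotReady and eSuboptimalKHR are
  // success codes the caller must inspect. `device`, `swapchain` and
  // `imageAvailable` are assumed, illustrative handles.
  #include <cstdint>
  #include <vulkan/vulkan.hpp>

  uint32_t acquireFrame( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
  {
    vk::ResultValue<uint32_t> rv =
        device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, vk::Fence{} );
    if ( rv.result == vk::Result::eSuboptimalKHR )
    {
      // still usable this frame; recreate the swapchain soon
    }
    return rv.value;  // index of the acquired swapchain image
  }
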
Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector commandBuffers( allocateInfo.commandBufferCount, vectorAllocator ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const - { - std::vector, Allocator> uniqueCommandBuffers; - std::vector commandBuffers( allocateInfo.commandBufferCount ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(commandBuffers.data()) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); - for ( size_t i=0 ; i( commandBuffers[i], deleter ) ); - } - } - - return createResultValue( result, uniqueCommandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector, Allocator> uniqueCommandBuffers( vectorAllocator ); - std::vector commandBuffers( allocateInfo.commandBufferCount ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(commandBuffers.data()) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); - for ( size_t i=0 ; i( commandBuffers[i], deleter ) ); - } - } - - return createResultValue( result, uniqueCommandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const - { - std::vector descriptorSets( allocateInfo.descriptorSetCount ); - Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator ); - Result result = static_cast( 
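
  // Usage sketch for context, not from vulkan.hpp itself: the Unique variant
  // above wraps each CommandBuffer in a UniqueHandle whose PoolFree deleter
  // returns it to the pool. `device` and `pool` are assumed, illustrative handles.
  #include <vector>
  #include <vulkan/vulkan.hpp>

  void recordOnce( vk::Device device, vk::CommandPool pool )
  {
    vk::CommandBufferAllocateInfo info( pool, vk::CommandBufferLevel::ePrimary, 1 );
    std::vector<vk::UniqueCommandBuffer> buffers = device.allocateCommandBuffersUnique( info );
    buffers[0]->begin( vk::CommandBufferBeginInfo() );
    buffers[0]->end();
  }  // command buffer freed back to `pool` here
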
d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const - { - std::vector, Allocator> uniqueDescriptorSets; - std::vector descriptorSets( allocateInfo.descriptorSetCount ); - Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(descriptorSets.data()) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); - for ( size_t i=0 ; i( descriptorSets[i], deleter ) ); - } - } - - return createResultValue( result, uniqueDescriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector, Allocator> uniqueDescriptorSets( vectorAllocator ); - std::vector descriptorSets( allocateInfo.descriptorSetCount ); - Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast(descriptorSets.data()) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); - for ( size_t i=0 ; i( descriptorSets[i], deleter ) ); - } - } - - return createResultValue( result, uniqueDescriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMemory ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DeviceMemory memory; - Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); - return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DeviceMemory memory; - Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); - - ObjectFree deleter( *this, 
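
  // Usage sketch for context, not from vulkan.hpp itself: allocateMemoryUnique
  // above pairs the allocation with an ObjectFree deleter, so vkFreeMemory runs
  // when the handle leaves scope. Picking `memoryTypeIndex` from the device's
  // memory properties is elided; values are illustrative.
  #include <vulkan/vulkan.hpp>

  vk::UniqueDeviceMemory allocateBlock( vk::Device device, vk::DeviceSize size, uint32_t memoryTypeIndex )
  {
    vk::MemoryAllocateInfo info( size, memoryTypeIndex );
    return device.allocateMemoryUnique( info );  // throws vk::SystemError on failure
  }
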
allocator, d );
-    return createResultValue<DeviceMemory,Dispatch>( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryKHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( pBindInfos ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> bindInfos, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryKHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( bindInfos.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( pBindInfos ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoKHR> bindInfos, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoKHR*>( bindInfos.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device,
bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result 
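
  // Usage sketch for context, not from vulkan.hpp itself: the enhanced
  // bind*Memory2 overloads above take an ArrayProxy, so several resources can
  // be bound in one call; ArrayProxy accepts initializer lists, std::array or
  // std::vector. Handles and the offset are illustrative.
  #include <vulkan/vulkan.hpp>

  void bindPair( vk::Device device, vk::Buffer a, vk::Buffer b,
                 vk::DeviceMemory memory, vk::DeviceSize offsetB )
  {
    device.bindBufferMemory2( { vk::BindBufferMemoryInfo( a, memory, 0 ),
                                vk::BindBufferMemoryInfo( b, memory, offsetB ) } );
  }
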
Device::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkBuildAccelerationStructureKHR( m_device, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::buildAccelerationStructureKHR( ArrayProxy infos, ArrayProxy pOffsetInfos, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); -#else - if ( infos.size() != pOffsetInfos.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - Result result = static_cast( d.vkBuildAccelerationStructureKHR( m_device, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::buildAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const - { - Result result = static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast( pInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const - { - Result result = static_cast( d.vkCopyAccelerationStructureKHR( m_device, reinterpret_cast( &info ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast( pInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( const 
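
  // Usage sketch for context, not from vulkan.hpp itself: the infos/pOffsetInfos
  // length check above is the generator's standard pattern for paired arrays; it
  // degrades to an assert when exceptions are disabled. The same shape in user
  // code, with illustrative names:
  #include <cstddef>
  #include <vulkan/vulkan.hpp>

  void checkPaired( std::size_t infoCount, std::size_t offsetCount )
  {
  #if defined( VULKAN_HPP_NO_EXCEPTIONS )
    VULKAN_HPP_ASSERT( infoCount == offsetCount );  // debug-only guard
  #else
    if ( infoCount != offsetCount )
      throw vk::LogicError( "acceleration-structure build: infos and offset infos must match" );
  #endif
  }
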
CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const - { - Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( m_device, reinterpret_cast( &info ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyAccelerationStructureToMemoryKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast( pInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const - { - Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( m_device, reinterpret_cast( &info ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::copyMemoryToAccelerationStructureKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pAccelerationStructure ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; - Result result = static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); - return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; - Result result = static_cast( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - - template - 
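
  // Usage sketch for context, not from vulkan.hpp itself: these copy* entry
  // points return a plain vk::Result even in enhanced mode, because
  // eOperationDeferredKHR and eOperationNotDeferredKHR are ordinary outcomes.
  // Requires VK_ENABLE_BETA_EXTENSIONS (provisional ray tracing); `device` and
  // `copyInfo` are assumed, illustrative values.
  #include <vulkan/vulkan.hpp>

  void copyWithDeferral( vk::Device device, const vk::CopyAccelerationStructureInfoKHR & copyInfo )
  {
    vk::Result r = device.copyAccelerationStructureKHR( copyInfo );
    if ( r == vk::Result::eOperationDeferredKHR )
    {
      // the copy runs on a deferred operation; join/poll it before using the result
    }
  }
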
VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pAccelerationStructure ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; - Result result = static_cast( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); - return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNV" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; - Result result = static_cast( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNVUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBuffer ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Buffer buffer; - Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); - return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Buffer buffer; - Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ 
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::BufferView view; - Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::BufferView view; - Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCommandPool ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::CommandPool commandPool; - Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); - return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::CommandPool commandPool; - Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createComputePipelines( 
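
  // Usage sketch for context, not from vulkan.hpp itself: every create* /
  // create*Unique pair above follows one pattern, with the Unique form attaching
  // an ObjectDestroy deleter. Handles and the queue family index are illustrative.
  #include <vulkan/vulkan.hpp>

  void makePool( vk::Device device, uint32_t queueFamilyIndex )
  {
    vk::CommandPoolCreateInfo info( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
    vk::UniqueCommandPool pool = device.createCommandPoolUnique( info );
    // record with command buffers allocated from *pool ...
  }  // vkDestroyCommandPool runs here
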
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines; - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines( vectorAllocator ); - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( pAllocator ), reinterpret_cast( pDeferredOperation ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDeferredOperationKHR( Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; - Result result = static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); - return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDeferredOperationKHRUnique( Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; - Result result = static_cast( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( 
result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING"::Device::createDeferredOperationKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - - template - VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorPool ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; - Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); - return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; - Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSetLayout ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; - Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); - return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; - Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( 
allocator ) ), reinterpret_cast( &setLayout ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" ); 
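
  // Usage sketch for context, not from vulkan.hpp itself: the KHR spelling above
  // is the pre-1.1 extension alias of createDescriptorUpdateTemplate; both take
  // the same create info. Illustrative usage via the core entry point:
  #include <vulkan/vulkan.hpp>

  vk::UniqueDescriptorUpdateTemplate makeTemplate( vk::Device device,
                                                   const vk::DescriptorUpdateTemplateCreateInfo & info )
  {
    return device.createDescriptorUpdateTemplateUnique( info );  // needs Vulkan >= 1.1
  }
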
- } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateEvent( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createEvent( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Event event; - Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); - return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Event event; - Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateFence( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createFence( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator, 
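
  // Usage sketch for context, not from vulkan.hpp itself: creating a fence
  // pre-signaled avoids a special case on the first frame of a wait/reset loop.
  // `device` is an assumed, illustrative handle.
  #include <cstdint>
  #include <vulkan/vulkan.hpp>

  void waitAndReset( vk::Device device )
  {
    vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
    (void)device.waitForFences( *fence, VK_TRUE, UINT64_MAX );  // eTimeout is a success code
    device.resetFences( *fence );
  }
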
Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; - Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); - return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; - Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } - template - 
VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines; - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines( vectorAllocator ); - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateImage( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createImage( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Image image; - Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); - return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Image image; - Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateImageView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ImageView view; - Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type 
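
  // Usage sketch for context, not from vulkan.hpp itself: typical pairing of the
  // createImage/createImageView wrappers above. `image` and `format` are assumed,
  // illustrative values.
  #include <vulkan/vulkan.hpp>

  vk::UniqueImageView makeColorView( vk::Device device, vk::Image image, vk::Format format )
  {
    vk::ImageViewCreateInfo info( {}, image, vk::ImageViewType::e2D, format,
                                  vk::ComponentMapping(),
                                  vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
    return device.createImageViewUnique( info );
  }
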
Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ImageView view; - Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNV" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; - Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); - return createResultValue( 
result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; - Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; - Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); - return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; - Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPrivateDataSlot ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; - Result result = static_cast( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator 
) ), reinterpret_cast( &privateDataSlot ) ) ); - return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING"::Device::createPrivateDataSlotEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot; - Result result = static_cast( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING"::Device::createPrivateDataSlotEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::QueryPool queryPool; - Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); - return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::QueryPool queryPool; - Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector pipelines( 
createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHR", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines; - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesKHRUnique( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines( vectorAllocator ); - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineKHRUnique", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT }, deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - - template - VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } - 
template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines; - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue,Allocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector, Allocator> uniquePipelines( vectorAllocator ); - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(pipelines.data()) ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( pipelines[i], deleter ) ); - } - } - - return createResultValue( result, uniquePipelines, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - } - template - VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT }, deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), 
reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2Unique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateSampler( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Sampler sampler; - Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); - return createResultValue( result, sampler, 
VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Sampler sampler; - Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( 
d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Semaphore semaphore; - Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); - return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Semaphore semaphore; - Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; - Result result = static_cast( 
d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); - return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; - Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchains ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector swapchains( createInfos.size() ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector swapchains( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector, Allocator> uniqueSwapchainKHRs; - std::vector swapchainKHRs( 
createInfos.size() ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(swapchainKHRs.data()) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - uniqueSwapchainKHRs.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( swapchainKHRs[i], deleter ) ); - } - } - - return createResultValue( result, uniqueSwapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector, Allocator> uniqueSwapchainKHRs( vectorAllocator ); - std::vector swapchainKHRs( createInfos.size() ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast(swapchainKHRs.data()) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - uniqueSwapchainKHRs.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( swapchainKHRs[i], deleter ) ); - } - } - - return createResultValue( result, uniqueSwapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchain ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - Result result = static_cast( 
d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pValidationCache ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; - Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); - return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; - Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const 
&d ) const - { - Result result = static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); - } -#else - template - VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const - { - Result result = static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::deferredOperationJoinKHR", { Result::eSuccess, Result::eThreadDoneKHR, Result::eThreadIdleKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { -#ifdef VK_ENABLE_BETA_EXTENSIONS - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); -#else - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { -#ifdef VK_ENABLE_BETA_EXTENSIONS - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); -#else - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyAccelerationStructureNV( m_device, 
static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); - } -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - template - VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkDestroyDescriptorUpdateTemplate( m_device, 
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VkAllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( pInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), dataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy<T> data, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
-    d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsKHR( const AccelerationStructureMemoryRequirementsInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
-    d.vkGetAccelerationStructureMemoryRequirementsKHR( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoKHR*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
-    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
-    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
-    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
-    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
-    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
-    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
-    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
-    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
-    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
-#else
-    if ( timestampInfos.size() != timestamps.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
-    }
-#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
-    uint64_t maxDeviation;
-    Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
-    return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getDeferredOperationResultKHR", { Result::eSuccess, Result::eNotReady } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
-    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return support;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
-    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
-    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return support;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
-    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR*>( version ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR*>( &version ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureCompatibilityKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
-    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
-    return peerMemoryFeatures;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
-    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
-    return peerMemoryFeatures;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
- Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); - return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pModes ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); - return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModes2EXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - - template - VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( pModes ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); - return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; - d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); - return committedMemoryInBytes; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast( pInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( pInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( pInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeviceProcAddr( m_device, pName );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetDeviceProcAddr( m_device, name.c_str() );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::Queue queue;
-    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
-    return queue;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( pQueueInfo ), reinterpret_cast<VkQueue*>( pQueue ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::Queue queue;
-    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
-    return queue;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
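// Editorial usage sketch, not part of the patch: fetching a queue handle and a
// device-level entry point through the wrappers above. The family/queue indices are
// illustrative and must match a VkDeviceQueueCreateInfo used at device creation.
#include <vulkan/vulkan.hpp>
void fetchQueueAndProc( vk::Device device )
{
  vk::Queue queue = device.getQueue( /*queueFamilyIndex=*/0, /*queueIndex=*/0 );
  PFN_vkVoidFunction fn = device.getProcAddr( "vkQueueSubmit" );
  (void)queue; (void)fn;
}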
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
-  {
-    int fd;
-    Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
-    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
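// Editorial note, not part of the patch: getFenceStatus/getEventStatus return a plain
// vk::Result even in enhanced mode, because eNotReady/eEventReset are success codes
// rather than errors. A minimal polling sketch:
#include <vulkan/vulkan.hpp>
bool isFenceSignaled( vk::Device device, vk::Fence fence )
{
  return device.getFenceStatus( fence ) == vk::Result::eSuccess;
}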
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
-  {
-    HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
-    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
-    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
-    Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( &properties ) ) );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageDrmFormatModifierPropertiesEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
-    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
-    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
-    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
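// Editorial usage sketch, not part of the patch: the StructureChain overload above
// lets callers pull extension structs such as MemoryDedicatedRequirements out of the
// same query. Assumes a device supporting Vulkan 1.1 (or the dedicated-allocation
// extensions) and the default dispatcher.
#include <vulkan/vulkan.hpp>
bool prefersDedicated( vk::Device device, vk::Image image )
{
  auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
      vk::ImageMemoryRequirementsInfo2{ image } );
  return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation == VK_TRUE;
}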
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
-    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return memoryRequirements;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
-    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements( vectorAllocator );
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
-    uint32_t sparseMemoryRequirementCount;
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
-    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
-    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
-    return sparseMemoryRequirements;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
-    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
-    return layout;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
-    Result result = static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX*>( &properties ) ) );
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageViewAddressNVX" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX*>( pInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX*>( &info ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
-  {
-    struct AHardwareBuffer* buffer;
-    Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
-    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-
-
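// Editorial usage sketch, not part of the patch: getImageSubresourceLayout is valid
// for linear-tiled images; the returned layout gives the byte offset and row pitch
// needed to address texels through mapped memory. `linearImage` is assumed to have
// been created with vk::ImageTiling::eLinear.
#include <vulkan/vulkan.hpp>
vk::DeviceSize linearRowPitch( vk::Device device, vk::Image linearImage )
{
  vk::ImageSubresource subresource{ vk::ImageAspectFlagBits::eColor, /*mipLevel=*/0, /*arrayLayer=*/0 };
  vk::SubresourceLayout layout = device.getImageSubresourceLayout( linearImage, subresource );
  return layout.rowPitch;
}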
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
-  {
-    int fd;
-    Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( &getFdInfo ), &fd ) );
-    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( pMemoryFdProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
-    Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( &memoryFdProperties ) ) );
-    return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( pMemoryHostPointerProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
-    Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( &memoryHostPointerProperties ) ) );
-    return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
-  {
-    HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-
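// Editorial usage sketch, not part of the patch: exporting an allocation as a POSIX
// fd through the wrapper above. Assumes VK_KHR_external_memory_fd is enabled, the
// allocation named the same handle type in VkExportMemoryAllocateInfo, and the
// dispatcher has the extension entry point loaded. Enhanced mode throws on error.
#include <vulkan/vulkan.hpp>
int exportMemoryFd( vk::Device device, vk::DeviceMemory memory )
{
  vk::MemoryGetFdInfoKHR getInfo{ memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
  return device.getMemoryFdKHR( getInfo );
}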
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
-  {
-    HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
-    Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
-    return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
-    uint32_t presentationTimingCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && presentationTimingCount )
-      {
-        presentationTimings.resize( presentationTimingCount );
-        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
-      presentationTimings.resize( presentationTimingCount );
-    }
-    return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings( vectorAllocator );
-    uint32_t presentationTimingCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && presentationTimingCount )
-      {
-        presentationTimings.resize( presentationTimingCount );
-        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
-      presentationTimings.resize( presentationTimingCount );
-    }
-    return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( pValue ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
-    Result result = static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( &value ) ) );
-    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getPerformanceParameterINTEL" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
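// Editorial note, not part of the patch: the vector-returning wrappers above all
// encode the standard two-call enumeration protocol (query count, fill, retry while
// VK_INCOMPLETE). A hand-rolled C-style equivalent for comparison, assuming the
// VK_GOOGLE_display_timing entry point is available to link or was loaded:
#include <vulkan/vulkan.h>
#include <vector>
std::vector<VkPastPresentationTimingGOOGLE> queryTimings( VkDevice device, VkSwapchainKHR swapchain )
{
  std::vector<VkPastPresentationTimingGOOGLE> timings;
  uint32_t count = 0;
  VkResult result;
  do
  {
    result = vkGetPastPresentationTimingGOOGLE( device, swapchain, &count, nullptr );
    if ( result == VK_SUCCESS && count != 0 )
    {
      timings.resize( count );
      result = vkGetPastPresentationTimingGOOGLE( device, swapchain, &count, timings.data() );
    }
  } while ( result == VK_INCOMPLETE );  // another thread may have grown the list between calls
  timings.resize( count );
  return timings;
}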
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const
-  {
-    std::vector<uint8_t,Allocator> data;
-    size_t dataSize;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
-      if ( ( result == Result::eSuccess ) && dataSize )
-      {
-        data.resize( dataSize );
-        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( dataSize <= data.size() );
-      data.resize( dataSize );
-    }
-    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<uint8_t,Allocator> data( vectorAllocator );
-    size_t dataSize;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
-      if ( ( result == Result::eSuccess ) && dataSize )
-      {
-        data.resize( dataSize );
-        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( dataSize <= data.size() );
-      data.resize( dataSize );
-    }
-    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
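// Editorial usage sketch, not part of the patch: persisting a pipeline cache via the
// wrapper above; the returned blob can be written to disk and fed back through
// VkPipelineCacheCreateInfo::pInitialData on the next run.
#include <vulkan/vulkan.hpp>
#include <vector>
std::vector<uint8_t> serializeCache( vk::Device device, vk::PipelineCache cache )
{
  return device.getPipelineCacheData( cache );
}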
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( pInternalRepresentations ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
-  {
-    std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations;
-    uint32_t internalRepresentationCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
-      {
-        internalRepresentations.resize( internalRepresentationCount );
-        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( internalRepresentations.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
-      internalRepresentations.resize( internalRepresentationCount );
-    }
-    return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations( vectorAllocator );
-    uint32_t internalRepresentationCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
-      {
-        internalRepresentations.resize( internalRepresentationCount );
-        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( internalRepresentations.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
-      internalRepresentations.resize( internalRepresentationCount );
-    }
-    return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( pPipelineInfo ), pExecutableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const
-  {
-    std::vector<PipelineExecutablePropertiesKHR,Allocator> properties;
-    uint32_t executableCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && executableCount )
-      {
-        properties.resize( executableCount );
-        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( executableCount <= properties.size() );
-      properties.resize( executableCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PipelineExecutablePropertiesKHR,Allocator> properties( vectorAllocator );
-    uint32_t executableCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && executableCount )
-      {
-        properties.resize( executableCount );
-        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( executableCount <= properties.size() );
-      properties.resize( executableCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pStatisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( pStatistics ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
-  {
-    std::vector<PipelineExecutableStatisticKHR,Allocator> statistics;
-    uint32_t statisticCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && statisticCount )
-      {
-        statistics.resize( statisticCount );
-        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( statistics.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
-      statistics.resize( statisticCount );
-    }
-    return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PipelineExecutableStatisticKHR,Allocator> statistics( vectorAllocator );
-    uint32_t statisticCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && statisticCount )
-      {
-        statistics.resize( statisticCount );
-        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( statistics.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
-      statistics.resize( statisticCount );
-    }
-    return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), pData );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    uint64_t data;
-    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), &data );
-    return data;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
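// Editorial usage sketch, not part of the patch: VK_EXT_private_data associates a
// 64-bit payload with any handle, and the wrapper above reads it back. Assumes the
// slot came from createPrivateDataSlotEXT and the value was stored with
// setPrivateDataEXT; `imageHandle` is the raw handle cast to uint64_t by the caller.
#include <vulkan/vulkan.hpp>
uint64_t readPrivateData( vk::Device device, uint64_t imageHandle, vk::PrivateDataSlotEXT slot )
{
  return device.getPrivateDataEXT( vk::ObjectType::eImage, imageHandle, slot );
}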
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-
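// Editorial usage sketch, not part of the patch: the ArrayProxy overload of
// getQueryPoolResults above writes into caller-provided storage and reports
// eNotReady as a success code instead of throwing. Assumes `pool` holds `count`
// 64-bit timestamp queries that have been written on the device.
#include <vulkan/vulkan.hpp>
#include <vector>
std::vector<uint64_t> readTimestamps( vk::Device device, vk::QueryPool pool, uint32_t count )
{
  std::vector<uint64_t> results( count );
  vk::Result r = device.getQueryPoolResults<uint64_t>(
      pool, 0, count, results, sizeof( uint64_t ),
      vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
  (void)r;  // eSuccess or eNotReady; eWait makes eNotReady effectively unreachable
  return results;
}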
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename T, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( pDisplayTimingProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
-    Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( &displayTimingProperties ) ) );
-    return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::Extent2D granularity;
-    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
-    return granularity;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
-  {
-    uint64_t value;
-    Result result = static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
-    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreCounterValue" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
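// Editorial usage sketch, not part of the patch: polling a timeline semaphore's
// counter with the wrapper above (the KHR alias below behaves identically). Assumes
// `timeline` was created as a timeline semaphore on a Vulkan 1.2 device.
#include <vulkan/vulkan.hpp>
bool timelineReached( vk::Device device, vk::Semaphore timeline, uint64_t target )
{
  return device.getSemaphoreCounterValue( timeline ) >= target;
}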
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
-  {
-    uint64_t value;
-    Result result = static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
-    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreCounterValueKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
-  {
-    int fd;
-    Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( &getFdInfo ), &fd ) );
-    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
-  {
-    HANDLE handle;
-    Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
-    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const
-  {
-    std::vector<uint8_t,Allocator> info;
-    size_t infoSize;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ) );
-      if ( ( result == Result::eSuccess ) && infoSize )
-      {
-        info.resize( infoSize );
-        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void*>( info.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( infoSize <= info.size() );
-      info.resize( infoSize );
-    }
-    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<uint8_t,Allocator> info( vectorAllocator );
-    size_t infoSize;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ) );
-      if ( ( result == Result::eSuccess ) && infoSize )
-      {
-        info.resize( infoSize );
-        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void*>( info.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( infoSize <= info.size() );
-      info.resize( infoSize );
-    }
-    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
-  {
-    uint64_t counterValue;
-    Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
-    return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
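// Editorial usage sketch, not part of the patch: with VK_AMD_shader_info enabled, the
// wrapper above can fetch a human-readable disassembly blob for one pipeline stage.
// Assumes an AMD driver exposing the extension and a dispatcher with its entry point.
#include <vulkan/vulkan.hpp>
#include <string>
#include <vector>
std::string disassembleFragment( vk::Device device, vk::Pipeline pipeline )
{
  std::vector<uint8_t> blob = device.getShaderInfoAMD(
      pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eDisassembly );
  return std::string( blob.begin(), blob.end() );
}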
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    std::vector<Image,Allocator> swapchainImages;
-    uint32_t swapchainImageCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && swapchainImageCount )
-      {
-        swapchainImages.resize( swapchainImageCount );
-        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
-      swapchainImages.resize( swapchainImageCount );
-    }
-    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
-  }
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<Image,Allocator> swapchainImages( vectorAllocator );
-    uint32_t swapchainImageCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && swapchainImageCount )
-      {
-        swapchainImages.resize( swapchainImageCount );
-        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
-      swapchainImages.resize( swapchainImageCount );
-    }
-    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
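// Editorial usage sketch, not part of the patch: retrieving the swapchain's images
// through the vector-returning wrapper above; the count is driver-chosen and may
// exceed SwapchainCreateInfoKHR::minImageCount.
#include <vulkan/vulkan.hpp>
#include <vector>
std::vector<vk::Image> fetchSwapchainImages( vk::Device device, vk::SwapchainKHR swapchain )
{
  return device.getSwapchainImagesKHR( swapchain );
}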
static_cast( validationCache ), &dataSize, nullptr ) ); - if ( ( result == Result::eSuccess ) && dataSize ) - { - data.resize( dataSize ); - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - data.resize( dataSize ); - } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - - template - VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename 
ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - - template - VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d ) const - { - Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), static_cast( flags ), ppData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const - { - void* pData; - Result result = static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), static_cast( flags ), &pData ) ); - return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkMergePipelineCaches( m_device, static_cast( 
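// Sketch of the map/copy/unmap round trip enabled by the mapMemory wrapper above,
// assuming host-visible memory bound to `memory` and a payload `src` of `size` bytes
// (all names illustrative, not from this patch).
#include <cstring>
#include <vulkan/vulkan.hpp>

void uploadBytes( vk::Device device, vk::DeviceMemory memory, const void* src, vk::DeviceSize size )
{
  // Enhanced mode returns the mapped pointer directly and throws on failure.
  void* dst = device.mapMemory( memory, 0, size, vk::MemoryMapFlags() );
  std::memcpy( dst, src, static_cast<size_t>( size ) );
  device.unmapMemory( memory );
}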
dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d ) const - { - Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d ) const - { - Result result = static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( pDeviceEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) 
const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Fence fence; - Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const - { - Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const - { - Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkReleaseProfilingLockKHR( m_device 
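// The *Unique factories above (registerEventEXTUnique, registerDisplayEventEXTUnique)
// pair the created fence with an ObjectDestroy deleter. A sketch, assuming a valid
// `device` and a hotplug-style device event (illustrative, not from this patch):
#include <cstdint>
#include <vulkan/vulkan.hpp>

void waitForDeviceEvent( vk::Device device )
{
  vk::DeviceEventInfoEXT eventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
  vk::UniqueFence fence = device.registerEventEXTUnique( eventInfo );
  (void)device.waitForFences( fence.get(), VK_TRUE, UINT64_MAX );
  // vkDestroyFence runs automatically when `fence` leaves scope.
}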
);
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkReleaseProfilingLockKHR( m_device );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-
d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); - } -#else - template - VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); - } -#else - template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkSetHdrMetadataEXT( m_device, 
swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); -#else - if ( swapchains.size() != metadata.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast( swapchains.data() ), reinterpret_cast( metadata.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setPrivateDataEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphore" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename 
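// The enhanced setHdrMetadataEXT above demands one HdrMetadataEXT per swapchain: with
// exceptions enabled a size mismatch throws vk::LogicError, otherwise it only asserts.
// Illustrative call, assuming two valid swapchains and a prepared metadata struct:
#include <array>
#include <vulkan/vulkan.hpp>

void applyHdrMetadata( vk::Device device, vk::SwapchainKHR a, vk::SwapchainKHR b,
                       const vk::HdrMetadataEXT& md )
{
  std::array<vk::SwapchainKHR, 2> swapchains{ a, b };
  std::array<vk::HdrMetadataEXT, 2> metadata{ md, md };
  device.setHdrMetadataEXT( swapchains, metadata );  // sizes must match
}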
ResultValueType::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphoreKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkUninitializePerformanceApiINTEL( m_device ); - } -#else - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkUninitializePerformanceApiINTEL( m_device ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkUnmapMemory( m_device, static_cast( memory ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkUnmapMemory( m_device, static_cast( memory ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - } -#else - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( 
descriptorUpdateTemplate ), pData );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
-  }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( pWaitInfo ), timeout ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( &waitInfo ), timeout ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphores", { Result::eSuccess, Result::eTimeout } );
-  }
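// waitForFences and waitSemaphores above list eTimeout as a success code, so enhanced
// mode returns it instead of throwing; callers branch on the vk::Result. Sketch,
// assuming a valid fence (names illustrative, not from this patch):
#include <vulkan/vulkan.hpp>

bool pollFence( vk::Device device, vk::Fence fence )
{
  vk::Result r = device.waitForFences( fence, VK_TRUE, 0 );  // timeout of 0 == poll
  return r == vk::Result::eSuccess;                          // eTimeout -> not ready yet
}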
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const - { - Result result = static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), dataSize, pData, stride ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy data, size_t stride, Dispatch const &d ) const - { - Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ), stride ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::writeAccelerationStructuresPropertiesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - 
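// The enhanced writeAccelerationStructuresPropertiesKHR above sizes its output as
// data.size() * sizeof(T), so the element type doubles as the query result type.
// A sketch under the provisional VK_ENABLE_BETA_EXTENSIONS ray-tracing API this header
// ships, assuming a query that yields one uint64_t per structure (illustrative names):
#ifdef VK_ENABLE_BETA_EXTENSIONS
#include <array>
#include <vulkan/vulkan.hpp>

uint64_t queryCompactedSize( vk::Device device, vk::AccelerationStructureKHR accel )
{
  std::array<uint64_t, 1> result{};
  (void)device.writeAccelerationStructuresPropertiesKHR(
      accel, vk::QueryType::eAccelerationStructureCompactedSizeKHR,
      vk::ArrayProxy<uint64_t>( result ), sizeof( uint64_t ) );
  return result[0];
}
#endif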
Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - - template - VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCallback ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; - Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); - return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; - Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMessenger ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; - Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); - return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - 
VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; - Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, 
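// vkCreateDebugUtilsMessengerEXT above is an extension entry point, so it resolves only
// through a dispatcher whose instance-level pointers were loaded at runtime. Sketch,
// assuming an existing vk::Instance and a debug callback `debugCb` (illustrative names):
#include <vulkan/vulkan.hpp>

vk::DebugUtilsMessengerEXT installMessenger( vk::Instance instance,
                                             PFN_vkDebugUtilsMessengerCallbackEXT debugCb,
                                             const vk::DispatchLoaderDynamic& dld )
{
  vk::DebugUtilsMessengerCreateInfoEXT ci(
      {},
      vk::DebugUtilsMessageSeverityFlagBitsEXT::eError | vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning,
      vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
      debugCb );
  return instance.createDebugUtilsMessengerEXT( ci, nullptr, dld );
}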
VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_USE_PLATFORM_IOS_MVK - template - VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - - -#ifdef VK_USE_PLATFORM_FUCHSIA - template - VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), 
reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - - -#ifdef VK_USE_PLATFORM_MACOS_MVK - template - VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - -#ifdef VK_USE_PLATFORM_METAL_EXT - template - VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - 
{ - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - -#ifdef VK_USE_PLATFORM_GGP - template - VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGP" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGPUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_GGP*/ - - -#ifdef VK_USE_PLATFORM_VI_NN - template - VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_VI_NN*/ - - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, 
reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - -#ifdef VK_USE_PLATFORM_XCB_KHR - template - VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - - -#ifdef VK_USE_PLATFORM_XLIB_KHR - template - VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const 
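// Every surface factory above compiles only under its VK_USE_PLATFORM_* guard; the
// pattern is identical across Win32/XCB/Xlib/Wayland. A Win32 sketch, assuming valid
// HINSTANCE/HWND handles (illustrative, not from this patch):
#ifdef VK_USE_PLATFORM_WIN32_KHR
#include <vulkan/vulkan.hpp>

vk::UniqueSurfaceKHR makeWin32Surface( vk::Instance instance, HINSTANCE hinst, HWND hwnd )
{
  vk::Win32SurfaceCreateInfoKHR info( {}, hinst, hwnd );
  return instance.createWin32SurfaceKHRUnique( info );  // surface destroyed with the handle
}
#endif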
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" );
-  }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
-    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
-
-    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
-    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter );
-  }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const
-  {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() );
-#else
-    if ( layerPrefix.size() != message.size() )
-    {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkInstance::debugReportMessageEXT: layerPrefix.size() != message.size()" );
-    }
-#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    Result result = static_cast<Result>(
+      d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
+                          reinterpret_cast<const VkAllocationCallbacks *>(
+                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                          reinterpret_cast<VkInstance *>( &instance ) ) );
+    ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Instance, Dispatch>(
+      result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
-      {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    }
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
-  }
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
-      {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    }
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
+    d.vkDestroyInstance( m_instance,
+                         reinterpret_cast<const VkAllocationCallbacks *>(
+                           static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const
-  {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
-      {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    }
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
-  }
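[Editorial sketch, not part of the patch: every removed and re-added loop in this region implements the same two-call enumeration idiom: ask for the count with a null array, size the vector, fetch, and retry while the entry point keeps returning VK_INCOMPLETE. A hand-rolled version of the idiom against the plain C API; the function and variable names are illustrative.]

// Sketch of the two-call idiom the generated wrappers above implement.
#include <vulkan/vulkan.h>
#include <vector>

std::vector<VkPhysicalDevice> enumerateAll( VkInstance instance )
{
  std::vector<VkPhysicalDevice> devices;
  uint32_t count = 0;
  VkResult result;
  do
  {
    result = vkEnumeratePhysicalDevices( instance, &count, nullptr );  // 1st call: count only
    if ( result == VK_SUCCESS && count != 0 )
    {
      devices.resize( count );
      result = vkEnumeratePhysicalDevices( instance, &count, devices.data() );  // 2nd call: fill
    }
  } while ( result == VK_INCOMPLETE );  // the count can change between calls; retry
  devices.resize( count );              // shrink if fewer entries were written than reserved
  return devices;
}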
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::enumeratePhysicalDevices( uint32_t *                             pPhysicalDeviceCount,
+                                        VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
+                                        Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
   {
-    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
-    uint32_t physicalDeviceGroupCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
-      {
-        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
-      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
-    }
-    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
+    return static_cast<Result>( d.vkEnumeratePhysicalDevices(
+      m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
-  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
+  template <typename PhysicalDeviceAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+    Instance::enumeratePhysicalDevices( Dispatch const & d ) const
   {
-    std::vector<PhysicalDevice,Allocator> physicalDevices;
-    uint32_t physicalDeviceCount;
-    Result result;
+    std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
+    uint32_t                                             physicalDeviceCount;
+    Result                                               result;
     do
     {
       result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
       if ( ( result == Result::eSuccess ) && physicalDeviceCount )
       {
         physicalDevices.resize( physicalDeviceCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
+          m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+        VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
       }
     } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
+    if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
     {
-      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
       physicalDevices.resize( physicalDeviceCount );
     }
-    return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
+    return createResultValue(
+      result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
   }
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const
+
+  template <typename PhysicalDeviceAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+    Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
   {
-    std::vector<PhysicalDevice,Allocator> physicalDevices( vectorAllocator );
-    uint32_t physicalDeviceCount;
-    Result result;
+    std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
+    uint32_t                                             physicalDeviceCount;
+    Result                                               result;
    do
    {
      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
      {
        physicalDevices.resize( physicalDeviceCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
+          m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+        VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
      }
    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
+    if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
    {
-      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
      physicalDevices.resize( physicalDeviceCount );
    }
-    return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
+    return createResultValue(
+      result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+  }
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
+    PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
+    return features;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format             format,
+                                         VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
+                                         Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties(
+      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
+    PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+                                         Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties(
+      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+    return formatProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format                  format,
+                                              VULKAN_HPP_NAMESPACE::ImageType               type,
+                                              VULKAN_HPP_NAMESPACE::ImageTiling             tiling,
+                                              VULKAN_HPP_NAMESPACE::ImageUsageFlags         usage,
+                                              VULKAN_HPP_NAMESPACE::ImageCreateFlags        flags,
+                                              VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
+      m_physicalDevice,
+      static_cast<VkFormat>( format ),
+      static_cast<VkImageType>( type ),
+      static_cast<VkImageTiling>( tiling ),
+      static_cast<VkImageUsageFlags>( usage ),
+      static_cast<VkImageCreateFlags>( flags ),
+      reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
+    PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format           format,
+                                              VULKAN_HPP_NAMESPACE::ImageType        type,
+                                              VULKAN_HPP_NAMESPACE::ImageTiling      tiling,
+                                              VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage,
+                                              VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+                                              Dispatch const &                       d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
+      m_physicalDevice,
+      static_cast<VkFormat>( format ),
+      static_cast<VkImageType>( type ),
+      static_cast<VkImageTiling>( tiling ),
+      static_cast<VkImageUsageFlags>( usage ),
+      static_cast<VkImageCreateFlags>( flags ),
+      reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
+    return createResultValue(
+      result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
"::PhysicalDevice::getImageFormatProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & d ) const + { + std::vector queueFamilyProperties( + queueFamilyPropertiesAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, + reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, + reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE 
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char *     pName,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetInstanceProcAddr( m_instance, pName );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name,
+                                                              Dispatch const &    d ) const VULKAN_HPP_NOEXCEPT
   {
     return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char *     pName,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( pCallbackData ) );
+    return d.vkGetDeviceProcAddr( m_device, pName );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name,
+                                                            Dispatch const &    d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( &callbackData ) );
+    return d.vkGetDeviceProcAddr( m_device, name.c_str() );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo *    pCreateInfo,
+                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                  VULKAN_HPP_NAMESPACE::Device *                    pDevice,
+                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
+                                                  reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
+                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                  reinterpret_cast<VkDevice *>( pDevice ) ) );
   }
+
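[Editorial sketch, not part of the patch: the getProcAddr wrappers above are the primitive the header's dispatchers are built on, vkGetInstanceProcAddr resolving instance-level entry points and vkGetDeviceProcAddr resolving device-level ones. A minimal manual resolution sketch; the handle names are illustrative and null checks are omitted.]

// Sketch: resolving entry points by hand via the wrappers above (assumes valid handles).
#include <vulkan/vulkan.hpp>

void resolveExample( vk::Instance instance, vk::Device device )
{
  // Instance-level resolution, e.g. for an extension entry point.
  auto pfnCreateMessenger = reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(
    instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT" ) );

  // Device-level resolution avoids the instance-level dispatch trampoline.
  auto pfnQueueSubmit = reinterpret_cast<PFN_vkQueueSubmit>( device.getProcAddr( "vkQueueSubmit" ) );

  (void)pfnCreateMessenger;
  (void)pfnQueueSubmit;
}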
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
-  {
-    Display dpy;
-    Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
-    return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
+    PhysicalDevice::createDevice( const DeviceCreateInfo &            createInfo,
+                                  Optional<const AllocationCallbacks> allocator,
+                                  Dispatch const &                    d ) const
   {
     VULKAN_HPP_NAMESPACE::Device device;
-    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
-    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" );
+    Result result = static_cast<Result>(
+      d.vkCreateDevice( m_physicalDevice,
+                        reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
+                        reinterpret_cast<const VkAllocationCallbacks *>(
+                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                        reinterpret_cast<VkDevice *>( &device ) ) );
+    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
   }
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device,Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
+    PhysicalDevice::createDeviceUnique( const DeviceCreateInfo &            createInfo,
+                                        Optional<const AllocationCallbacks> allocator,
+                                        Dispatch const &                    d ) const
   {
     VULKAN_HPP_NAMESPACE::Device device;
-    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
-
-    ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
-    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter );
+    Result result = static_cast<Result>(
+      d.vkCreateDevice( m_physicalDevice,
+                        reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
+                        reinterpret_cast<const VkAllocationCallbacks *>(
+                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                        reinterpret_cast<VkDevice *>( &device ) ) );
+    ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>(
+      result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter );
   }
-#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
+
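[Editorial sketch, not part of the patch: createDeviceUnique above attaches the ObjectDestroy deleter, so the returned handle destroys itself. A minimal creation sketch; the queue family index is assumed to have been selected beforehand.]

// Illustrative use of createDeviceUnique (assumes a valid physicalDevice and family index).
#include <vulkan/vulkan.hpp>

vk::UniqueDevice makeDevice( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
  float priority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );
  vk::DeviceCreateInfo      createInfo( {}, 1, &queueInfo );

  // The unique variant wires up the ObjectDestroy deleter shown above, so
  // vkDestroyDevice runs automatically when the handle goes out of scope.
  return physicalDevice.createDeviceUnique( createInfo );
}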
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
-    Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
-    return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" );
+    d.vkDestroyDevice( m_device,
+                       reinterpret_cast<const VkAllocationCallbacks *>(
+                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    enumerateInstanceExtensionProperties( const char *                                pLayerName,
+                                          uint32_t *                                  pPropertyCount,
+                                          VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
+                                          Dispatch const &                            d ) VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+    return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties(
+      pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d ) const
+  template <typename ExtensionPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+    enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
   {
-    std::vector<ExtensionProperties,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
+    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
+    uint32_t                                                       propertyCount;
+    Result                                                         result;
     do
     {
-      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      result = static_cast<Result>(
+        d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
       if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+        result = static_cast<Result>(
+          d.vkEnumerateInstanceExtensionProperties( layerName ?
+                                                      layerName->c_str() : nullptr,
+                                                    &propertyCount,
+                                                    reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      properties.resize( propertyCount );
    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
   }
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const
+
+  template <typename ExtensionPropertiesAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+    enumerateInstanceExtensionProperties( Optional<const std::string>    layerName,
+                                          ExtensionPropertiesAllocator & extensionPropertiesAllocator,
+                                          Dispatch const &               d )
   {
-    std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
+    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+    uint32_t                                                       propertyCount;
+    Result                                                         result;
     do
     {
-      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      result = static_cast<Result>(
+        d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
       if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+        result = static_cast<Result>(
+          d.vkEnumerateInstanceExtensionProperties( layerName ?
+                                                      layerName->c_str() : nullptr,
+                                                    &propertyCount,
+                                                    reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      properties.resize( propertyCount );
    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::enumerateDeviceExtensionProperties( const char *                                pLayerName,
+                                                        uint32_t *                                  pPropertyCount,
+                                                        VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+      m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const
+  template <typename ExtensionPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+    PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
+                                                        Dispatch const &            d ) const
   {
-    std::vector<LayerProperties,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
+    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
+    uint32_t                                                       propertyCount;
+    Result                                                         result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+        m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>(
+          d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
+                                                  layerName ? layerName->c_str() : nullptr,
+                                                  &propertyCount,
+                                                  reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+  }
+
+  template <typename ExtensionPropertiesAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+    PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string>    layerName,
+                                                        ExtensionPropertiesAllocator & extensionPropertiesAllocator,
+                                                        Dispatch const &               d ) const
+  {
+    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+    uint32_t                                                       propertyCount;
+    Result                                                         result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
+        m_physicalDevice, layerName ?
+                                      layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>(
+          d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
+                                                  layerName ? layerName->c_str() : nullptr,
+                                                  &propertyCount,
+                                                  reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    enumerateInstanceLayerProperties( uint32_t *                              pPropertyCount,
+                                      VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+                                      Dispatch const &                        d ) VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    enumerateInstanceLayerProperties( Dispatch const & d )
+  {
+    std::vector<LayerProperties, LayerPropertiesAllocator> properties;
+    uint32_t                                               propertyCount;
+    Result                                                 result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties(
+          &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+  }
+
+  template <typename LayerPropertiesAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
+  {
+    std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+    uint32_t                                               propertyCount;
+    Result                                                 result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties(
+          &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::enumerateDeviceLayerProperties( uint32_t *                              pPropertyCount,
+                                                    VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+      m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+  }
+
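[Editorial sketch, not part of the patch: the free function enumerateInstanceLayerProperties added above is typically used before instance creation to probe for layers. The layer name below is the usual Khronos validation layer; its availability on a given system is an assumption.]

// Sketch: checking for a layer with the free function added above.
#include <vulkan/vulkan.hpp>
#include <cstring>
#include <vector>

bool hasValidationLayer()
{
  std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
  for ( const auto & layer : layers )
    if ( std::strcmp( layer.layerName.data(), "VK_LAYER_KHRONOS_validation" ) == 0 )
      return true;
  return false;
}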
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
+  {
+    std::vector<LayerProperties, LayerPropertiesAllocator> properties;
+    uint32_t                                               propertyCount;
+    Result                                                 result;
     do
     {
       result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
       if ( ( result == Result::eSuccess ) && propertyCount )
       {
         properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+          m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
       }
     } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
     {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
       properties.resize( propertyCount );
     }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
   }
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
+
+  template <typename LayerPropertiesAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
+                                                    Dispatch const &           d ) const
   {
-    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
+    std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+    uint32_t                                               propertyCount;
+    Result                                                 result;
     do
     {
       result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
       if ( ( result == Result::eSuccess ) && propertyCount )
       {
         properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
+          m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
       }
     } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
     {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
       properties.resize( propertyCount );
     }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t                      queueFamilyIndex,
+                                           uint32_t                      queueIndex,
+                                           VULKAN_HPP_NAMESPACE::Queue * pQueue,
+                                           Dispatch const &              d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR*>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( pCounterDescriptions ) ) );
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
+    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions;
-    uint32_t counterCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
-      if ( ( result == Result::eSuccess ) && counterCount )
-      {
-        counterDescriptions.resize( counterCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
-      counterDescriptions.resize( counterCount );
-    }
-    return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
-  }
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions( vectorAllocator );
-    uint32_t counterCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
-      if ( ( result == Result::eSuccess ) && counterCount )
-      {
-        counterDescriptions.resize( counterCount );
-        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, counters.size() , reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
-      counterDescriptions.resize( counterCount );
-    }
-    return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
+    return queue;
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Queue::submit( uint32_t                                 submitCount,
+                   const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
+                   VULKAN_HPP_NAMESPACE::Fence              fence,
+                   Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( pProperties ) ) );
+    return static_cast<Result>( d.vkQueueSubmit(
+      m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
+                   VULKAN_HPP_NAMESPACE::Fence                                fence,
+                   Dispatch const &                                           d ) const
   {
-    std::vector<DisplayModeProperties2KHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
-  }
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayModeProperties2KHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
+    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue,
+                                                          submits.size(),
+                                                          reinterpret_cast<const VkSubmitInfo *>( submits.data() ),
+                                                          static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
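[Editorial sketch, not part of the patch: the enhanced-mode Queue::submit added above takes an ArrayProxy, so one or many vk::SubmitInfo structures can be passed without building a container. The handles below are assumed valid and illustrative.]

// Sketch of the ArrayProxy-based submit added above.
#include <vulkan/vulkan.hpp>

void submitOnce( vk::Queue queue, vk::CommandBuffer commandBuffer, vk::Fence fence )
{
  vk::SubmitInfo submitInfo( 0, nullptr, nullptr, 1, &commandBuffer );

  // ArrayProxy lets a single element bind to the `submits` parameter; the
  // wrapper forwards size()/data() to vkQueueSubmit and throws on failure.
  queue.submit( submitInfo, fence );
}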
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
-  {
-    std::vector<DisplayModePropertiesKHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
-  }
-  template<typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
-  {
-    std::vector<DisplayModePropertiesKHR,Allocator> properties( vectorAllocator );
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( pDisplayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( pCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
-    Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( &capabilities ) ) );
-    return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
-  {
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; - Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); - return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const - { - std::vector displays; - uint32_t displayCount; - Result result; - do - { - result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && displayCount ) - { - displays.resize( displayCount ); - result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( displayCount <= displays.size() ); - displays.resize( displayCount ); - } - return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector displays( vectorAllocator ); - uint32_t displayCount; - Result result; - do - { - result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && displayCount ) - { - displays.resize( displayCount ); - result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( displayCount <= displays.size() ); - displays.resize( displayCount ); - } - return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const - { - std::vector timeDomains; - uint32_t timeDomainCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); - if ( ( result 
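The bodies being removed all implement the standard Vulkan two-call enumeration idiom: query the count, size the array, query again, and loop while the driver reports VK_INCOMPLETE. The same idiom written by hand against the C API, for reference (a sketch; physicalDevice is assumed to be a valid handle):

    #include <vector>
    #include <vulkan/vulkan.h>

    std::vector<VkExtensionProperties> enumerateDeviceExtensions( VkPhysicalDevice physicalDevice )
    {
      std::vector<VkExtensionProperties> props;
      uint32_t count = 0;
      VkResult result;
      do
      {
        result = vkEnumerateDeviceExtensionProperties( physicalDevice, nullptr, &count, nullptr );
        if ( result == VK_SUCCESS && count != 0 )
        {
          props.resize( count );
          // The count can change between the two calls, hence the retry loop.
          result = vkEnumerateDeviceExtensionProperties( physicalDevice, nullptr, &count, props.data() );
        }
      } while ( result == VK_INCOMPLETE );
      props.resize( count );
      return props;
    }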
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( pTimeDomains ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const
-  {
-    std::vector<TimeDomainEXT,Allocator> timeDomains;
-    uint32_t timeDomainCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && timeDomainCount )
-      {
-        timeDomains.resize( timeDomainCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( timeDomains.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
-      timeDomains.resize( timeDomainCount );
-    }
-    return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const
-  {
-    std::vector<CooperativeMatrixPropertiesNV,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const
-  {
-    std::vector<DisplayPlaneProperties2KHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
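In the enhanced mode that replaces these wrappers, the VK_INCOMPLETE loop lives inside the header and the call site collapses to one line. A sketch, assuming the VK_KHR_display and VK_KHR_get_display_properties2 instance extensions are enabled and exceptions are on:

    #include <iostream>
    #include <vulkan/vulkan.hpp>

    void listDisplayPlanes( vk::PhysicalDevice physicalDevice )
    {
      // Runs the count/resize/retry loop internally and throws on failure.
      auto planes = physicalDevice.getDisplayPlaneProperties2KHR();
      for ( auto const & plane : planes )
        std::cout << "plane current stack index: "
                  << plane.displayPlaneProperties.currentStackIndex << '\n';
    }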
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const
-  {
-    std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const
-  {
-    std::vector<DisplayProperties2KHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const
-  {
-    std::vector<DisplayPropertiesKHR,Allocator> properties;
-    uint32_t propertyCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && propertyCount )
-      {
-        properties.resize( propertyCount );
-        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
-    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
-    return externalBufferProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
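Queries like getExternalBufferProperties have no VkResult, so the enhanced wrapper can return the output structure by value. A sketch of checking whether an external memory handle type forces dedicated allocations (the handle type chosen here is an assumption for illustration):

    #include <vulkan/vulkan.hpp>

    bool requiresDedicatedAllocation( vk::PhysicalDevice physicalDevice )
    {
      vk::PhysicalDeviceExternalBufferInfo info{};
      info.usage      = vk::BufferUsageFlagBits::eTransferSrc;
      info.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
      vk::ExternalBufferProperties props = physicalDevice.getExternalBufferProperties( info );
      return static_cast<bool>( props.externalMemoryProperties.externalMemoryFeatures &
                                vk::ExternalMemoryFeatureFlagBits::eDedicatedOnly );
    }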
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
-    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
-    return externalBufferProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
-    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
-    return externalFenceProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
-    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
-    return externalFenceProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
-    return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
-    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
-    return externalSemaphoreProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
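The same by-value pattern applies to the semaphore variant. A sketch of testing whether a semaphore can be exported as an opaque POSIX fd (the handle type is an assumption for illustration):

    #include <vulkan/vulkan.hpp>

    bool canExportSemaphoreFd( vk::PhysicalDevice physicalDevice )
    {
      vk::PhysicalDeviceExternalSemaphoreInfo info{};
      info.handleType = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
      vk::ExternalSemaphoreProperties props = physicalDevice.getExternalSemaphoreProperties( info );
      return static_cast<bool>( props.externalSemaphoreFeatures &
                                vk::ExternalSemaphoreFeatureFlagBits::eExportable );
    }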
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
-    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
-    return externalSemaphoreProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
-    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
-    return features;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
-    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
-    return features;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
-    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
-    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
-    return features;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
-    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
-    return formatProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
-    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
-    return formatProperties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
-    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
-    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
-    return formatProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
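The StructureChain overloads above wire up the pNext pointers so one query fills the core structure plus any chained extension structures. A sketch using the features2 chain (assumes a Vulkan 1.2-capable device, since PhysicalDeviceDescriptorIndexingFeatures is used):

    #include <vulkan/vulkan.hpp>

    bool supportsRuntimeDescriptorArrays( vk::PhysicalDevice physicalDevice )
    {
      auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                               vk::PhysicalDeviceDescriptorIndexingFeatures>();
      // The chain owns both structs; get<>() returns the filled-in member.
      auto const & indexing = chain.get<vk::PhysicalDeviceDescriptorIndexingFeatures>();
      return indexing.runtimeDescriptorArray == VK_TRUE;
    }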
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
-  {
-    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
-    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
-    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
-    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
-    return memoryProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
-    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
-    return memoryProperties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
-    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
-    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
-    return memoryProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
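The memory-properties getter is the usual input to memory-type selection. A standard sketch built on the wrapper above (the throw on failure is a choice for the example, not part of the header):

    #include <stdexcept>
    #include <vulkan/vulkan.hpp>

    uint32_t findMemoryType( vk::PhysicalDevice physicalDevice,
                             uint32_t typeBits, vk::MemoryPropertyFlags wanted )
    {
      vk::PhysicalDeviceMemoryProperties mem = physicalDevice.getMemoryProperties();
      for ( uint32_t i = 0; i < mem.memoryTypeCount; ++i )
        // typeBits comes from VkMemoryRequirements::memoryTypeBits.
        if ( ( typeBits & ( 1u << i ) ) &&
             ( mem.memoryTypes[i].propertyFlags & wanted ) == wanted )
          return i;
      throw std::runtime_error( "no suitable memory type" );
    }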
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( pMultisampleProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
-    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( &multisampleProperties ) );
-    return multisampleProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
-  {
-    std::vector<Rect2D,Allocator> rects;
-    uint32_t rectCount;
-    Result result;
-    do
-    {
-      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
-      if ( ( result == Result::eSuccess ) && rectCount )
-      {
-        rects.resize( rectCount );
-        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( rectCount <= rects.size() );
-      rects.resize( rectCount );
-    }
-    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
-    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
-    return properties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
-    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
-    return properties;
-  }
-  template <typename X, typename Y, typename ...Z, typename Dispatch>
-  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
-    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
-    return structureChain;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
-    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
-    return properties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( pPerformanceQueryCreateInfo ), pNumPasses );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    uint32_t numPasses;
-    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( &performanceQueryCreateInfo ), &numPasses );
-    return numPasses;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
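The properties2 chain works the same way as the features chain. A sketch that reads the driver identity alongside the core properties (assumes a Vulkan 1.2-capable device, since PhysicalDeviceDriverProperties is chained):

    #include <iostream>
    #include <vulkan/vulkan.hpp>

    void printDriverInfo( vk::PhysicalDevice physicalDevice )
    {
      auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                 vk::PhysicalDeviceDriverProperties>();
      auto const & props  = chain.get<vk::PhysicalDeviceProperties2>().properties;
      auto const & driver = chain.get<vk::PhysicalDeviceDriverProperties>();
      std::cout << props.deviceName << " / " << driver.driverName << '\n';
    }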
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-  template <typename StructureChain, typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
-  {
-    std::vector<StructureChain,Allocator> queueFamilyProperties;
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
-    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
-    {
-      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
-    }
-    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
-    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
-    {
-      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
-    }
-    return queueFamilyProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
-  {
-    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
-    uint32_t queueFamilyPropertyCount;
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
-    queueFamilyProperties.resize( queueFamilyPropertyCount );
-    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
-    return queueFamilyProperties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Allocator, typename Dispatch>
-  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d ) const
-  {
-    std::vector<SparseImageFormatProperties,Allocator> properties;
-    uint32_t propertyCount;
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
-    properties.resize( propertyCount );
-    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
-    return properties;
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
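Queue-family enumeration is typically the first consumer of these wrappers during device selection. A common sketch built on getQueueFamilyProperties:

    #include <optional>
    #include <vector>
    #include <vulkan/vulkan.hpp>

    std::optional<uint32_t> findGraphicsQueueFamily( vk::PhysicalDevice physicalDevice )
    {
      std::vector<vk::QueueFamilyProperties> families =
        physicalDevice.getQueueFamilyProperties();
      for ( uint32_t i = 0; i < families.size(); ++i )
        if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
          return i;   // family index usable for vkCreateDevice / getQueue
      return std::nullopt;
    }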
const& vectorAllocator, Dispatch const &d ) const - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, nullptr ); - properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, reinterpret_cast( properties.data() ) ); - return properties; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const - { - std::vector properties; - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); - properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); - return properties; - } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); - properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); - return properties; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const - { - std::vector properties; - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); - properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); - 
return properties; - } - template - VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); - properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); - return properties; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast( pCombinations ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const - { - std::vector combinations; - uint32_t combinationCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && combinationCount ) - { - combinations.resize( combinationCount ); - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); - combinations.resize( combinationCount ); - } - return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector combinations( vectorAllocator ); - uint32_t combinationCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && combinationCount ) - { - combinations.resize( combinationCount ); - result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); - combinations.resize( combinationCount ); - } - return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); - return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pSurfaceCapabilities ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); - return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const - { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get(); - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); - return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); - return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" ); - } -#endif 
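// Usage sketch (illustrative helper, not vulkan.hpp API): with exceptions
// enabled, ResultValueType<SurfaceCapabilitiesKHR>::type unwraps to the plain
// struct, so failures surface as vk::SystemError. Assumes valid `gpu` and
// `surface` handles.
#include <vulkan/vulkan.hpp>

vk::Extent2D currentSurfaceExtent( vk::PhysicalDevice gpu, vk::SurfaceKHR surface )
{
  vk::SurfaceCapabilitiesKHR caps = gpu.getSurfaceCapabilitiesKHR( surface );
  // currentExtent is {0xFFFFFFFF, 0xFFFFFFFF} when the window system drives the size
  return caps.currentExtent;
}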
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const - { - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector surfaceFormats( vectorAllocator ); - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const - { - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( 
m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector surfaceFormats( vectorAllocator ); - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const - { - std::vector presentModes; - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const 
PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector presentModes( vectorAllocator ); - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const - { - std::vector presentModes; - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector presentModes( vectorAllocator ); - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, 
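// The do/while loops above guard the race between the count query and the
// fill call: if more items become available in between, the fill call returns
// eIncomplete and the loop retries. A minimal sketch of the same idiom through
// the pointer overload, assuming valid `gpu` and `surface` handles;
// `queryPresentModes` is an illustrative helper.
#include <vulkan/vulkan.hpp>
#include <vector>

std::vector<vk::PresentModeKHR> queryPresentModes( vk::PhysicalDevice gpu, vk::SurfaceKHR surface )
{
  std::vector<vk::PresentModeKHR> modes;
  uint32_t   count = 0;
  vk::Result result;
  do
  {
    result = gpu.getSurfacePresentModesKHR( surface, &count, nullptr );
    if ( result == vk::Result::eSuccess && count != 0 )
    {
      modes.resize( count );
      result = gpu.getSurfacePresentModesKHR( surface, &count, modes.data() );
    }
  } while ( result == vk::Result::eIncomplete );
  modes.resize( count );  // count may have shrunk on the final pass
  return modes;
}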
VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const - { - VULKAN_HPP_NAMESPACE::Bool32 supported; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); - return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const - { - std::vector toolProperties; - uint32_t toolCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && toolCount ) - { - toolProperties.resize( toolCount ); - result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); - toolProperties.resize( toolCount ); - } - return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector toolProperties( vectorAllocator ); - uint32_t toolCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && toolCount ) - { - toolProperties.resize( toolCount ); - result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); - toolProperties.resize( toolCount ); - } - return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* 
display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d )
const - { - VULKAN_HPP_NAMESPACE::DisplayKHR display; - Result result = static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); - return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); - } -#else - template - VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const - { - Result result = static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Dispatch const &d ) const - { - std::vector checkpointData; - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return checkpointData; - } - template - VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector checkpointData( vectorAllocator ); - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return checkpointData; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - - template - VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::bindSparse( ArrayProxy bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch 
const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
-  {
-    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
-    return
createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueWaitIdle( m_queue ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::waitIdle( Dispatch const & d ) const { Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct StructExtends{ enum { value = true }; }; +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkDeviceWaitIdle( m_device ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::waitIdle( Dispatch const & d ) const + { + Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkAllocateMemory( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMemory ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + ObjectFree deleter( *this, allocator, d ); + return createResultValue( + result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void 
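// Sketch combining Queue::submit and Queue::waitIdle as wrapped above: in
// enhanced mode submit() unwraps to void and throws on failure, while the
// non-enhanced path returns the raw Result (hence VULKAN_HPP_NODISCARD).
// `submitAndWait` is an illustrative helper; assumes a valid vk::Queue
// `queue` and a fully recorded vk::CommandBuffer `cmd`.
#include <vulkan/vulkan.hpp>

void submitAndWait( vk::Queue queue, vk::CommandBuffer cmd )
{
  vk::SubmitInfo submitInfo( 0, nullptr, nullptr, 1, &cmd );
  queue.submit( submitInfo, vk::Fence() );  // null fence: we block on waitIdle instead
  queue.waitIdle();
}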
Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + Dispatch const & d ) const + { + void * pData; + Result result = static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkUnmapMemory( m_device, static_cast( memory ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkFlushMappedMemoryRanges( + m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::flushMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d ) const + { + Result result = static_cast( d.vkFlushMappedMemoryRanges( + m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING 
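// Sketch of the map -> write -> flush -> unmap sequence these wrappers cover;
// the flush is only required when the memory type lacks eHostCoherent.
// `upload` is an illustrative helper, assuming `memory` is a valid
// host-visible allocation of at least `bytes`.
#include <vulkan/vulkan.hpp>
#include <cstring>

void upload( vk::Device device, vk::DeviceMemory memory, const void * src, vk::DeviceSize bytes )
{
  void * dst = device.mapMemory( memory, 0, bytes );
  std::memcpy( dst, src, static_cast<size_t>( bytes ) );
  vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );  // whole range sidesteps nonCoherentAtomSize rounding
  device.flushMappedMemoryRanges( range );
  device.unmapMemory( memory );
}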
"::Device::flushMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::invalidateMappedMemoryRanges( + ArrayProxy const & memoryRanges, Dispatch const & d ) const + { + Result result = static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); + return committedMemoryInBytes; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindBufferMemory( m_device, + static_cast( buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindBufferMemory( m_device, + static_cast( buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindImageMemory( m_device, + static_cast( image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + 
VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindImageMemory( m_device, + static_cast( image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirementsAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + 
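// Typical query -> allocate -> bind chain built from the wrappers above. A
// real allocator derives `memoryTypeIndex` from reqs.memoryTypeBits and the
// device's memory properties; that selection is elided here. Illustrative
// helper, valid handles assumed.
vk::DeviceMemory bindNewBuffer( vk::Device device, vk::Buffer buffer, uint32_t memoryTypeIndex )
{
  vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
  vk::DeviceMemory memory = device.allocateMemory( vk::MemoryAllocateInfo( reqs.size, memoryTypeIndex ) );
  device.bindBufferMemory( buffer, memory, 0 );  // throws vk::SystemError on failure in enhanced mode
  return memory;
}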
VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirementsAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d ) const + { + std::vector properties( + sparseImageFormatPropertiesAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + 
properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkQueueBindSparse( m_queue, + bindInfoCount, + reinterpret_cast( pBindInfo ), + static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkQueueBindSparse( m_queue, + bindInfo.size(), + reinterpret_cast( bindInfo.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateFence( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createFence( const FenceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createFenceUnique( const FenceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFence( + m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFence( 
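// Sketch of the Unique path above: the ObjectDestroy deleter travels inside
// vk::UniqueFence, so vkDestroyFence runs automatically on scope exit.
// Creating fences pre-signaled is the usual trick for first-frame waits.
// Illustrative helper, assuming a valid vk::Device `device`.
vk::UniqueFence makeSignaledFence( vk::Device device )
{
  return device.createFenceUnique( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
}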
VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFence( m_device, + static_cast( fence ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFence( + m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFence( m_device, + static_cast( fence ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetFences( ArrayProxy const & fences, Dispatch const & d ) const + { + Result result = static_cast( + d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkWaitForFences( + m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const + { + Result result = static_cast( d.vkWaitForFences( m_device, + fences.size(), + reinterpret_cast( fences.data() ), + static_cast( waitAll ), + timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
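// Per-frame fence cycle using the calls above. waitForFences keeps eTimeout
// in its success set, so even the enhanced overload returns vk::Result and
// the caller must inspect it. Illustrative helper; `fence` must belong to
// `device`.
#include <cstdint>

void recycleFrameFence( vk::Device device, vk::Fence fence )
{
  while ( device.waitForFences( fence, VK_TRUE, UINT64_MAX ) == vk::Result::eTimeout )
    ;                            // eSuccess falls through; errors throw
  device.resetFences( fence );   // back to unsignaled for reuse
}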
Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateSemaphore( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSemaphore ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSemaphore( const SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateEvent( m_device, + 
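// Same Unique pattern as createFenceUnique, applied to the semaphore factory
// above; convenient for per-frame acquire/present pairs. Illustrative helper,
// assuming a valid vk::Device `device`.
vk::UniqueSemaphore makeSemaphore( vk::Device device )
{
  return device.createSemaphoreUnique( vk::SemaphoreCreateInfo() );
}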
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
+                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                         VULKAN_HPP_NAMESPACE::Event * pEvent,
+                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateEvent( m_device,
+                                                 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
+                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                 reinterpret_cast<VkEvent *>( pEvent ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
+    Device::createEvent( const EventCreateInfo & createInfo,
+                         Optional<const AllocationCallbacks> allocator,
+                         Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>(
+      d.vkCreateEvent( m_device,
+                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
+                       reinterpret_cast<const VkAllocationCallbacks *>(
+                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                       reinterpret_cast<VkEvent *>( &event ) ) );
+    return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<Event, Dispatch>>::type
+    Device::createEventUnique( const EventCreateInfo & createInfo,
+                               Optional<const AllocationCallbacks> allocator,
+                               Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>(
+      d.vkCreateEvent( m_device,
+                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
+                       reinterpret_cast<const VkAllocationCallbacks *>(
+                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                       reinterpret_cast<VkEvent *>( &event ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Event, Dispatch>(
+      result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
+                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent(
+      m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
+                                               Optional<const AllocationCallbacks> allocator,
+                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device,
+                      static_cast<VkEvent>( event ),
+                      reinterpret_cast<const VkAllocationCallbacks *>(
+                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent(
+      m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device,
+                      static_cast<VkEvent>( event ),
+                      reinterpret_cast<const VkAllocationCallbacks *>(
+                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+                                                                        Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
+                              { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
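  // Usage sketch, handles assumed rather than generated here: getEventStatus is one of the
  // few calls whose non-error codes are eEventSet / eEventReset instead of eSuccess, so even
  // enhanced mode returns the raw Result rather than throwing. Together with the host-side
  // setEvent / resetEvent defined just below:
  //
  //   device.setEvent( event );  // throws on failure in enhanced mode
  //   bool signaled = ( device.getEventStatus( event ) == vk::Result::eEventSet );
  //   device.resetEvent( event );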
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+                                                                             Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
+                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                             VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
+                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateQueryPool( m_device,
+                                                     reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
+                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                     reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
+    Device::createQueryPool( const QueryPoolCreateInfo & createInfo,
+                             Optional<const AllocationCallbacks> allocator,
+                             Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>(
+      d.vkCreateQueryPool( m_device,
+                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
+                           reinterpret_cast<const VkAllocationCallbacks *>(
+                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                           reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
+    return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<QueryPool, Dispatch>>::type
+    Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo,
+                                   Optional<const AllocationCallbacks> allocator,
+                                   Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>(
+      d.vkCreateQueryPool( m_device,
+                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
+                           reinterpret_cast<const VkAllocationCallbacks *>(
+                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                           reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<QueryPool, Dispatch>(
+      result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool(
+      m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                   Optional<const AllocationCallbacks> allocator,
+                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device,
+                          static_cast<VkQueryPool>( queryPool ),
+                          reinterpret_cast<const VkAllocationCallbacks *>(
+                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool(
+      m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device,
+                          static_cast<VkQueryPool>( queryPool ),
+                          reinterpret_cast<const VkAllocationCallbacks *>(
+                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                 uint32_t firstQuery,
+                                 uint32_t queryCount,
+                                 size_t dataSize,
+                                 void * pData,
+                                 VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                                 VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+                                                         static_cast<VkQueryPool>( queryPool ),
+                                                         firstQuery,
+                                                         queryCount,
+                                                         dataSize,
+                                                         pData,
+                                                         static_cast<VkDeviceSize>( stride ),
+                                                         static_cast<VkQueryResultFlags>( flags ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
+  VULKAN_HPP_NODISCARD
+    VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                          uint32_t firstQuery,
+                                                          uint32_t queryCount,
+                                                          ArrayProxy<T> const & data,
+                                                          VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                                                          VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+                                                          Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+                                                                  static_cast<VkQueryPool>( queryPool ),
+                                                                  firstQuery,
+                                                                  queryCount,
+                                                                  data.size() * sizeof( T ),
+                                                                  reinterpret_cast<void *>( data.data() ),
+                                                                  static_cast<VkDeviceSize>( stride ),
+                                                                  static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue(
+      result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+  }
+
+  template <typename T, typename Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T, Allocator>>
+    Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                 uint32_t firstQuery,
+                                 uint32_t queryCount,
+                                 size_t dataSize,
+                                 VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                                 VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+                                 Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
+    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
+    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+                                                                  static_cast<VkQueryPool>( queryPool ),
+                                                                  firstQuery,
+                                                                  queryCount,
+                                                                  data.size() * sizeof( T ),
+                                                                  reinterpret_cast<void *>( data.data() ),
+                                                                  static_cast<VkDeviceSize>( stride ),
+                                                                  static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue( result,
+                              data,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+  }
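  // Usage sketch, handles and counts are illustrative: the typed wrappers above read query
  // results into client memory, and eNotReady is a non-throwing outcome, so they return a
  // ResultValue pair; the single-value getQueryPoolResult variant follows below. For two
  // timestamp queries written on the GPU:
  //
  //   vk::ResultValue<std::vector<uint64_t>> rv = device.getQueryPoolResults<uint64_t>(
  //     pool, 0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ),
  //     vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );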
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T>
+    Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                uint32_t firstQuery,
+                                uint32_t queryCount,
+                                VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                                VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+                                Dispatch const & d ) const
+  {
+    T data;
+    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
+                                                                  static_cast<VkQueryPool>( queryPool ),
+                                                                  firstQuery,
+                                                                  queryCount,
+                                                                  sizeof( T ),
+                                                                  reinterpret_cast<void *>( &data ),
+                                                                  static_cast<VkDeviceSize>( stride ),
+                                                                  static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue( result,
+                              data,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
+                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                          VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
+                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateBuffer( m_device,
+                                                  reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
+                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                  reinterpret_cast<VkBuffer *>( pBuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
+    Device::createBuffer( const BufferCreateInfo & createInfo,
+                          Optional<const AllocationCallbacks> allocator,
+                          Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>(
+      d.vkCreateBuffer( m_device,
+                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+                        reinterpret_cast<const VkAllocationCallbacks *>(
+                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                        reinterpret_cast<VkBuffer *>( &buffer ) ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<Buffer, Dispatch>>::type
+    Device::createBufferUnique( const BufferCreateInfo & createInfo,
+                                Optional<const AllocationCallbacks> allocator,
+                                Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>(
+      d.vkCreateBuffer( m_device,
+                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+                        reinterpret_cast<const VkAllocationCallbacks *>(
+                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                        reinterpret_cast<VkBuffer *>( &buffer ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Buffer, Dispatch>(
+      result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer(
+      m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                Optional<const AllocationCallbacks> allocator,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device,
+                       static_cast<VkBuffer>( buffer ),
+                       reinterpret_cast<const VkAllocationCallbacks *>(
+                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer(
+      m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const
VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBuffer( m_device, + static_cast( buffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateBufferView( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createBufferView( const BufferViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::BufferView view; + Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::BufferView view; + Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createImage( const 
VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateImage( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pImage ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createImage( const ImageCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Image image; + Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createImageUnique( const ImageCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Image image; + Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::SubresourceLayout + Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const ImageSubresource & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + return layout; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateImageView( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createImageView( const ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageView view; + Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageView view; + Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( m_device, + static_cast( imageView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyImageView( m_device, + 
static_cast( imageView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pShaderModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result
+    Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
+                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                 VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
+                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreatePipelineCache( m_device,
+                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                               reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
+    Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo,
+                                 Optional<const AllocationCallbacks> allocator,
+                                 Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>(
+      d.vkCreatePipelineCache( m_device,
+                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>(
+                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
+    return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<PipelineCache, Dispatch>>::type
+    Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo,
+                                       Optional<const AllocationCallbacks> allocator,
+                                       Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>(
+      d.vkCreatePipelineCache( m_device,
+                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>(
+                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<PipelineCache, Dispatch>(
+      result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device,
+                              static_cast<VkPipelineCache>( pipelineCache ),
+                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+                                                       Optional<const AllocationCallbacks> allocator,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device,
+                              static_cast<VkPipelineCache>( pipelineCache ),
+                              reinterpret_cast<const VkAllocationCallbacks *>(
+                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device,
+                              static_cast<VkPipelineCache>( pipelineCache ),
+                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device,
+                              static_cast<VkPipelineCache>( pipelineCache ),
+                              reinterpret_cast<const VkAllocationCallbacks *>(
+                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
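  // Usage sketch, file name and handles are illustrative only: getPipelineCacheData below
  // uses the usual two-call enumeration pattern (query the size with a null pointer, resize,
  // fetch, retry while the driver reports eIncomplete). The enhanced overload wraps that
  // loop and returns the blob directly, ready to be persisted and later fed back through
  // PipelineCacheCreateInfo::pInitialData:
  //
  //   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );
  //   std::ofstream out( "pipeline_cache.bin", std::ios::binary );
  //   out.write( reinterpret_cast<const char *>( blob.data() ), blob.size() );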
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+                                  size_t * pDataSize,
+                                  void * pData,
+                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
+  {
+    std::vector<uint8_t, Uint8_tAllocator> data;
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
+                                                                static_cast<VkPipelineCache>( pipelineCache ),
+                                                                &dataSize,
+                                                                reinterpret_cast<void *>( data.data() ) ) );
+        VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+  }
+
+  template <typename Uint8_tAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+                                  Uint8_tAllocator & uint8_tAllocator,
+                                  Dispatch const & d ) const
+  {
+    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
+                                                                static_cast<VkPipelineCache>( pipelineCache ),
+                                                                &dataSize,
+                                                                reinterpret_cast<void *>( data.data() ) ) );
+        VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+                                 uint32_t srcCacheCount,
+                                 const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
+                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkMergePipelineCaches( m_device,
+                                                         static_cast<VkPipelineCache>( dstCache ),
+                                                         srcCacheCount,
+                                                         reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
+                                 Dispatch const & d ) const
+  {
+    Result result =
+      static_cast<Result>( d.vkMergePipelineCaches( m_device,
+                                                    static_cast<VkPipelineCache>( dstCache ),
+                                                    srcCaches.size(),
+                                                    reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+                                     uint32_t createInfoCount,
+                                     const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
+                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + 
VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + Result result = static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + Result result = static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateSampler( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSampler ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSampler( const SamplerCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Sampler sampler; + Result result = static_cast( + d.vkCreateSampler( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerUnique( const SamplerCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Sampler sampler; + Result result = static_cast( + d.vkCreateSampler( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSetLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + Result result = static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + Result result = static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result 
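  // Pool-creation sketch for the functions above; the counts are illustrative, and
  // eFreeDescriptorSet is only needed if sets will be freed individually later:
  //   vk::DescriptorPoolSize poolSize{ vk::DescriptorType::eUniformBuffer, 16 };
  //   vk::DescriptorPoolCreateInfo poolInfo{ vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
  //                                          /*maxSets=*/16, 1, &poolSize };
  //   vk::DescriptorPool pool = device.createDescriptorPool( poolInfo );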
Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const + { + Result result = static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount, + descriptorSetAllocator ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector, DescriptorSetAllocator> uniqueDescriptorSets; + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING 
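  // Allocation sketch for the enhanced-mode overloads defined here, reusing the
  // illustrative `pool` and `layout` from the sketches above:
  //   vk::DescriptorSetAllocateInfo allocInfo{ pool, 1, &layout };
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
  //   // RAII variant: auto uniqueSets = device.allocateDescriptorSetsUnique( allocInfo );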
"::Device::allocateDescriptorSetsUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + std::vector, DescriptorSetAllocator> uniqueDescriptorSets( + descriptorSetAllocator ); + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + d.vkUpdateDescriptorSets( m_device, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ), + descriptorCopyCount, + reinterpret_cast( pDescriptorCopies ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSets( ArrayProxy const & descriptorWrites, + ArrayProxy const & descriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkUpdateDescriptorSets( m_device, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateFramebuffer( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFramebuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createFramebuffer( const FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + Result result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + Result result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( 
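  // Framebuffer sketch for createFramebuffer above; `renderPass`, `colorView` and the
  // 1280x720 extent are illustrative assumptions:
  //   vk::FramebufferCreateInfo fbInfo{ {}, renderPass, 1, &colorView, 1280, 720, 1 };
  //   vk::Framebuffer framebuffer = device.createFramebuffer( fbInfo );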
m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateRenderPass( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass( const RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + 
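  // Render-pass sketch for the create/destroy pair above; the single color attachment
  // and its format are illustrative assumptions:
  //   vk::AttachmentDescription color{ {}, vk::Format::eB8G8R8A8Unorm, vk::SampleCountFlagBits::e1,
  //                                    vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
  //                                    vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
  //                                    vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR };
  //   vk::AttachmentReference colorRef{ 0, vk::ImageLayout::eColorAttachmentOptimal };
  //   vk::SubpassDescription subpass{ {}, vk::PipelineBindPoint::eGraphics, 0, nullptr, 1, &colorRef };
  //   vk::RenderPassCreateInfo rpInfo{ {}, 1, &color, 1, &subpass };
  //   vk::RenderPass renderPass = device.createRenderPass( rpInfo );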
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetRenderAreaGranularity( + m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderAreaGranularity( + m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); + return granularity; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateCommandPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCommandPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCommandPool( const CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( 
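  // Command-pool sketch for the functions above; `queueFamilyIndex` is an assumption
  // (normally taken from getQueueFamilyProperties):
  //   vk::CommandPoolCreateInfo cpInfo{ vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
  //                                     queueFamilyIndex };
  //   vk::CommandPool cmdPool = device.createCommandPool( cpInfo );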
commandPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const + { + Result result = static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pCommandBuffers ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount, + commandBufferAllocator ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, 
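  // Recycling sketch: resetting the pool above implicitly resets every command buffer
  // allocated from it (enhanced mode throws instead of returning vk::Result):
  //   device.resetCommandPool( cmdPool, {} );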
CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + std::vector, CommandBufferAllocator> uniqueCommandBuffers; + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + std::vector, CommandBufferAllocator> uniqueCommandBuffers( + commandBufferAllocator ); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBuffers.size(), + reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d ) 
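  // Allocation sketch matching the overloads above; one primary command buffer from
  // the illustrative pool created earlier:
  //   vk::CommandBufferAllocateInfo cbInfo{ cmdPool, vk::CommandBufferLevel::ePrimary, 1 };
  //   vk::CommandBuffer cmd = device.allocateCommandBuffers( cbInfo ).front();
  //   // RAII variant: auto uniqueCmds = device.allocateCommandBuffersUnique( cbInfo );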
const VULKAN_HPP_NOEXCEPT + { + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBuffers.size(), + reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( + const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::end( Dispatch const & d ) const + { + Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( + VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const + { + Result result = + static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipeline( + m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const 
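  // Recording-bracket sketch for begin/end/reset above; eOneTimeSubmit is an
  // illustrative choice:
  //   cmd.begin( vk::CommandBufferBeginInfo{ vk::CommandBufferUsageFlagBits::eOneTimeSubmit } );
  //   // ... record commands ...
  //   cmd.end();        // enhanced mode throws on failure instead of returning a Result
  //   cmd.reset( {} );  // valid because the pool was created with eResetCommandBuffer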
VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetScissor( + m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSetCount, + reinterpret_cast( pDescriptorSets ), + dynamicOffsetCount, + pDynamicOffsets ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy const & descriptorSets, + ArrayProxy const & dynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + 
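  // Dynamic-state sketch for the setters above (the 1280x720 extent is an assumption);
  // the single-element calls go through the ArrayProxy overloads:
  //   cmd.setViewport( 0, vk::Viewport{ 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f } );
  //   cmd.setScissor( 0, vk::Rect2D{ { 0, 0 }, { 1280, 720 } } );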
descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindIndexBuffer( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexedIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + 
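  // Draw-recording sketch; note that the enhanced bindVertexBuffers overload above
  // asserts (or throws) when buffers.size() != offsets.size(). Handles are assumptions:
  //   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
  //   cmd.bindVertexBuffers( 0, { vertexBuffer }, { vk::DeviceSize{ 0 } } );
  //   cmd.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint16 );
  //   cmd.drawIndexed( indexCount, 1, 0, 0, 0 );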
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ), + static_cast( filter ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( 
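  // Transfer sketch for the copy commands above; `src`, `dst` and the 256-byte size
  // are illustrative assumptions:
  //   vk::BufferCopy region{ /*srcOffset=*/0, /*dstOffset=*/0, /*size=*/256 };
  //   cmd.copyBuffer( src, dst, region );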
filter ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( dataSize ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdFillBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + 
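  // Buffer-write sketch; the ArrayProxy overload of updateBuffer above derives the
  // byte count from the element type (`dst` is an assumed vk::Buffer):
  //   const std::array<uint32_t, 4> words{ 0, 1, 2, 3 };
  //   cmd.updateBuffer( dst, /*dstOffset=*/0, vk::ArrayProxy<const uint32_t>( words ) );
  //   cmd.fillBuffer( dst, /*dstOffset=*/16, VK_WHOLE_SIZE, 0u );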
template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pColor ), + rangeCount, + reinterpret_cast( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearColorValue & color, + ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pDepthStencil ), + rangeCount, + reinterpret_cast( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearAttachments( m_commandBuffer, + attachmentCount, + reinterpret_cast( pAttachments ), + rectCount, + reinterpret_cast( pRects ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearAttachments( ArrayProxy const & attachments, + ArrayProxy const & rects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearAttachments( m_commandBuffer, + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( 
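  // Clear sketch for clearColorImage above; the layout and subresource range are
  // illustrative assumptions:
  //   vk::ImageSubresourceRange range{ vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };
  //   cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal,
  //                        vk::ClearColorValue{ std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } },
  //                        range );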
srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWaitEvents( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWaitEvents( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t 
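  // Barrier sketch for the enhanced pipelineBarrier overload defined just below; a
  // deliberately coarse global memory barrier between a transfer write and a shader read:
  //   vk::MemoryBarrier barrier{ vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead };
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
  //                        vk::PipelineStageFlagBits::eFragmentShader,
  //                        {}, barrier, {}, {} );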
bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginQuery( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteTimestamp( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( queryPool ), + query ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyQueryPoolResults( m_commandBuffer, + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t 
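  // Timestamp sketch for the query-pool commands above; `queryPool` is assumed to be
  // a two-entry eTimestamp pool:
  //   cmd.resetQueryPool( queryPool, 0, 2 );
  //   cmd.writeTimestamp( vk::PipelineStageFlagBits::eTopOfPipe, queryPool, 0 );
  //   cmd.writeTimestamp( vk::PipelineStageFlagBits::eBottomOfPipe, queryPool, 1 );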
size, + const void * pValues, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + size, + pValues ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( T ), + reinterpret_cast( values.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + static_cast( contents ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteCommands( + m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::executeCommands( ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdExecuteCommands( + m_commandBuffer, commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d ) + { + uint32_t apiVersion; + Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( 
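  // Version-query sketch for the VK_VERSION_1_1 free function above (enhanced mode
  // returns the packed version directly):
  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
  //   uint32_t major = VK_VERSION_MAJOR( apiVersion );  // e.g. 1
  //   uint32_t minor = VK_VERSION_MINOR( apiVersion );  // e.g. 2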
d.vkBindBufferMemory2( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindBufferMemory2( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindImageMemory2( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
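+  // Editorial note, not part of the upstream generated header: the enhanced
+  // overloads below wrap the usual two-call enumeration protocol (query the
+  // count, size a vector, fetch, and retry while the driver returns
+  // VK_INCOMPLETE) and hand back the filled vector directly. A minimal usage
+  // sketch, assuming a valid vk::Instance named `instance`:
+  //
+  //   std::vector<vk::PhysicalDeviceGroupProperties> groups =
+  //     instance.enumeratePhysicalDeviceGroups();  // throws vk::SystemError on failure
+  //
+  // With VULKAN_HPP_NO_EXCEPTIONS defined, the same call instead yields a
+  // ResultValue whose `result` member must be checked before using `value`.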
+ template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } + + template < + typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( + PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( + const ImageSparseMemoryRequirementsInfo2 & info, + 
SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( 
VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( 
queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d ) const + { + std::vector queueFamilyProperties( + queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount, + structureChainAllocator ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + 
PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( + const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + std::vector properties( + sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkTrimCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceQueue2( + m_device, 
reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue + Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue2( + m_device, reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( + result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplate( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
d.vkDestroyDescriptorUpdateTemplate(
+      m_device,
+      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device,
+                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate(
+      m_device,
+      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
+                                             VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                             const void *                                   pData,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device,
+                                         static_cast<VkDescriptorSet>( descriptorSet ),
+                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+                                         pData );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties(
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalBufferProperties(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
+      reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+    PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferProperties(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+      reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+    return externalBufferProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties(
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalFenceProperties(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
+      reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+    PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFenceProperties(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+      reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+    return externalFenceProperties;
+  }
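+  // Editorial note, not part of the upstream generated header: a minimal
+  // usage sketch for the enhanced overload above, assuming a valid
+  // vk::PhysicalDevice named `physicalDevice`. It probes whether a POSIX
+  // sync-fd fence handle can be exported:
+  //
+  //   vk::PhysicalDeviceExternalFenceInfo info( vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
+  //   vk::ExternalFenceProperties props = physicalDevice.getExternalFenceProperties( info );
+  //   bool exportable =
+  //     static_cast<bool>( props.externalFenceFeatures & vk::ExternalFenceFeatureFlagBits::eExportable );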
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_2 === + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateRenderPass2( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass2( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
d.vkCmdNextSubpass2( m_commandBuffer,
+                         reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
+                         reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                 uint32_t                        firstQuery,
+                                                 uint32_t                        queryCount,
+                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+    Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    uint64_t value;
+    Result   result =
+      static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+                            uint64_t                                        timeout,
+                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo,
+                                                                        uint64_t                  timeout,
+                                                                        Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
+    return createResultValue( result,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore(
+    const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+  }
+#endif
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceMemoryOpaqueCaptureAddress( + m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceMemoryOpaqueCaptureAddress( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_surface === + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( pSupported ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Bool32 supported; + Result result = + static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( &supported ) ) ); + return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) + { 
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue(
+      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+
+  template <typename SurfaceFormatKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                          SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
+                                          Dispatch const & d ) const
+  {
+    std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
+        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
+          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount,
+          reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
+        VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue(
+      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                               uint32_t * pPresentModeCount,
+                                               VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+      m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount,
+      reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PresentModeKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount,
+          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue(
+      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+
+  template <typename PresentModeKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                               PresentModeKHRAllocator & presentModeKHRAllocator,
+                                               Dispatch const & d ) const
+  {
+    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount,
+          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue(
+      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_swapchain ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
+                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
+                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateSwapchainKHR( m_device,
+                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
+                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                              reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
+    Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo,
+                                Optional<const AllocationCallbacks> allocator,
+                                Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>(
+      d.vkCreateSwapchainKHR( m_device,
+                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+                              reinterpret_cast<const VkAllocationCallbacks *>(
+                                static_cast<const AllocationCallbacks *>( allocator ) ),
+                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
+    Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo,
+                                      Optional<const AllocationCallbacks> allocator,
+                                      Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>(
+      d.vkCreateSwapchainKHR( m_device,
+                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+                              reinterpret_cast<const VkAllocationCallbacks *>(
+                                static_cast<const AllocationCallbacks *>( allocator ) ),
+                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device,
+                             static_cast<VkSwapchainKHR>( swapchain ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                      Optional<const AllocationCallbacks> allocator,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device,
+                             static_cast<VkSwapchainKHR>( swapchain ),
+                             reinterpret_cast<const VkAllocationCallbacks *>(
+                               static_cast<const AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device,
+                             static_cast<VkSwapchainKHR>( swapchain ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device,
+                             static_cast<VkSwapchainKHR>( swapchain ),
+                             reinterpret_cast<const VkAllocationCallbacks *>(
+                               static_cast<const AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                   uint32_t * pSwapchainImageCount,
+                                   VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
+                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
+                                                           static_cast<VkSwapchainKHR>( swapchain ),
+                                                           pSwapchainImageCount,
+                                                           reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ImageAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
+    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    std::vector<Image, ImageAllocator> swapchainImages;
+    uint32_t swapchainImageCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
+        m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
+          m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount,
+          reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
+        VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
+    {
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+  }
+
+  template <typename ImageAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
+    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                   ImageAllocator & imageAllocator,
+                                   Dispatch const & d ) const
+  {
+    std::vector<Image, ImageAllocator> swapchainImages( imageAllocator );
+    uint32_t swapchainImageCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
+        m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
+          m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount,
+          reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
+        VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
+    {
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                 uint64_t timeout,
+                                 VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+                                 VULKAN_HPP_NAMESPACE::Fence fence,
+                                 uint32_t * pImageIndex,
+                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
+                                                         static_cast<VkSwapchainKHR>( swapchain ),
+                                                         timeout,
+                                                         static_cast<VkSemaphore>( semaphore ),
+                                                         static_cast<VkFence>( fence ),
+                                                         pImageIndex ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
+    Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                 uint64_t timeout,
+                                 VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+                                 VULKAN_HPP_NAMESPACE::Fence fence,
+                                 Dispatch const & d ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
+                                                                  static_cast<VkSwapchainKHR>( swapchain ),
+                                                                  timeout,
+                                                                  static_cast<VkSemaphore>( semaphore ),
+                                                                  static_cast<VkFence>( fence ),
+                                                                  &imageIndex ) );
+    return createResultValue( result,
+                              imageIndex,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+                                VULKAN_HPP_NAMESPACE::Result::eTimeout,
+                                VULKAN_HPP_NAMESPACE::Result::eNotReady,
+                                VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR(
+    const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo,
+                                                                   Dispatch const & d ) const
+  {
+    Result result =
+      static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
+    return createResultValue(
+      result,
+      VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
+      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
+      m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
+    Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
+      m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
+    return createResultValue(
+      result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                            VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
+                                                static_cast<VkSurfaceKHR>( surface ),
+                                                reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+    Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    Result result = static_cast<Result>(
+      d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
+                                                static_cast<VkSurfaceKHR>( surface ),
+                                                reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
+    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                             uint32_t * pRectCount,
+                                             VULKAN_HPP_NAMESPACE::Rect2D * pRects,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+      m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Rect2DAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
+    PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+  {
+    std::vector<Rect2D, Rect2DAllocator> rects;
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount,
+          reinterpret_cast<VkRect2D *>( rects.data() ) ) );
+        VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
+    {
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+
+  template <typename Rect2DAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
+    PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                             Rect2DAllocator & rect2DAllocator,
+                                             Dispatch const & d ) const
+  {
+    std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator );
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
+          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount,
+          reinterpret_cast<VkRect2D *>( rects.data() ) ) );
+        VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
+    {
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
+                                  uint32_t * pImageIndex,
+                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireNextImage2KHR(
+      m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
+    Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR(
+      m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
+    return createResultValue( result,
+                              imageIndex,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+                                VULKAN_HPP_NAMESPACE::Result::eTimeout,
+                                VULKAN_HPP_NAMESPACE::Result::eNotReady,
+                                VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_display ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
+                                             VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
+      m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+    PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
+  {
+    std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result =
+        static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
+          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+  }
+
+  template <typename DisplayPropertiesKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+    PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
+                                             Dispatch const & d ) const
+  {
+    std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result =
+        static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
+          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
+                                                  VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
+                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+      m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+    PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
+  {
+    std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+  }
+
+  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+    PhysicalDevice::getDisplayPlanePropertiesKHR(
+      DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
+  {
+    std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties(
+      displayPlanePropertiesKHRAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+                                                         uint32_t * pDisplayCount,
+                                                         VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
+      m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
+    PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
+  {
+    std::vector<DisplayKHR, DisplayKHRAllocator> displays;
+    uint32_t displayCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
+          m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
+        VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
+    {
+      displays.resize( displayCount );
+    }
+    return createResultValue(
+      result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+  }
+
+  template <typename DisplayKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
+    PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+                                                         DisplayKHRAllocator & displayKHRAllocator,
+                                                         Dispatch const & d ) const
+  {
+    std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
+    uint32_t displayCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
+          m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
+        VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
+    {
+      displays.resize( displayCount );
+    }
+    return createResultValue(
+      result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                                 uint32_t * pPropertyCount,
+                                                 VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
+                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
+                                       static_cast<VkDisplayKHR>( display ),
+                                       pPropertyCount,
+                                       reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
+        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
+          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount,
+          reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+  }
+
+  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                                 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
+                                                 Dispatch const & d ) const
+  {
+    std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties(
+      displayModePropertiesKHRAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
+        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
+          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount,
+          reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                          const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateDisplayModeKHR( m_physicalDevice,
+                                static_cast<VkDisplayKHR>( display ),
+                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
+                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
+    PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                          const DisplayModeCreateInfoKHR & createInfo,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    Result result = static_cast<Result>(
+      d.vkCreateDisplayModeKHR( m_physicalDevice,
+                                static_cast<VkDisplayKHR>( display ),
+                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
+                                reinterpret_cast<const VkAllocationCallbacks *>(
+                                  static_cast<const AllocationCallbacks *>( allocator ) ),
+                                reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
+    return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<DisplayModeKHR, Dispatch>>::type
+    PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                                const DisplayModeCreateInfoKHR & createInfo,
+                                                Optional<const AllocationCallbacks> allocator,
+                                                Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    Result result = static_cast<Result>(
+      d.vkCreateDisplayModeKHR( m_physicalDevice,
+                                static_cast<VkDisplayKHR>( display ),
+                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
+                                reinterpret_cast<const VkAllocationCallbacks *>(
+                                  static_cast<const AllocationCallbacks *>( allocator ) ),
+                                reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
+    ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
+                                                    uint32_t planeIndex,
+                                                    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
+                                          static_cast<VkDisplayModeKHR>( mode ),
+                                          planeIndex,
+                                          reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
+    PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
+                                                    uint32_t planeIndex,
+                                                    Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+    Result result = static_cast<Result>(
+      d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
+                                          static_cast<VkDisplayModeKHR>( mode ),
+                                          planeIndex,
+                                          reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
+    return createResultValue(
+      result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
+                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                            VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+                                        reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
+                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                        reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo,
+                                            Optional<const AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+                                        reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
+                                        reinterpret_cast<const VkAllocationCallbacks *>(
+                                          static_cast<const AllocationCallbacks *>( allocator ) ),
+                                        reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type
+    Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo,
+                                                  Optional<const AllocationCallbacks> allocator,
+                                                  Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+                                        reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
+                                        reinterpret_cast<const VkAllocationCallbacks *>(
+                                          static_cast<const AllocationCallbacks *>( allocator ) ),
+                                        reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_display_swapchain ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createSharedSwapchainsKHR( uint32_t swapchainCount,
+                                       const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
+                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                       VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateSharedSwapchainsKHR( m_device,
+                                     swapchainCount,
+                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                     reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SwapchainKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
+    Device::createSharedSwapchainsKHR(
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+      Optional<const AllocationCallbacks> allocator,
+      Dispatch const & d ) const
+  {
+    std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
+    Result result = static_cast<Result>(
+      d.vkCreateSharedSwapchainsKHR( m_device,
+                                     createInfos.size(),
+                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const AllocationCallbacks *>( allocator ) ),
+                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+  }
+
+  template <typename SwapchainKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
+    Device::createSharedSwapchainsKHR(
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+      Optional<const AllocationCallbacks> allocator,
+      SwapchainKHRAllocator & swapchainKHRAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
+    Result result = static_cast<Result>(
+      d.vkCreateSharedSwapchainsKHR( m_device,
+                                     createInfos.size(),
+                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const AllocationCallbacks *>( allocator ) ),
+                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type
+    Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+                                      Optional<const AllocationCallbacks> allocator,
+                                      Dispatch const & d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>(
+      d.vkCreateSharedSwapchainsKHR( m_device,
+                                     1,
+                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const AllocationCallbacks *>( allocator ) ),
+                                     reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename SwapchainKHRAllocator>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+    Device::createSharedSwapchainsKHRUnique(
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+      Optional<const AllocationCallbacks> allocator,
+      Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
+    std::vector<SwapchainKHR> swapchains( createInfos.size() );
+    Result result = static_cast<Result>(
+      d.vkCreateSharedSwapchainsKHR( m_device,
+                                     createInfos.size(),
+                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const AllocationCallbacks *>( allocator ) ),
+                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueSwapchains.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i = 0; i < createInfos.size(); i++ )
+      {
+        uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
+      }
+    }
+    return createResultValue(
+      result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+
+  template <typename Dispatch, typename SwapchainKHRAllocator, typename B,
+            typename std::enable_if<
+              std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value,
+              int>::type>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+    Device::createSharedSwapchainsKHRUnique(
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+      Optional<const AllocationCallbacks> allocator,
+      SwapchainKHRAllocator & swapchainKHRAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
+    std::vector<SwapchainKHR> swapchains( createInfos.size() );
+    Result result = static_cast<Result>(
+      d.vkCreateSharedSwapchainsKHR( m_device,
+                                     createInfos.size(),
+                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const AllocationCallbacks *>( allocator ) ),
+                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      uniqueSwapchains.reserve( createInfos.size() );
+      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+      for ( size_t i = 0; i < createInfos.size(); i++ )
+      {
+        uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
+      }
+    }
+    return createResultValue(
+      result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
+    Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+                                            Optional<const AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>(
+      d.vkCreateSharedSwapchainsKHR( m_device,
+                                     1,
+                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const AllocationCallbacks *>( allocator ) ),
+                                     reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
+                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                    VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateXlibSurfaceKHR( m_instance,
+                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
+                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo,
+                                    Optional<const AllocationCallbacks> allocator,
+                                    Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateXlibSurfaceKHR( m_instance,
+                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
+                                reinterpret_cast<const VkAllocationCallbacks *>(
+                                  static_cast<const AllocationCallbacks *>( allocator ) ),
+                                reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type
+    Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateXlibSurfaceKHR( m_instance,
+                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
+                                reinterpret_cast<const VkAllocationCallbacks *>(
+                                  static_cast<const AllocationCallbacks *>( allocator ) ),
+                                reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+                                                                          Display * dpy,
+                                                                          VisualID visualID,
+                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>(
+      d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+                                                                          Display & dpy,
+                                                                          VisualID visualID,
+                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
+                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                   VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateXcbSurfaceKHR( m_instance,
+                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                               reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo,
+                                   Optional<const AllocationCallbacks> allocator,
+                                   Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateXcbSurfaceKHR( m_instance,
+                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>(
+                                 static_cast<const AllocationCallbacks *>( allocator ) ),
+                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type
+    Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo,
+                                         Optional<const AllocationCallbacks> allocator,
+                                         Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateXcbSurfaceKHR( m_instance,
+                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>(
+                                 static_cast<const AllocationCallbacks *>( allocator ) ),
+                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+                                                                         xcb_connection_t * connection,
+                                                                         xcb_visualid_t visual_id,
+                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>(
+      d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+                                                                         xcb_connection_t & connection,
+                                                                         xcb_visualid_t visual_id,
+                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
+                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                       VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateWaylandSurfaceKHR( m_instance,
+                                   reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                   reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo,
+                                       Optional<const AllocationCallbacks> allocator,
+                                       Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateWaylandSurfaceKHR( m_instance,
+                                   reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>(
+                                     static_cast<const AllocationCallbacks *>( allocator ) ),
+                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type
+    Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo,
+                                             Optional<const AllocationCallbacks> allocator,
+                                             Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateWaylandSurfaceKHR( m_instance,
+                                   reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>(
+                                     static_cast<const AllocationCallbacks *>( allocator ) ),
+                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
+    uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>(
+      d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
+    uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
+                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                       VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateAndroidSurfaceKHR( m_instance,
+                                   reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                   reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
+                                       Optional<const AllocationCallbacks> allocator,
+                                       Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateAndroidSurfaceKHR( m_instance,
+                                   reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>(
+                                     static_cast<const AllocationCallbacks *>( allocator ) ),
+                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type
+    Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
+                                             Optional<const AllocationCallbacks> allocator,
+                                             Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateAndroidSurfaceKHR( m_instance,
+                                   reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>(
+                                     static_cast<const AllocationCallbacks *>( allocator ) ),
+                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
+                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                     VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateWin32SurfaceKHR( m_instance,
+                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo,
+                                     Optional<const AllocationCallbacks> allocator,
+                                     Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateWin32SurfaceKHR( m_instance,
+                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>(
+                                   static_cast<const AllocationCallbacks *>( allocator ) ),
+                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type
+    Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo,
+                                           Optional<const AllocationCallbacks> allocator,
+                                           Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateWin32SurfaceKHR( m_instance,
+                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>(
+                                   static_cast<const AllocationCallbacks *>( allocator ) ),
+                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR(
+    uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>(
+      d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
+                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                            VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateDebugReportCallbackEXT( m_instance,
+                                        reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
+                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                        reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
+    Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo,
+                                            Optional<const AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>(
+      d.vkCreateDebugReportCallbackEXT( m_instance,
+                                        reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
+                                        reinterpret_cast<const VkAllocationCallbacks *>(
+                                          static_cast<const AllocationCallbacks *>( allocator ) ),
+                                        reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
+    return createResultValue(
+      result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugReportCallbackEXT, Dispatch>>::type
+    Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo,
+                                                  Optional<const AllocationCallbacks> allocator,
+                                                  Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>(
+      d.vkCreateDebugReportCallbackEXT( m_instance,
+                                        reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
+                                        reinterpret_cast<const VkAllocationCallbacks *>(
+                                          static_cast<const AllocationCallbacks *>( allocator ) ),
+                                        reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter );
+  }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance,
+                                       static_cast<VkDebugReportCallbackEXT>( callback ),
+                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+                                                                  Optional<const AllocationCallbacks> allocator,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT(
+      m_instance,
+      static_cast<VkDebugReportCallbackEXT>( callback ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance,
+                                       static_cast<VkDebugReportCallbackEXT>( callback ),
+                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+                                            Optional<const AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT(
+      m_instance,
+      static_cast<VkDebugReportCallbackEXT>( callback ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
+                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
+                                                          uint64_t object,
+                                                          size_t location,
+                                                          int32_t messageCode,
+                                                          const char * pLayerPrefix,
+                                                          const char * pMessage,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDebugReportMessageEXT( m_instance,
+                               static_cast<VkDebugReportFlagsEXT>( flags ),
+                               static_cast<VkDebugReportObjectTypeEXT>( objectType ),
+                               object,
+                               location,
+                               messageCode,
+                               pLayerPrefix,
+                               pMessage );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
+                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
+                                                          uint64_t object,
+                                                          size_t location,
+                                                          int32_t messageCode,
+                                                          const std::string & layerPrefix,
+                                                          const std::string & message,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDebugReportMessageEXT( m_instance,
+                               static_cast<VkDebugReportFlagsEXT>( flags ),
+                               static_cast<VkDebugReportObjectTypeEXT>( objectType ),
+                               object,
+                               location,
+                               messageCode,
+                               layerPrefix.c_str(),
+                               message.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_EXT_debug_marker ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT(
+    const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT(
+    const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
+      m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
+      m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_queue ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
+                                             VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
+                                                 reinterpret_cast<const VkVideoProfileKHR *>( pVideoProfile ),
+                                                 reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
+    PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
+    Result result = static_cast<Result>(
+      d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
+                                                 reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
+                                                 reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
+    return createResultValue(
+      result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type
+    PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities =
+      structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
+    Result result = static_cast<Result>(
+      d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
+                                                 reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
+                                                 reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
+    return createResultValue(
+      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR(
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
+    uint32_t * pVideoFormatPropertyCount,
+    VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
+      pVideoFormatPropertyCount,
+      reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+    PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
+                                                 Dispatch const & d ) const
+  {
+    std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
+    uint32_t videoFormatPropertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+        m_physicalDevice,
+        reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+        &videoFormatPropertyCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
+      {
+        videoFormatProperties.resize( videoFormatPropertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+          m_physicalDevice,
+          reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+          &videoFormatPropertyCount,
+          reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
+        VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
+    {
+      videoFormatProperties.resize( videoFormatPropertyCount );
+    }
+    return createResultValue(
+      result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+  }
+
+  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+    PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
+                                                 VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
+                                                 Dispatch const & d ) const
+  {
+    std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties(
+      videoFormatPropertiesKHRAllocator );
+    uint32_t videoFormatPropertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+        m_physicalDevice,
+        reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+        &videoFormatPropertyCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
+      {
+        videoFormatProperties.resize( videoFormatPropertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+          m_physicalDevice,
+          reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+          &videoFormatPropertyCount,
+          reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
+        VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
+    {
+      videoFormatProperties.resize( videoFormatPropertyCount );
+    }
+    return createResultValue(
+      result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
+                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                   VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
+                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateVideoSessionKHR( m_device,
+                                 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
+    Device::createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo,
+                                   Optional<const AllocationCallbacks> allocator,
+                                   Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
+    Result result = static_cast<Result>(
+      d.vkCreateVideoSessionKHR( m_device,
+                                 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>(
+                                   static_cast<const AllocationCallbacks *>( allocator ) ),
+                                 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
+    return createResultValue( result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<VideoSessionKHR, Dispatch>>::type
+    Device::createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo,
+                                         Optional<const AllocationCallbacks> allocator,
+                                         Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
+    Result result = static_cast<Result>(
+      d.vkCreateVideoSessionKHR( m_device,
+                                 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>(
+                                   static_cast<const AllocationCallbacks *>( allocator ) ),
+                                 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue(
+      result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyVideoSessionKHR( m_device,
+                                static_cast<VkVideoSessionKHR>( videoSession ),
+                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+                                                         Optional<const AllocationCallbacks> allocator,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyVideoSessionKHR( m_device,
+                                static_cast<VkVideoSessionKHR>( videoSession ),
+                                reinterpret_cast<const VkAllocationCallbacks *>(
+                                  static_cast<const AllocationCallbacks *>( allocator ) ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyVideoSessionKHR( m_device,
+                                static_cast<VkVideoSessionKHR>( videoSession ),
+                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyVideoSessionKHR( m_device,
+                                static_cast<VkVideoSessionKHR>( videoSession ),
+                                reinterpret_cast<const VkAllocationCallbacks *>(
+                                  static_cast<const AllocationCallbacks *>( allocator ) ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR(
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+    uint32_t * pVideoSessionMemoryRequirementsCount,
+    VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+      m_device,
+      static_cast<VkVideoSessionKHR>( videoSession ),
+      pVideoSessionMemoryRequirementsCount,
+      reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( pVideoSessionMemoryRequirements ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename VideoGetMemoryPropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
+    Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+                                                  Dispatch const & d ) const
+  {
+    std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements;
+    uint32_t videoSessionMemoryRequirementsCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+        m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
+      {
+        videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+        result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+          m_device,
+          static_cast<VkVideoSessionKHR>( videoSession ),
+          &videoSessionMemoryRequirementsCount,
+          reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
+        VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) &&
+         ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
+    {
+      videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+    }
+    return createResultValue( result,
+                              videoSessionMemoryRequirements,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+  }
+
+  template <typename VideoGetMemoryPropertiesKHRAllocator, typename Dispatch, typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value,
+                                    int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
+    Device::getVideoSessionMemoryRequirementsKHR(
+      VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+      VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements(
+      videoGetMemoryPropertiesKHRAllocator );
+    uint32_t videoSessionMemoryRequirementsCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+        m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
+      {
+        videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+        result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
+          m_device,
+          static_cast<VkVideoSessionKHR>( videoSession ),
+          &videoSessionMemoryRequirementsCount,
+          reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
+        VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) &&
+         ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
+    {
+      videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
+    }
+    return createResultValue( result,
+                              videoSessionMemoryRequirements,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+                                       uint32_t videoSessionBindMemoryCount,
+                                       const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkBindVideoSessionMemoryKHR( m_device,
+                                     static_cast<VkVideoSessionKHR>( videoSession ),
+                                     videoSessionBindMemoryCount,
+                                     reinterpret_cast<const VkVideoBindMemoryKHR *>( pVideoSessionBindMemories ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::bindVideoSessionMemoryKHR(
+      VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
+      Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindVideoSessionMemoryKHR(
+      m_device,
+      static_cast<VkVideoSessionKHR>( videoSession ),
+      videoSessionBindMemories.size(),
+      reinterpret_cast<const VkVideoBindMemoryKHR *>( videoSessionBindMemories.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR(
+    const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
+      m_device,
+      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSessionParameters ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + return createResultValue( + result, videoSessionParameters, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + videoSessionParameters, + VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkUpdateVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pUpdateInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkUpdateVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( &updateInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
    d.vkDestroyVideoSessionParametersKHR(
+      m_device,
+      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyVideoSessionParametersKHR( m_device,
+                                          static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyVideoSessionParametersKHR(
+      m_device,
+      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
+                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdControlVideoCodingKHR( m_commandBuffer,
+                                  reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdControlVideoCodingKHR( m_commandBuffer,
+                                  reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
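// Illustrative sketch only, not from this patch: how the beta video-coding wrappers above
// are typically driven, in a begin/control/end bracket on a command buffer recorded against
// a video-capable queue. All names below are assumptions for illustration.
//
//   void recordVideoCoding( vk::CommandBuffer cmd,
//                           const vk::VideoBeginCodingInfoKHR &   beginInfo,
//                           const vk::VideoCodingControlInfoKHR & controlInfo,
//                           const vk::VideoEndCodingInfoKHR &     endInfo )
//   {
//     cmd.beginVideoCodingKHR( beginInfo );     // enhanced-mode reference overload
//     cmd.controlVideoCodingKHR( controlInfo ); // e.g. reset the session before first use
//     cmd.endVideoCodingKHR( endInfo );
//   }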
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_decode_queue ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pFrameInfo ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_EXT_transform_feedback ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+                                                    uint32_t bindingCount,
+                                                    const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+                                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
+                                            firstBinding,
+                                            bindingCount,
+                                            reinterpret_cast<const VkBuffer *>( pBuffers ),
+                                            reinterpret_cast<const VkDeviceSize *>( pOffsets ),
+                                            reinterpret_cast<const VkDeviceSize *>( pSizes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+# else
+    if ( buffers.size() != offsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                        "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+    }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                        "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
+                                            firstBinding,
+                                            buffers.size(),
+                                            reinterpret_cast<const VkBuffer *>( buffers.data() ),
+                                            reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
+                                            reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                              uint32_t counterBufferCount,
+                                              const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+                                              const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
+                                      firstCounterBuffer,
+                                      counterBufferCount,
+                                      reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
+                                      reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT(
+    uint32_t firstCounterBuffer,
+    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+# else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+    {
+      throw LogicError(
+        VULKAN_HPP_NAMESPACE_STRING
+        "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
+                                      firstCounterBuffer,
+                                      counterBuffers.size(),
+                                      reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+                                      reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
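// Illustrative sketch only, not from this patch: the enhanced-mode ArrayProxy overloads
// above deduce the buffer count and accept braced lists; a single handle converts
// implicitly. The handles below are assumed valid and suitably created.
//
//   void recordTransformFeedback( vk::CommandBuffer cmd, vk::Buffer xfbBuffer, vk::Buffer counterBuffer )
//   {
//     cmd.bindTransformFeedbackBuffersEXT( 0, xfbBuffer, { 0 }, {} ); // binding 0, offset 0, whole buffer
//     cmd.beginTransformFeedbackEXT( 0, counterBuffer, { 0 } );       // resume vertex count from offset 0
//     // ... transform-feedback draws ...
//     cmd.endTransformFeedbackEXT( 0, counterBuffer, { 0 } );         // write vertex count back
//   }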
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                            uint32_t counterBufferCount,
+                                            const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
+                                    firstCounterBuffer,
+                                    counterBufferCount,
+                                    reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
+                                    reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT(
+    uint32_t firstCounterBuffer,
+    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+# else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+    {
+      throw LogicError(
+        VULKAN_HPP_NAMESPACE_STRING
+        "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
+                                    firstCounterBuffer,
+                                    counterBuffers.size(),
+                                    reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+                                    reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                              uint32_t query,
+                                                              VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+                                                              uint32_t index,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginQueryIndexedEXT(
+      m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                            uint32_t query,
+                                                            uint32_t index,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
+                                                                  uint32_t firstInstance,
+                                                                  VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
+                                                                  VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+                                                                  uint32_t counterOffset,
+                                                                  uint32_t vertexStride,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
+                                     instanceCount,
+                                     firstInstance,
+                                     static_cast<VkBuffer>( counterBuffer ),
+                                     static_cast<VkDeviceSize>( counterBufferOffset ),
+                                     counterOffset,
+                                     vertexStride );
+  }
+
+  //=== VK_NVX_binary_import ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
+                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                               VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
+                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
+                                                       reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
+                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                       reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
+    Device::createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo,
+                               Optional<const AllocationCallbacks> allocator,
+                               Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
+    Result result = static_cast<Result>(
+      d.vkCreateCuModuleNVX( m_device,
+                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
+                             reinterpret_cast<const VkAllocationCallbacks *>(
+                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                             reinterpret_cast<VkCuModuleNVX *>(
&module ) ) ); + return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFunction ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( + m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void 
Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( + m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( pLaunchInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( &launchInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NVX_image_view_handle === + + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + Result result = static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_draw_indirect_count === + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_AMD_shader_info === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + pInfoSize, + pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d ) const + { + std::vector info; + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) ) + { + info.resize( infoSize ); + } + return createResultValue( result, info, 
VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + std::vector info( uint8_tAllocator ); + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) ) + { + info.resize( infoSize ); + } + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( pExternalImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + return createResultValue( result, + externalImageFormatProperties, + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d ) const + { + HANDLE handle; + Result result = + static_cast( d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + 
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d ) const + { + std::vector queueFamilyProperties( + queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( 
queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount, + structureChainAllocator ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * 
pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( + const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + std::vector properties( + sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_device_group === + + template + VULKAN_HPP_INLINE void + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + 
uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchBaseKHR( + m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkTrimCommandPoolKHR( + m_device, static_cast( commandPool ), static_cast( flags ) ); + } + + //=== VK_KHR_device_group_creation === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + 
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } + + template < + typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( + PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_memory_capabilities === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleKHR( const 
VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + return createResultValue( + result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + int fd; + Result result = static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( pMemoryFdProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
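// [Editorial sketch — illustrative addition, not part of the patch] The
// VK_KHR_external_memory_fd wrappers above export a device allocation as a
// POSIX file descriptor which the caller then owns and must close or import
// exactly once. A minimal sketch, assuming default enhanced mode and a
// vk::DeviceMemory allocated with an export-capable
// VkExportMemoryAllocateInfo — all names are illustrative:
inline int exportMemoryFdExample( vk::Device device, vk::DeviceMemory memory )
{
  vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  int fd = device.getMemoryFdKHR( getFdInfo );  // throws vk::SystemError on failure
  return fd;                                    // ownership transfers to the caller
}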
VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + Result result = static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( &memoryFdProperties ) ) ); + return createResultValue( + result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_semaphore_capabilities === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphorePropertiesKHR( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d ) const + { + HANDLE handle; + Result result = 
static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + int fd; + Result result = static_cast( + d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_push_descriptor === + + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
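// [Editorial sketch — illustrative addition, not part of the patch] With
// VK_KHR_push_descriptor, descriptor writes are recorded directly into the
// command buffer, so no VkDescriptorSet is ever allocated. A minimal sketch
// for the ArrayProxy overload above, assuming default enhanced mode, a
// recording command buffer, and a pipeline layout whose set 0 was created
// with ePushDescriptorKHR — all names are illustrative:
inline void pushUniformExample( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::Buffer uniformBuffer )
{
  vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
  vk::WriteDescriptorSet   write( {}, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
  cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, layout, 0, write );  // ArrayProxy of one element
}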
d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); + } + + //=== VK_EXT_conditional_rendering === + + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( + result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, 
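// [Editorial sketch — illustrative addition, not part of the patch] A
// descriptor update template replaces repeated vkUpdateDescriptorSets calls
// with one memcpy-style walk over user data. A minimal sketch for the Unique
// variant above, assuming default enhanced mode with smart handles and a set
// layout holding a single uniform buffer at binding 0 — names illustrative:
inline vk::UniqueDescriptorUpdateTemplate makeTemplateExample( vk::Device device, vk::DescriptorSetLayout setLayout )
{
  vk::DescriptorUpdateTemplateEntry entry( 0, 0, 1, vk::DescriptorType::eUniformBuffer, 0,
                                           sizeof( vk::DescriptorBufferInfo ) );
  vk::DescriptorUpdateTemplateCreateInfo info( {}, 1, &entry,
                                               vk::DescriptorUpdateTemplateType::eDescriptorSet, setLayout );
  return device.createDescriptorUpdateTemplateKHRUnique( info );  // destroyed automatically on scope exit
}
// Applying it later: device.updateDescriptorSetWithTemplateKHR( set, *tmpl, &bufferInfo );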
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDescriptorUpdateTemplateKHR( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); + } + + //=== VK_NV_clip_space_w_scaling === + + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pViewportWScalings ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( + uint32_t firstViewport, + ArrayProxy const & viewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportWScalings.size(), + reinterpret_cast( viewportWScalings.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + Result result = + static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( + Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, 
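// [Editorial sketch — illustrative addition, not part of the patch]
// VK_EXT_direct_mode_display and VK_EXT_acquire_xlib_display (being wrapped
// right here) let an application take a display away from the window system
// for exclusive presentation. A minimal sketch, assuming default enhanced
// mode and the Xlib/XRandR platform — all names are illustrative:
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
inline void acquireDisplayExample( vk::PhysicalDevice physicalDevice, Display & dpy, RROutput output )
{
  vk::DisplayKHR display = physicalDevice.getRandROutputDisplayEXT( dpy, output );
  physicalDevice.acquireXlibDisplayEXT( dpy, display );  // throws vk::SystemError on failure
  // ... present directly to `display`, then: physicalDevice.releaseDisplayEXT( display );
}
#endif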
&dpy, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_display_control === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkDisplayPowerControlEXT( m_device, + static_cast( display ), + reinterpret_cast( pDisplayPowerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::displayPowerControlEXT( + 
VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkDisplayPowerControlEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayPowerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( pDeviceEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( pDisplayEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + 
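// [Editorial sketch — illustrative addition, not part of the patch]
// registerDisplayEventEXT (implemented above, Unique variant just below)
// returns a fence that signals on a display event such as first-pixel-out,
// which is handy for latency measurement. A minimal sketch, assuming default
// enhanced mode and an acquired vk::DisplayKHR — all names are illustrative:
inline void waitForFirstPixelExample( vk::Device device, vk::DisplayKHR display )
{
  vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
  vk::Fence  fence  = device.registerDisplayEventEXT( display, eventInfo );  // throws on failure
  vk::Result status = device.waitForFences( fence, VK_TRUE, UINT64_MAX );    // eSuccess or eTimeout
  ( void )status;
  device.destroyFence( fence );  // the caller owns the returned fence
}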
Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + pCounterValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d ) const + { + uint64_t counterValue; + Result result = + static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + &counterValue ) ); + return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_GOOGLE_display_timing === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( pDisplayTimingProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( &displayTimingProperties ) ) ); + return createResultValue( + result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + pPresentationTimingCount, + reinterpret_cast( pPresentationTimings ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename 
ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + std::vector presentationTimings; + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) ) + { + presentationTimings.resize( presentationTimingCount ); + } + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + } + + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d ) const + { + std::vector presentationTimings( + pastPresentationTimingGOOGLEAllocator ); + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) ) + { + presentationTimings.resize( presentationTimingCount ); + } + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_discard_rectangles === + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangleCount, + reinterpret_cast( pDiscardRectangles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangles.size(), + reinterpret_cast( discardRectangles.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_hdr_metadata === + + template + 
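// [Editorial sketch — illustrative addition, not part of the patch] The
// VK_GOOGLE_display_timing wrappers above support frame pacing: read the
// display's refresh period once, then poll past-presentation feedback every
// frame. A minimal sketch, assuming default enhanced mode and a swapchain on
// a supporting surface — all names are illustrative:
inline void framePacingExample( vk::Device device, vk::SwapchainKHR swapchain )
{
  // Nanoseconds between vertical blanks of the presentation engine.
  vk::RefreshCycleDurationGOOGLE refresh = device.getRefreshCycleDurationGOOGLE( swapchain );
  // Empty until presents submitted with VkPresentTimeGOOGLE have completed.
  std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );
  for ( const vk::PastPresentationTimingGOOGLE & t : timings )
  {
    uint64_t slipNs = t.actualPresentTime - t.desiredPresentTime;  // how late the frame landed
    ( void )slipNs;  // feed this and refresh.refreshDuration into the pacing heuristic
  }
  ( void )refresh;
}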
VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkSetHdrMetadataEXT( m_device, + swapchainCount, + reinterpret_cast( pSwapchains ), + reinterpret_cast( pMetadata ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::setHdrMetadataEXT( ArrayProxy const & swapchains, + ArrayProxy const & metadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkSetHdrMetadataEXT( m_device, + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( metadata.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_create_renderpass2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
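// [Editorial sketch — illustrative addition, not part of the patch] The
// *2KHR render-pass entry points mirror the core calls but take
// pNext-extensible structs. A minimal recording sketch for the wrappers
// implemented here, assuming default enhanced mode and a compatible render
// pass / framebuffer pair — all names are illustrative:
inline void renderPass2Example( vk::CommandBuffer cmd, vk::RenderPass renderPass, vk::Framebuffer framebuffer,
                                vk::Extent2D extent )
{
  vk::ClearValue          clear( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );
  vk::RenderPassBeginInfo rpBegin( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, extent ), 1, &clear );
  vk::SubpassBeginInfo    subpassBegin( vk::SubpassContents::eInline );
  cmd.beginRenderPass2KHR( rpBegin, subpassBegin );
  // ... bind pipeline and record draws ...
  cmd.endRenderPass2KHR( vk::SubpassEndInfo() );
}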
CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_shared_presentable_image === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + Result result = + static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return createResultValue( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_fence_capabilities === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalFenceProperties 
externalFenceProperties; + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d ) const + { + Result result = static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetFenceWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetFenceWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const + { + Result result = static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetFenceFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
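// [Editorial sketch — illustrative addition, not part of the patch] The
// fence analogue of the memory/semaphore fd paths: getFenceFdKHR (whose
// enhanced wrapper continues just below) exports a fence payload as a file
// descriptor, e.g. a sync-fd for GL/EGL interop. A minimal sketch, assuming
// default enhanced mode and an export-capable fence — names illustrative:
inline int exportFenceFdExample( vk::Device device, vk::Fence fence )
{
  vk::FenceGetFdInfoKHR getFdInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
  return device.getFenceFdKHR( getFdInfo );  // caller owns (and must close or import) the fd
}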
ResultValueType::type + Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + int fd; + Result result = static_cast( + d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_performance_query === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + pCounterCount, + reinterpret_cast( pCounters ), + reinterpret_cast( pCounterDescriptions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Dispatch const & d ) const + { + std::vector counterDescriptions; + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( result, + counterDescriptions, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template < + typename Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + ArrayProxy const & counters, + Allocator const & vectorAllocator, + Dispatch const & d ) const + { + std::vector counterDescriptions( vectorAllocator ); + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + counters.size(), + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( result, + counterDescriptions, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::pair, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + Dispatch const & d ) const + { + std::pair, + std::vector> + data; + std::vector & counters = data.first; + std::vector & counterDescriptions = + data.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template ::value && + std::is_same::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::pair, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d ) const + { + std::pair, + std::vector> + data( std::piecewise_construct, + std::forward_as_tuple( performanceCounterKHRAllocator ), + std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); + std::vector & counters = data.first; + std::vector & counterDescriptions = + data.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast( 
d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( pPerformanceQueryCreateInfo ), + pNumPasses ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + uint32_t numPasses; + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( &performanceQueryCreateInfo ), + &numPasses ); + return numPasses; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( + const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const + { + Result result = static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkReleaseProfilingLockKHR( m_device ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
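// [Editorial sketch — illustrative addition, not part of the patch] Putting
// the VK_KHR_performance_query pieces above together: enumerate the counters
// a queue family exposes, ask how many submission passes a chosen set needs,
// and hold the profiling lock while recording. A minimal sketch, assuming
// default enhanced mode and C++17 — all names are illustrative:
inline uint32_t performanceQueryExample( vk::PhysicalDevice physicalDevice, vk::Device device, uint32_t family )
{
  // Counters and their human-readable descriptions arrive as a pair of vectors.
  auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( family );
  ( void )descriptions;
  if ( counters.empty() )
    return 0;
  uint32_t counterIndex = 0;  // profile just the first counter in this sketch
  vk::QueryPoolPerformanceCreateInfoKHR poolInfo( family, 1, &counterIndex );
  uint32_t passes = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( poolInfo );
  // The lock must be held while command buffers using the query pool are recorded.
  device.acquireProfilingLockKHR( vk::AcquireProfilingLockInfoKHR( {}, UINT64_MAX ) );
  // ... create the query pool, record and submit the workload `passes` times ...
  device.releaseProfilingLockKHR();
  return passes;
}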
VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d ) const + { + std::vector surfaceFormats( surfaceFormat2KHRAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + 
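// [Editorial sketch — illustrative addition, not part of the patch] The
// *2KHR surface queries take a pNext-extensible PhysicalDeviceSurfaceInfo2KHR
// instead of a bare surface handle, which is what allows chains such as
// full-screen-exclusive info on Windows. A minimal sketch, assuming default
// enhanced mode — all names are illustrative:
inline vk::SurfaceCapabilities2KHR querySurface2Example( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  vk::PhysicalDeviceSurfaceInfo2KHR  surfaceInfo( surface );
  std::vector<vk::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
  ( void )formats;  // each entry wraps a SurfaceFormatKHR and is itself pNext-extensible
  return physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
}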
&surfaceFormatCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_display_properties2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, + Dispatch const & d ) const + { + std::vector properties( displayProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * 
pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( + DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const + { + std::vector properties( + displayPlaneProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + 
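// ---- editor's note: usage sketch, not part of this patch ----
// Each enumeration call above also has an overload taking a caller-supplied
// vector allocator (the enable_if-constrained `B` template parameter), so
// the returned std::vector can draw from e.g. an arena. `MyArenaAllocator`
// below is purely hypothetical, named only for illustration:
//
//   MyArenaAllocator<vk::DisplayProperties2KHR> alloc( arena );
//   auto props = physicalDevice.getDisplayProperties2KHR( alloc );
// -------------------------------------------------------------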
&propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d ) const + { + std::vector properties( + displayModeProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) ) + { + properties.resize( propertyCount ); + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( pDisplayPlaneInfo ), + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + Result result = static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type 
+ Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch 
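// ---- editor's note: usage sketch, not part of this patch ----
// The *Unique flavours return the new surface inside a UniqueHandle whose
// ObjectDestroy deleter (built above from the instance, the allocator and
// the dispatcher) destroys it on scope exit. Assuming a valid `instance`
// and a CAMetalLayer-backed `pView` on macOS:
//
//   vk::MacOSSurfaceCreateInfoMVK createInfo( {}, pView );
//   vk::UniqueSurfaceKHR surface =
//     instance.createMacOSSurfaceMVKUnique( createInfo );
//   // surface.get() yields the plain vk::SurfaceKHR
// -------------------------------------------------------------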
const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
+      m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
+      m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT(
+    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMessenger ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + Result result = static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + Result result = static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
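// ---- editor's note: usage sketch, not part of this patch ----
// Typical VK_EXT_debug_utils flow with the wrappers above: name objects
// once, then bracket work in labels so validation layers and capture tools
// display them. Assumes valid `device`, `commandBuffer` and `image` handles:
//
//   device.setDebugUtilsObjectNameEXT( vk::DebugUtilsObjectNameInfoEXT(
//     vk::ObjectType::eImage,
//     uint64_t( static_cast<VkImage>( image ) ),
//     "shadow-map" ) );
//   vk::DebugUtilsLabelEXT label( "main pass", { { 0.2f, 0.6f, 1.0f, 1.0f } } );
//   commandBuffer.beginDebugUtilsLabelEXT( label );
//   // ... record the pass ...
//   commandBuffer.endDebugUtilsLabelEXT();
// -------------------------------------------------------------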
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance,
+                                       static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+                                            Optional<const AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT(
+      m_instance,
+      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT(
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT              messageTypes,
+    const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance,
+                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
+                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
+                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                          VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+                                          const DebugUtilsMessengerCallbackDataEXT & callbackData,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance,
+                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
+                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
+                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID(
+    const struct AHardwareBuffer *                                 buffer,
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+      m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
+    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+      m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
+      structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
+      m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
+    return createResultValue(
+      result, structureChain,
VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( pInfo ), pBuffer ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d ) const + { + struct AHardwareBuffer * buffer; + Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( &info ), &buffer ) ); + return createResultValue( + result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sample_locations === + + template + VULKAN_HPP_INLINE void + CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( pSampleLocationsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( &sampleLocationsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( pMultisampleProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( &multisampleProperties ) ); + return multisampleProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_memory_requirements2 === + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const 
ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= 
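// ---- editor's note: usage sketch, not part of this patch ----
// The StructureChain overloads above let one query return extension structs
// too. For instance, asking VK_KHR_dedicated_allocation whether a buffer
// prefers its own VkDeviceMemory (assuming valid `device` and `buffer`):
//
//   auto chain = device.getBufferMemoryRequirements2KHR<
//     vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
//     vk::BufferMemoryRequirementsInfo2( buffer ) );
//   bool wantsDedicated =
//     chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
// -------------------------------------------------------------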
sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( + const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_acceleration_structure === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateAccelerationStructureKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( + result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + accelerationStructure, + VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructuresKHR( + m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresKHR( + m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructuresIndirectKHR( + m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( pIndirectDeviceAddresses ), + pIndirectStrides, + ppMaxPrimitiveCounts ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
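// ---- editor's note: usage sketch, not part of this patch ----
// Creating an acceleration structure with the wrappers above: the handle
// merely views `size` bytes of a caller-provided buffer; the size itself
// comes from getAccelerationStructureBuildSizesKHR further below. Sketch,
// assuming valid `device`, `asBuffer` and a previously filled `buildSizes`:
//
//   vk::UniqueAccelerationStructureKHR blas =
//     device.createAccelerationStructureKHRUnique(
//       vk::AccelerationStructureCreateInfoKHR(
//         {}, asBuffer, 0, buildSizes.accelerationStructureSize,
//         vk::AccelerationStructureTypeKHR::eBottomLevel ) );
// -------------------------------------------------------------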
CommandBuffer::buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresIndirectKHR( + m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBuildAccelerationStructuresKHR( + m_device, + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = static_cast( d.vkBuildAccelerationStructuresKHR( + m_device, + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + 
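// ---- editor's note: usage sketch, not part of this patch ----
// As the assert/LogicError checks above enforce, the enhanced build calls
// require exactly one AccelerationStructureBuildRangeInfoKHR pointer per
// geometry-info entry. Assuming `buildInfo` and `rangeInfo` prepared by
// the caller, the ArrayProxy overload accepts single elements directly:
//
//   const vk::AccelerationStructureBuildRangeInfoKHR * pRanges = &rangeInfo;
//   commandBuffer.buildAccelerationStructuresKHR( buildInfo, pRanges );
// -------------------------------------------------------------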
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCopyAccelerationStructureKHR( m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkCopyAccelerationStructureKHR( m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const + { + Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( + uint32_t 
accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + ArrayProxy const & data, + size_t stride, + Dispatch const & d ) const + { + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + stride ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + stride ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::writeAccelerationStructuresPropertyKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + T data; + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( T ), + reinterpret_cast( &data ), + stride ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( &info ) ); + } +#endif 
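// ---- editor's note: usage sketch, not part of this patch ----
// The templated writeAccelerationStructuresPropertyKHR above fetches one
// value, e.g. the compacted size to use before copying with mode eCompact.
// Note this host-side path needs the accelerationStructureHostCommands
// feature; device-side queries go through a query pool instead. Assuming a
// built `blas`:
//
//   vk::DeviceSize compacted =
//     device.writeAccelerationStructuresPropertyKHR<vk::DeviceSize>(
//       blas.get(), vk::QueryType::eAccelerationStructureCompactedSizeKHR,
//       sizeof( vk::DeviceSize ) );
// -------------------------------------------------------------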
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureToMemoryKHR( + m_commandBuffer, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureToMemoryKHR( + m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyMemoryToAccelerationStructureKHR( + m_commandBuffer, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyMemoryToAccelerationStructureKHR( + m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( + const 
VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + return compatibility; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetAccelerationStructureBuildSizesKHR( + m_device, + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + pMaxPrimitiveCounts, + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy const & maxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetAccelerationStructureBuildSizesKHR( + m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + return sizeInfo; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_sampler_ycbcr_conversion === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateSamplerYcbcrConversionKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, 
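// ---- editor's note: usage sketch, not part of this patch ----
// getAccelerationStructureBuildSizesKHR above insists (assert/LogicError)
// that maxPrimitiveCounts has exactly buildInfo.geometryCount entries.
// Sketch for a single-geometry BLAS, assuming `device` and a prepared
// `buildInfo` with geometryCount == 1:
//
//   const uint32_t primitiveCounts[] = { triangleCount };
//   vk::AccelerationStructureBuildSizesInfoKHR sizes =
//     device.getAccelerationStructureBuildSizesKHR(
//       vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo,
//       primitiveCounts );
// -------------------------------------------------------------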
+ Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversionKHR( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversionKHR( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_bind_memory2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindBufferMemory2KHR( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindBufferMemory2KHR( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindImageMemory2KHR( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + 
Device::bindImageMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindImageMemory2KHR( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_image_drm_format_modifier === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_validation_cache === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pValidationCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + Result result = static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + Result result = static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyValidationCacheEXT( 
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Dispatch const & d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + 
reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) ) + { + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + std::vector data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) ) + { + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_shading_rate_image === + + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindShadingRateImageNV( + m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pShadingRatePalettes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + ArrayProxy const & shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, + firstViewport, + shadingRatePalettes.size(), + reinterpret_cast( shadingRatePalettes.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast( pCustomSampleOrders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( 
customSampleOrders.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateAccelerationStructureNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( + result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + accelerationStructure, + VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV 
accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = + structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindAccelerationStructureMemoryNV( + ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( pInfo ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + 
static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysNV( m_commandBuffer, + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING 
"::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + T data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d ) const + { + T data; + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::compileDeferredNV( 
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
+  {
+    Result result =
+      static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_maintenance3 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR(
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
+                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+    Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
+                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+    Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
+      structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
+                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_draw_indirect_count ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                              VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                              uint32_t maxDrawCount, uint32_t stride,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
+                                 static_cast<VkBuffer>( buffer ),
+                                 static_cast<VkDeviceSize>( offset ),
+                                 static_cast<VkBuffer>( countBuffer ),
+                                 static_cast<VkDeviceSize>( countBufferOffset ),
+                                 maxDrawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                     VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                     uint32_t maxDrawCount, uint32_t stride,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
+                                        static_cast<VkBuffer>( buffer ),
+                                        static_cast<VkDeviceSize>( offset ),
+                                        static_cast<VkBuffer>( countBuffer ),
+                                        static_cast<VkDeviceSize>( countBufferOffset ),
+                                        maxDrawCount, stride );
+  }
+
+  //=== VK_EXT_external_memory_host ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT(
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+    const void * pHostPointer,
+    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
+      m_device,
+      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+      pHostPointer,
+      reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
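+  // --- Editor's example (illustrative only; not upstream code) ---
+  // A sketch of querying VK_KHR_maintenance3 descriptor-set-layout support via the wrapper
+  // reconstructed above; the single uniform-buffer binding is an assumed illustration, and
+  // the #if 0 guard keeps this sketch out of any build of the header.
+#if 0
+  bool exampleDescriptorSetLayoutSupportedKHR( vk::Device device )
+  {
+    vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex );
+    vk::DescriptorSetLayoutCreateInfo createInfo( {}, 1, &binding );
+    vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupportKHR( createInfo );
+    return support.supported == VK_TRUE;  // VK_FALSE means the layout exceeds device limits
+  }
+#endif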
template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( + m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + return createResultValue( + result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_buffer_marker === + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const + { + std::vector timeDomains; + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, + Dispatch const & d ) const + { + std::vector timeDomains( timeDomainEXTAllocator ); + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + } + } while ( result == 
Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + ArrayProxy const & timestamps, + Dispatch const & d ) const + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() ); +# else + if ( timestampInfos.size() != timestamps.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + uint64_t maxDeviation; + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( + result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Dispatch const & d ) const + { + std::pair, uint64_t> data( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data.first; + uint64_t & maxDeviation = data.second; + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + std::pair, uint64_t> data( + std::piecewise_construct, + std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), + std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data.first; + uint64_t & maxDeviation = data.second; + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_mesh_shader === + + template + 
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                 uint32_t drawCount, uint32_t stride,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectNV(
+      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                      VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                      uint32_t maxDrawCount, uint32_t stride,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
+                                         static_cast<VkBuffer>( buffer ),
+                                         static_cast<VkDeviceSize>( offset ),
+                                         static_cast<VkBuffer>( countBuffer ),
+                                         static_cast<VkDeviceSize>( countBufferOffset ),
+                                         maxDrawCount, stride );
+  }
+
+  //=== VK_NV_scissor_exclusive ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                                               uint32_t exclusiveScissorCount,
+                                                               const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
+                                  firstExclusiveScissor,
+                                  exclusiveScissorCount,
+                                  reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV(
+    uint32_t firstExclusiveScissor,
+    ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
+                                  firstExclusiveScissor,
+                                  exclusiveScissors.size(),
+                                  reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
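+  // --- Editor's example (illustrative only; not upstream code) ---
+  // A sketch of recording the VK_NV_mesh_shader draws above; `cmd` is assumed to be a
+  // command buffer in the recording state on a device exposing the extension, and
+  // `indirectBuffer` to hold one VkDrawMeshTasksIndirectCommandNV at offset 0. Compiled out.
+#if 0
+  void exampleDrawMeshTasksNV( vk::CommandBuffer cmd, vk::Buffer indirectBuffer )
+  {
+    cmd.drawMeshTasksNV( 64 /* taskCount */, 0 /* firstTask */ );
+    cmd.drawMeshTasksIndirectNV( indirectBuffer, 0, 1 /* drawCount */, sizeof( VkDrawMeshTasksIndirectCommandNV ) );
+  }
+#endif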
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
+                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetQueueCheckpointDataNV(
+      m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CheckpointDataNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+    Queue::getCheckpointDataNV( Dispatch const & d ) const
+  {
+    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV(
+      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+
+  template <typename CheckpointDataNVAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+    Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
+  {
+    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV(
+      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_timeline_semaphore ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+    Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    uint64_t value;
+    Result result = static_cast<Result>(
+      d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout,
+                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo,
+                                                                           uint64_t timeout, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
+    return createResultValue( result,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR(
+    const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
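+  // --- Editor's example (illustrative only; not upstream code) ---
+  // A sketch of the VK_KHR_timeline_semaphore wrappers above: block until the timeline
+  // reaches `value`, then read the counter back. The one-second timeout is an arbitrary
+  // illustrative choice; waitSemaphoresKHR reports eSuccess or eTimeout as success codes.
+#if 0
+  uint64_t exampleWaitTimelineKHR( vk::Device device, vk::Semaphore timeline, uint64_t value )
+  {
+    vk::SemaphoreWaitInfo waitInfo( {}, 1, &timeline, &value );
+    vk::Result result = device.waitSemaphoresKHR( waitInfo, 1000000000 /* ns */ );
+    return ( result == vk::Result::eSuccess ) ? device.getSemaphoreCounterValueKHR( timeline ) : 0;
+  }
+#endif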
+  //=== VK_INTEL_performance_query ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
+    const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkInitializePerformanceApiINTEL(
+      m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
+                                           Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL(
+      m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUninitializePerformanceApiINTEL( m_device );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL(
+    const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
+      m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
+      m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
+    const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
+      m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
+                                                    Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
+      m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
+    const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
+      m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
+                                                Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
+      m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL(
+    const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL *
pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( pAcquireInfo ), + reinterpret_cast( pConfiguration ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + return createResultValue( + result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, + configuration, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const + { + Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL(
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
+      m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+                                             Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
+      m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+                                          VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetPerformanceParameterINTEL( m_device,
+                                        static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
+                                        reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
+    Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+                                          Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
+    Result result = static_cast<Result>(
+      d.vkGetPerformanceParameterINTEL( m_device,
+                                        static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
+                                        reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_AMD_display_native_hdr ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
+                                                     VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetLocalDimmingAMD(
+      m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+  }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA(
+    const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
+                                         reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
+                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+                                             Optional<const AllocationCallbacks> allocator,
+                                             Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
+      m_instance,
+      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
+  }
+
+#    ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+                                                   Optional<const AllocationCallbacks> allocator,
+                                                   Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
+      m_instance,
+      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
+  }
+#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
+                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                     VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateMetalSurfaceEXT( m_instance,
+                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo,
+                                     Optional<const AllocationCallbacks> allocator,
+                                     Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateMetalSurfaceEXT( m_instance,
+                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>(
+                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
+  }
+
+#    ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo,
+                                           Optional<const AllocationCallbacks> allocator,
+                                           Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateMetalSurfaceEXT( m_instance,
+                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
+                                 reinterpret_cast<const VkAllocationCallbacks *>(
+                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter );
+  }
+#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR(
+    uint32_t * pFragmentShadingRateCount,
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+      m_physicalDevice,
+      pFragmentShadingRateCount,
+      reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+    PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
+  {
+    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
+      fragmentShadingRates;
+    uint32_t fragmentShadingRateCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+          m_physicalDevice,
+          &fragmentShadingRateCount,
+          reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
+        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
+    {
+      fragmentShadingRates.resize( fragmentShadingRateCount );
+    }
+    return createResultValue(
+      result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+  }
+
+  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
+                                    int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+    PhysicalDevice::getFragmentShadingRatesKHR(
+      PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
+      fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
+    uint32_t fragmentShadingRateCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+          m_physicalDevice,
+          &fragmentShadingRateCount,
+          reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
+        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
+    {
+      fragmentShadingRates.resize( fragmentShadingRateCount );
+    }
+    return createResultValue(
+      result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
+    const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
+    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
+                                      reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
+                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
+    const Extent2D & fragmentSize,
+    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
+                                      reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
+                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
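+  // A minimal usage sketch, assuming a valid vk::PhysicalDevice `physicalDevice`
+  // (illustrative). The enhanced-mode overload hides the two-call
+  // VK_INCOMPLETE retry loop spelled out above:
+  //
+  //   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates =
+  //     physicalDevice.getFragmentShadingRatesKHR();
+  //   for ( auto const & rate : rates )
+  //   {
+  //     // rate.fragmentSize and rate.sampleCounts describe one supported rate
+  //   }
+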
+  //=== VK_EXT_buffer_device_address ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
+    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>(
+      d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_EXT_tooling_info ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
+                                          VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+      m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( pToolProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
+    PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
+  {
+    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties;
+    uint32_t toolCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+          m_physicalDevice,
+          &toolCount,
+          reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
+        VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValue(
+      result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+  }
+
+  template <
+    typename PhysicalDeviceToolPropertiesEXTAllocator,
+    typename Dispatch,
+    typename B,
+    typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
+    PhysicalDevice::getToolPropertiesEXT(
+      PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d ) const
+  {
+    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties(
+      physicalDeviceToolPropertiesEXTAllocator );
+    uint32_t toolCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+          m_physicalDevice,
+          &toolCount,
+          reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
+        VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValue(
+      result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
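+  // A minimal usage sketch, assuming a valid vk::PhysicalDevice `physicalDevice`
+  // (illustrative only):
+  //
+  //   std::vector<vk::PhysicalDeviceToolPropertiesEXT> tools =
+  //     physicalDevice.getToolPropertiesEXT();
+  //   for ( auto const & tool : tools )
+  //   {
+  //     // tool.name and tool.version identify an active layer or tool
+  //   }
+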
+  //=== VK_NV_cooperative_matrix ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV(
+    uint32_t * pPropertyCount,
+    VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+      m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+    PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
+  {
+    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+          m_physicalDevice,
+          &propertyCount,
+          reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+  }
+
+  template <
+    typename CooperativeMatrixPropertiesNVAllocator,
+    typename Dispatch,
+    typename B,
+    typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+    PhysicalDevice::getCooperativeMatrixPropertiesNV(
+      CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
+  {
+    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
+      cooperativeMatrixPropertiesNVAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>(
+        d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+          m_physicalDevice,
+          &propertyCount,
+          reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
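+  // A minimal usage sketch, assuming a valid vk::PhysicalDevice `physicalDevice`
+  // (illustrative only):
+  //
+  //   std::vector<vk::CooperativeMatrixPropertiesNV> props =
+  //     physicalDevice.getCooperativeMatrixPropertiesNV();
+  //   // each entry lists a supported M/N/K shape and its component types
+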
+  //=== VK_NV_coverage_reduction_mode ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
+    uint32_t * pCombinationCount,
+    VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+      m_physicalDevice,
+      pCombinationCount,
+      reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+        m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+          m_physicalDevice,
+          &combinationCount,
+          reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
+        VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
+    {
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result,
+                              combinations,
+                              VULKAN_HPP_NAMESPACE_STRING
+                              "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+
+  template <typename FramebufferMixedSamplesCombinationNVAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
+                                    int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
+      FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
+      framebufferMixedSamplesCombinationNVAllocator );
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+        m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+          m_physicalDevice,
+          &combinationCount,
+          reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
+        VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
+    {
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result,
+                              combinations,
+                              VULKAN_HPP_NAMESPACE_STRING
+                              "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT(
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+    uint32_t * pPresentModeCount,
+    VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+      m_physicalDevice,
+      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+      pPresentModeCount,
+      reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
+  }
+
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PresentModeKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+    PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+                                                Dispatch const & d ) const
+  {
+    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+        m_physicalDevice,
+        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+        &presentModeCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+          m_physicalDevice,
+          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+          &presentModeCount,
+          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue(
+      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+
+  template <typename PresentModeKHRAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+    PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+                                                PresentModeKHRAllocator & presentModeKHRAllocator,
+                                                Dispatch const & d ) const
+  {
+    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+        m_physicalDevice,
+        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+        &presentModeCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+          m_physicalDevice,
+          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+          &presentModeCount,
+          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
+    {
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue(
+      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT(
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#  else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
+  }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT(
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#  else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
+  }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+                                             VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
+      m_device,
+      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+  }
+
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+    Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+                                             Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
+      m_device,
+      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
+    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+  }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
+                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                        VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateHeadlessSurfaceEXT( m_instance,
+                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                    reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
+                                        Optional<const AllocationCallbacks> allocator,
+                                        Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateHeadlessSurfaceEXT( m_instance,
+                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>(
+                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
+                                              Optional<const AllocationCallbacks> allocator,
+                                              Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>(
+      d.vkCreateHeadlessSurfaceEXT( m_instance,
+                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>(
+                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
+      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_buffer_device_address ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR(
+    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>(
+      d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
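+  // A minimal usage sketch, assuming a valid vk::Device `device` and a
+  // vk::Buffer `buffer` created with the shader-device-address usage flag
+  // (both illustrative):
+  //
+  //   vk::BufferDeviceAddressInfo info{ buffer };
+  //   vk::DeviceAddress address = device.getBufferAddressKHR( info );
+  //   // `address` can then be written into a shader-visible structure
+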
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
+    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
+                                                 reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
+                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
+                                                 reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t
+    Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR(
+    const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_EXT_line_rasterization ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor,
+                                                           uint16_t lineStipplePattern,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+  }
+
+  //=== VK_EXT_host_query_reset ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                    uint32_t firstQuery,
+                                                    uint32_t queryCount,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
+                                                                 const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWithCountEXT(
+      m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWithCountEXT(
+      m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
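+  // A minimal usage sketch, assuming a vk::CommandBuffer `cmd` in the
+  // recording state and a bound pipeline with the matching dynamic states
+  // enabled (all illustrative):
+  //
+  //   cmd.setCullModeEXT( vk::CullModeFlagBits::eBack );
+  //   cmd.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
+  //   vk::Viewport viewport( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f );
+  //   cmd.setViewportWithCountEXT( viewport );  // ArrayProxy accepts a single element
+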
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount,
+                                                                const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissorWithCountEXT(
+      m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
+                                                               uint32_t bindingCount,
+                                                               const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
+                                  firstBinding,
+                                  bindingCount,
+                                  reinterpret_cast<const VkBuffer *>( pBuffers ),
+                                  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
+                                  reinterpret_cast<const VkDeviceSize *>( pSizes ),
+                                  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
+                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
+#  else
+    if ( buffers.size() != offsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+    }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+    }
+    if ( !strides.empty() && buffers.size() != strides.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+    }
+#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
+                                  firstBinding,
+                                  buffers.size(),
+                                  reinterpret_cast<const VkBuffer *>( buffers.data() ),
+                                  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
+                                  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
+                                  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                                                         VULKAN_HPP_NAMESPACE::StencilOp failOp,
+                                                         VULKAN_HPP_NAMESPACE::StencilOp passOp,
+                                                         VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
+                                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilOpEXT( m_commandBuffer,
+                            static_cast<VkStencilFaceFlags>( faceMask ),
+                            static_cast<VkStencilOp>( failOp ),
+                            static_cast<VkStencilOp>( passOp ),
+                            static_cast<VkStencilOp>( depthFailOp ),
+                            static_cast<VkCompareOp>( compareOp ) );
+  }
+
+  //=== VK_KHR_deferred_host_operations ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                        VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
+                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreateDeferredOperationKHR( m_device,
+                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                      reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
+    Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+    Result result = static_cast<Result>(
+      d.vkCreateDeferredOperationKHR( m_device,
+                                      reinterpret_cast<const VkAllocationCallbacks *>(
+                                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
+    return createResultValue(
+      result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
+    Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
+    Result result = static_cast<Result>(
+      d.vkCreateDeferredOperationKHR( m_device,
+                                      reinterpret_cast<const VkAllocationCallbacks *>(
+                                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>(
+      result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device,
+                                     static_cast<VkDeferredOperationKHR>( operation ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+                                                              Optional<const AllocationCallbacks> allocator,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device,
+                                     static_cast<VkDeferredOperationKHR>( operation ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device,
+                                     static_cast<VkDeferredOperationKHR>( operation ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDeferredOperationKHR( m_device,
+                                     static_cast<VkDeferredOperationKHR>( operation ),
+                                     reinterpret_cast<const VkAllocationCallbacks *>(
+                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR(
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+  }
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>(
+      d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+    return createResultValue( result,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR(
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+  {
+    Result result =
+      static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+    return createResultValue( result,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
+                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+                                VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR,
+                                VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
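+  // A minimal single-threaded usage sketch, assuming a valid vk::Device
+  // `device` (illustrative). A real caller should also handle eThreadDoneKHR,
+  // e.g. by waiting on the other joiners:
+  //
+  //   vk::DeferredOperationKHR op = device.createDeferredOperationKHR();
+  //   // ... pass `op` to a deferrable call such as ray-tracing pipeline creation ...
+  //   vk::Result r;
+  //   do { r = device.deferredOperationJoinKHR( op ); } while ( r == vk::Result::eThreadIdleKHR );
+  //   device.destroyDeferredOperationKHR( op );
+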
+  //=== VK_KHR_pipeline_executable_properties ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
+                                                uint32_t * pExecutableCount,
+                                                VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetPipelineExecutablePropertiesKHR( m_device,
+                                              reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
+                                              pExecutableCount,
+                                              reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+    Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
+  {
+    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
+    uint32_t executableCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+        m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+          m_device,
+          reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+          &executableCount,
+          reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
+    {
+      properties.resize( executableCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+  }
+
+  template <
+    typename PipelineExecutablePropertiesKHRAllocator,
+    typename Dispatch,
+    typename B,
+    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+    Device::getPipelineExecutablePropertiesKHR(
+      const PipelineInfoKHR & pipelineInfo,
+      PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
+      pipelineExecutablePropertiesKHRAllocator );
+    uint32_t executableCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+        m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
+          m_device,
+          reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+          &executableCount,
+          reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
+        VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
+    {
+      properties.resize( executableCount );
+    }
+    return createResultValue(
+      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+                                                uint32_t * pStatisticCount,
+                                                VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkGetPipelineExecutableStatisticsKHR( m_device,
+                                              reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
+                                              pStatisticCount,
+                                              reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+    Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
+                                                Dispatch const & d ) const
+  {
+    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
+    uint32_t statisticCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+        m_device,
+        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+        &statisticCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+          m_device,
+          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+          &statisticCount,
+          reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
+        VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
+    {
+      statistics.resize( statisticCount );
+    }
+    return createResultValue(
+      result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+  }
+
+  template <
+    typename PipelineExecutableStatisticKHRAllocator,
+    typename Dispatch,
+    typename B,
+    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+    Device::getPipelineExecutableStatisticsKHR(
+      const PipelineExecutableInfoKHR & executableInfo,
+      PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
+      pipelineExecutableStatisticKHRAllocator );
+    uint32_t statisticCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+        m_device,
+        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+        &statisticCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
+          m_device,
+          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+          &statisticCount,
+          reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
+        VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
+    {
+      statistics.resize( statisticCount );
+    }
+    return createResultValue(
+      result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
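+  // A minimal usage sketch, assuming a valid vk::Device `device` and a
+  // vk::Pipeline `pipeline` created with
+  // vk::PipelineCreateFlagBits::eCaptureStatisticsKHR (all illustrative):
+  //
+  //   vk::PipelineInfoKHR pipelineInfo{ pipeline };
+  //   auto executables = device.getPipelineExecutablePropertiesKHR( pipelineInfo );
+  //   for ( uint32_t i = 0; i < executables.size(); ++i )
+  //   {
+  //     vk::PipelineExecutableInfoKHR execInfo{ pipeline, i };
+  //     auto statistics = device.getPipelineExecutableStatisticsKHR( execInfo );
+  //     // each statistic carries a name, a description, and a typed value
+  //   }
+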
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR(
+    const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+    uint32_t * pInternalRepresentationCount,
+    VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+      m_device,
+      reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
+      pInternalRepresentationCount,
+      reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<
+    PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
+    Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
+                                                             Dispatch const & d ) const
+  {
+    std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
+      internalRepresentations;
+    uint32_t internalRepresentationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+        m_device,
+        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+        &internalRepresentationCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+          m_device,
+          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+          &internalRepresentationCount,
+          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
+        VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
+    {
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValue( result,
+                              internalRepresentations,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+  }
+
+  template <typename PipelineExecutableInternalRepresentationKHRAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<
+              std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
+              int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<
+    PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
+    Device::getPipelineExecutableInternalRepresentationsKHR(
+      const PipelineExecutableInfoKHR & executableInfo,
+      PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
+      Dispatch const & d ) const
+  {
+    std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
+      internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
+    uint32_t internalRepresentationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+        m_device,
+        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+        &internalRepresentationCount,
+        nullptr ) );
+      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+          m_device,
+          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+          &internalRepresentationCount,
+          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
+        VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
+    {
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValue( result,
+                              internalRepresentations,
+                              VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_NV_device_generated_commands ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV(
+    const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetGeneratedCommandsMemoryRequirementsNV(
+      m_device,
+      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
+      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetGeneratedCommandsMemoryRequirementsNV(
+      m_device,
+      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+    Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
+      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetGeneratedCommandsMemoryRequirementsNV(
+      m_device,
+      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
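+  // A minimal usage sketch, assuming a valid vk::Device `device` and a filled
+  // vk::GeneratedCommandsMemoryRequirementsInfoNV `reqInfo` (both illustrative):
+  //
+  //   vk::MemoryRequirements2 reqs =
+  //     device.getGeneratedCommandsMemoryRequirementsNV( reqInfo );
+  //   // reqs.memoryRequirements.size bytes must be bound to the preprocess buffer
+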
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
+    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPreprocessGeneratedCommandsNV(
+      m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
+                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPreprocessGeneratedCommandsNV(
+      m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
+    VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
+                                       static_cast<VkBool32>( isPreprocessed ),
+                                       reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+                                               const GeneratedCommandsInfoNV & generatedCommandsInfo,
+                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
+                                       static_cast<VkBool32>( isPreprocessed ),
+                                       reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                              VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                                              uint32_t groupIndex,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer,
+                                      static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+                                      static_cast<VkPipeline>( pipeline ),
+                                      groupIndex );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV(
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
+      m_device,
+      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+      reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
+    Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
+                                            Optional<const AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
+      m_device,
+      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
+    return createResultValue(
+      result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
+    Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
+                                                  Optional<const AllocationCallbacks> allocator,
+                                                  Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
+      m_device,
+      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
+      result,
+      indirectCommandsLayout,
+      VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique",
+      deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV( m_device,
+                                         static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+                                             Optional<const AllocationCallbacks> allocator,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV(
+      m_device,
+      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV( m_device,
+                                         static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNV(
+      m_device,
+      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
+      reinterpret_cast<const VkAllocationCallbacks *>(
+        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_EXT_private_data ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,
+                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                      VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT * pPrivateDataSlot,
+                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
+      d.vkCreatePrivateDataSlotEXT( m_device,
+                                    reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( pCreateInfo ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                    reinterpret_cast<VkPrivateDataSlotEXT *>( pPrivateDataSlot ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type
+    Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo,
+                                      Optional<const AllocationCallbacks> allocator,
+                                      Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+    Result result = static_cast<Result>(
+      d.vkCreatePrivateDataSlotEXT( m_device,
+                                    reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>(
+                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                    reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
+    return createResultValue(
+      result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type
+    Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo,
+                                            Optional<const AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
+    Result result = static_cast<Result>(
+      d.vkCreatePrivateDataSlotEXT( m_device,
+                                    reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
+                                    reinterpret_cast<const VkAllocationCallbacks *>(
+                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                                    reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>(
+      result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter );
+  }
+#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device,
+                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+                                                            Optional<const AllocationCallbacks> allocator,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device,
+                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>(
+                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device,
+                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+                                          Optional<const AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPrivateDataSlotEXT( m_device,
+                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+                                   reinterpret_cast<const VkAllocationCallbacks *>(
+                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+                               uint64_t objectHandle,
+                               VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+                               uint64_t data,
+                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
+                                                       static_cast<VkObjectType>( objectType ),
+                                                       objectHandle,
+                                                       static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+                                                       data ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+                               uint64_t objectHandle,
+                               VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
+                               uint64_t data,
+                               Dispatch const & d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
+                                                                static_cast<VkObjectType>( objectType ),
+                                                                objectHandle,
+                                                                static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
+                                                                data ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
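+  // A minimal usage sketch, assuming a valid vk::Device `device` and a
+  // vk::Image `image` to tag (all illustrative):
+  //
+  //   vk::PrivateDataSlotEXT slot = device.createPrivateDataSlotEXT( {} );
+  //   uint64_t handle = uint64_t( static_cast<VkImage>( image ) );
+  //   device.setPrivateDataEXT( vk::ObjectType::eImage, handle, slot, 42 );
+  //   uint64_t value = device.getPrivateDataEXT( vk::ObjectType::eImage, handle, slot );
+  //   device.destroyPrivateDataSlotEXT( slot );
+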
privateDataSlot ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + uint64_t data; + d.vkGetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + &data ); + return data; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( pEncodeInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( &encodeInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetEvent2KHR( m_commandBuffer, + static_cast( event ), + reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const DependencyInfoKHR & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetEvent2KHR( m_commandBuffer, + static_cast( event ), + reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResetEvent2KHR( + m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWaitEvents2KHR( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + reinterpret_cast( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2KHR( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void 
+ CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteTimestamp2KHR( + m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkQueueSubmit2KHR( + m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkQueueSubmit2KHR( m_queue, + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + template + VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkGetQueueCheckpointData2NV( + m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( Dispatch const & d ) const + { + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + return checkpointData; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + { + std::vector checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, 
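// Illustrative sketch only: a queue submission through the VK_KHR_synchronization2
// wrapper submit2KHR above; `queue`, `commandBuffer`, `waitSemaphore` and `fence`
// are assumed to exist, and the struct constructors are the usual vulkan.hpp ones:
//
//   vk::SemaphoreSubmitInfoKHR waitInfo( waitSemaphore, 0, vk::PipelineStageFlagBits2KHR::eTopOfPipe );
//   vk::CommandBufferSubmitInfoKHR cmdInfo( commandBuffer );
//   vk::SubmitInfo2KHR submitInfo( {}, waitInfo, cmdInfo );
//   queue.submit2KHR( submitInfo, fence );  // throws vk::SystemError on failure in enhanced mode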
nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + return checkpointData; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_fragment_shading_rate_enums === + + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer, + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_KHR_copy_commands2 === + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast( pCopyBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast( ©BufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast( pCopyImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast( ©ImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( + const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, + reinterpret_cast( pCopyBufferToImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, + reinterpret_cast( ©BufferToImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( + const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, + reinterpret_cast( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, + reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
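// Illustrative sketch only: recording a buffer copy through the VK_KHR_copy_commands2
// wrappers in this section, with `srcBuffer`, `dstBuffer` and `size` standing in for
// resources created elsewhere:
//
//   vk::BufferCopy2KHR region( 0 /*srcOffset*/, 0 /*dstOffset*/, size );
//   vk::CopyBufferInfo2KHR info( srcBuffer, dstBuffer, region );
//   commandBuffer.copyBuffer2KHR( info );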
d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( &blitImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template <typename Dispatch> + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( &resolveImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( + VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); + } +# else + template <typename Dispatch> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + Result result = + static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast<Result>( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast<Result>( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template <typename Dispatch> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type + PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast<Result>( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if
defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( + uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( + uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( 
pCallableShaderBindingTable ), + width, + height, + depth ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { 
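// Illustrative sketch only: unlike most create functions, the enhanced-mode
// ray tracing pipeline overloads return a ResultValue instead of throwing,
// because eOperationDeferredKHR, eOperationNotDeferredKHR and
// ePipelineCompileRequiredEXT are additional success codes, so callers are
// expected to inspect the result themselves:
//
//   auto rv = device.createRayTracingPipelinesKHR( deferredOperation, pipelineCache, createInfos );
//   if ( rv.result == vk::Result::eOperationDeferredKHR )
//     ;  // creation continues asynchronously; poll the deferred operation
//   std::vector<vk::Pipeline> pipelines = std::move( rv.value );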
VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( 
d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createRayTracingPipelineKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + T data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
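// Illustrative sketch only: fetching the shader group handles that back a shader
// binding table, where `handleSize` would come from
// PhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize:
//
//   std::vector<uint8_t> handles = device.getRayTracingShaderGroupHandlesKHR<uint8_t>(
//       pipeline, 0 /*firstGroup*/, groupCount, size_t( groupCount ) * handleSize );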
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + Result result = static_cast( + d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); + std::vector data( dataSize / sizeof( T ) ); + Result result = static_cast( + d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + T data; + Result result = + static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( T ), + reinterpret_cast( &data ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( + m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysIndirectKHR( + m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), 
+ reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceSize + Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupStackSizeKHR( + m_device, static_cast( pipeline ), group, static_cast( groupShader ) ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize ); + } + + //=== VK_EXT_vertex_input_dynamic_state === + + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetVertexInputEXT( + m_commandBuffer, + vertexBindingDescriptionCount, + reinterpret_cast( pVertexBindingDescriptions ), + vertexAttributeDescriptionCount, + reinterpret_cast( pVertexAttributeDescriptions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetVertexInputEXT( + m_commandBuffer, + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + m_device, reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d ) const + { + zx_handle_t zirconHandle; + Result result = static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + return createResultValue( + result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return 
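// Illustrative sketch only: the setVertexInputEXT wrappers above replace the
// pipeline's fixed vertex input state at record time; `Vertex` is a hypothetical
// vertex struct, and the bound pipeline is assumed to list
// vk::DynamicState::eVertexInputEXT:
//
//   vk::VertexInputBindingDescription2EXT binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex, 1 );
//   vk::VertexInputAttributeDescription2EXT attribute( 0, 0, vk::Format::eR32G32B32Sfloat, 0 );
//   commandBuffer.setVertexInputEXT( binding, attribute );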
static_cast( d.vkGetMemoryZirconHandlePropertiesFUCHSIA( + m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( pMemoryZirconHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + Result result = static_cast( d.vkGetMemoryZirconHandlePropertiesFUCHSIA( + m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + return createResultValue( result, + memoryZirconHandleProperties, + VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreZirconHandleFUCHSIA( + const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( pGetZirconHandleInfo ), + pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d ) const + { + zx_handle_t zirconHandle; + Result result = static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( &getZirconHandleInfo ), + &zirconHandle ) ); + return createResultValue( + result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_extended_dynamic_state2 === + + template + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setRasterizerDiscardEnableEXT( 
VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); + } + + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); + } + + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) ); + } + + template <typename Dispatch> + VULKAN_HPP_INLINE void + CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast<Result>( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type + Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast<Result>( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( + static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template <typename Dispatch> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type + Instance::createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast<Result>( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( + static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template <typename Dispatch> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( + uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + return static_cast<Bool32>( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( + uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d
) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetColorWriteEnableEXT( + m_commandBuffer, attachmentCount, reinterpret_cast( pColorWriteEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorWriteEnableEXT( ArrayProxy const & colorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetColorWriteEnableEXT( + m_commandBuffer, colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct StructExtends{ enum { value = true }; }; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct StructExtends{ enum { value = true }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct 
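// Illustrative sketch only: these StructExtends specializations are what let a
// vk::StructureChain validate pNext chains at compile time; a chain only accepts
// a struct S behind a base B when StructExtends<S, B>::value is true, e.g.:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceVulkan12Features>();
//   bool timeline = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;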
StructExtends{ enum { value = true }; }; -#ifdef VK_ENABLE_BETA_EXTENSIONS - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct StructExtends{ enum { value = true }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + 
template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct StructExtends{ enum { value = true }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct StructExtends{ enum { value = true }; }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct StructExtends{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct StructExtends{ enum { value = true }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; - template <> struct StructExtends{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct StructExtends{ enum { value = true }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; 
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-  template <> struct StructExtends{ enum { value = true }; };
-  template <> struct StructExtends{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct StructExtends{ enum { value = true }; };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct StructExtends{ enum { value = true }; };
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  template <> struct StructExtends{ enum { value = true }; };
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <> struct StructExtends{ enum { value = true }; };
-  template <> struct StructExtends{ enum { value = true }; };
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-  template <> struct StructExtends{ enum { value = true }; };
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
-  template <> struct StructExtends{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_GGP
-  template <> struct StructExtends{ enum { value = true }; };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+#if defined( VK_USE_PLATFORM_GGP )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_USE_PLATFORM_GGP*/
-  template <> struct StructExtends{ enum { value = true }; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct StructExtends{ enum { value = true }; };
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
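// The StructExtends specializations above feed vulkan.hpp's compile-time pNext-chain
// validation: StructExtends<Extension, Base>::value is true exactly when Extension may
// be chained into Base's pNext list, and vk::StructureChain checks these traits with a
// static_assert. The template arguments of the specializations were lost when this
// patch was captured, so the sketch below uses hypothetical types (FeatureA,
// DeviceCreateInfoLike) purely to illustrate the mechanism; it is not vulkan.hpp code.

template <typename Extension, typename Base>
struct StructExtendsSketch
{
  enum
  {
    value = false  // default: Extension may not extend Base
  };
};

struct DeviceCreateInfoLike  // hypothetical base struct
{
  const void * pNext = nullptr;
};
struct FeatureA  // hypothetical extension struct
{
  const void * pNext = nullptr;
};

template <>
struct StructExtendsSketch<FeatureA, DeviceCreateInfoLike>
{
  enum
  {
    value = true  // FeatureA is allowed in DeviceCreateInfoLike's pNext chain
  };
};

// A chain helper can now reject invalid pNext combinations at compile time, which is
// what vk::StructureChain does against the real specialization tables:
template <typename Base, typename Extension>
void chain( Base & base, Extension & ext )
{
  static_assert( StructExtendsSketch<Extension, Base>::value, "invalid pNext chain" );
  ext.pNext  = base.pNext;
  base.pNext = &ext;
}

int main()
{
  DeviceCreateInfoLike info{};
  FeatureA             feature{};
  chain( info, feature );  // compiles; chaining an unrelated struct fails the static_assert
}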
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <> struct StructExtends{ enum { value = true }; };
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-  template <> struct StructExtends{ enum { value = true }; };
-  template <> struct StructExtends{ enum { value = true }; };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
 #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
-#  if defined( _WIN32 )
-  namespace detail
-  {
-    extern "C" __declspec( dllimport ) void * __stdcall LoadLibraryA( char const * lpLibFileName );
-    extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( void * hLibModule );
-    extern "C" __declspec( dllimport ) void * __stdcall GetProcAddress( void * hModule, char const * lpProcName );
-  }  // namespace detail
-#  endif
-
   class DynamicLoader
   {
   public:
 #  ifdef VULKAN_HPP_NO_EXCEPTIONS
-    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT : m_success( false )
+    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
 #  else
-    DynamicLoader( std::string const & vulkanLibraryName = {} ) : m_success( false )
+    DynamicLoader( std::string const & vulkanLibraryName = {} )
 #  endif
     {
       if ( !vulkanLibraryName.empty() )
       {
-#  if defined( __linux__ ) || defined( __APPLE__ )
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
         m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
 #  elif defined( _WIN32 )
-        m_library = detail::LoadLibraryA( vulkanLibraryName.c_str() );
+        m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
 #  else
 #    error unsupported platform
 #  endif
       }
       else
       {
-#  if defined( __linux__ )
+#  if defined( __unix__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
         m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
         if ( m_library == nullptr )
         {
@@ -85253,37 +123799,34 @@ namespace VULKAN_HPP_NAMESPACE
 #  elif defined( __APPLE__ )
         m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
 #  elif defined( _WIN32 )
-        m_library = detail::LoadLibraryA( "vulkan-1.dll" );
+        m_library = ::LoadLibraryA( "vulkan-1.dll" );
 #  else
 #    error unsupported platform
 #  endif
       }
-      m_success = (m_library != nullptr);
-#ifndef VULKAN_HPP_NO_EXCEPTIONS
-      if ( !m_success )
+#  ifndef VULKAN_HPP_NO_EXCEPTIONS
+      if ( m_library == nullptr )
       {
-        // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function.
+        // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the
+        // scope of this function.
         throw std::runtime_error( "Failed to load vulkan library!" );
       }
-#endif
+#  endif
     }

-    DynamicLoader( DynamicLoader const& ) = delete;
+    DynamicLoader( DynamicLoader const & ) = delete;

-    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
-      : m_success(other.m_success)
-      , m_library(other.m_library)
+    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library )
     {
       other.m_library = nullptr;
     }

-    DynamicLoader &operator=( DynamicLoader const& ) = delete;
+    DynamicLoader & operator=( DynamicLoader const & ) = delete;

-    DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+    DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
     {
-      m_success = other.m_success;
-      std::swap(m_library, other.m_library);
+      std::swap( m_library, other.m_library );
       return *this;
     }
@@ -85291,10 +123834,10 @@ namespace VULKAN_HPP_NAMESPACE
     {
       if ( m_library )
       {
-#  if defined( __linux__ ) || defined( __APPLE__ )
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
         dlclose( m_library );
 #  elif defined( _WIN32 )
-        detail::FreeLibrary( m_library );
+        ::FreeLibrary( m_library );
 #  else
 #    error unsupported platform
 #  endif
       }
@@ -85302,1925 +123845,2976 @@ namespace VULKAN_HPP_NAMESPACE
     }

     template <typename T>
-    T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
+    T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
     {
-#  if defined( __linux__ ) || defined( __APPLE__ )
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
       return (T)dlsym( m_library, function );
 #  elif defined( _WIN32 )
-      return (T)detail::GetProcAddress( m_library, function );
+      return ( T )::GetProcAddress( m_library, function );
 #  else
 #    error unsupported platform
 #  endif
     }

-    bool success() const VULKAN_HPP_NOEXCEPT { return m_success; }
+    bool success() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_library != nullptr;
+    }

   private:
-    bool m_success;
-#  if defined( __linux__ ) || defined( __APPLE__ ) || defined( _WIN32 )
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
     void * m_library;
+#  elif defined( _WIN32 )
+    ::HINSTANCE m_library;
 #  else
 #    error unsupported platform
 #  endif
   };
 #endif
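// A usage sketch for the reworked DynamicLoader above. The m_success flag is gone:
// success() now simply reports whether the library handle is non-null, and on Windows
// the loader calls ::LoadLibraryA / ::GetProcAddress directly instead of going through
// the removed detail:: redeclarations. This is the conventional vulkan.hpp bootstrap
// sequence; it assumes VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is enabled and that a
// Vulkan loader library is installed on the system.
#include <vulkan/vulkan.hpp>

int main()
{
  vk::DynamicLoader loader;  // dlopen( "libvulkan.so.1" ) / ::LoadLibraryA( "vulkan-1.dll" )
  if ( !loader.success() )   // i.e. the library handle is still nullptr
    return 1;

  // getProcAddress<T> is a thin cast around dlsym / ::GetProcAddress:
  auto vkGetInstanceProcAddr = loader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );

  // Seed a dynamic dispatch table from the single entry point; DispatchLoaderDynamic
  // resolves the rest of its PFN_ members through it.
  vk::DispatchLoaderDynamic dispatcher( vkGetInstanceProcAddr );
  return 0;
}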
-
   class DispatchLoaderDynamic
   {
   public:
-    PFN_vkCreateInstance vkCreateInstance = 0;
-    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
-    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
-    PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
-    PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
-    PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
-    PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
-    PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
-    PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
-    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
-    PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
-    PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
-    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
-    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
-    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
-    PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
-    PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
-    PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
-    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
-    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
-    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
-    PFN_vkCmdBlitImage vkCmdBlitImage = 0;
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdBuildAccelerationStructureIndirectKHR vkCmdBuildAccelerationStructureIndirectKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdBuildAccelerationStructureKHR vkCmdBuildAccelerationStructureKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
-    PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
-    PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
-    PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-    PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-    PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
-    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
-    PFN_vkCmdCopyImage vkCmdCopyImage = 0;
-    PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-    PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
-    PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
-    PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
-    PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
-    PFN_vkCmdDispatch vkCmdDispatch = 0;
-    PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
-    PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
-    PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
-    PFN_vkCmdDraw vkCmdDraw = 0;
-    PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
-    PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
-    PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
-    PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
-    PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
-    PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
-    PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
-    PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
-    PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
-    PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
-    PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
-    PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
-    PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
-    PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
-    PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
-    PFN_vkCmdEndQuery vkCmdEndQuery = 0;
-    PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
-    PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
-    PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
-    PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
-    PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
-    PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
-    PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
-    PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
-    PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
-    PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
-    PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
-    PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
-    PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
-    PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
-    PFN_vkCmdPushConstants vkCmdPushConstants = 0;
-    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
-    PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
-    PFN_vkCmdResetEvent vkCmdResetEvent = 0;
-    PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
-    PFN_vkCmdResolveImage vkCmdResolveImage = 0;
-    PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
-    PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
-    PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
-    PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
-    PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
-    PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
-    PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
-    PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
-    PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
-    PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
-    PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
-    PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
-    PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
-    PFN_vkCmdSetEvent vkCmdSetEvent = 0;
-    PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
-    PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
-    PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
-    PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
-    PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
-    PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
-    PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
-    PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
-    PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
-    PFN_vkCmdSetScissor vkCmdSetScissor = 0;
-    PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
-    PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
-    PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
-    PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
-    PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
-    PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
-    PFN_vkCmdSetViewport vkCmdSetViewport = 0;
-    PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
-    PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
-    PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-    PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
-    PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
-    PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-    PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-    PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
-    PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
-    PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
-    PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
-    PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
-#ifdef VK_USE_PLATFORM_WIN32_KHR
+    using PFN_dummy = void ( * )();
)(); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkAcquireFullScreenExclusiveModeEXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkBindAccelerationStructureMemoryKHR vkBindAccelerationStructureMemoryKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkBuildAccelerationStructureKHR vkBuildAccelerationStructureKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; -#endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkFreeMemory vkFreeMemory = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - 
PFN_vkGetAccelerationStructureMemoryRequirementsKHR vkGetAccelerationStructureMemoryRequirementsKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; +#else + PFN_dummy placeholder_dont_call_vkAcquireWinrtDisplayNV = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkAcquireXlibDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; 
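The rewritten table pairs every platform-guarded entry point with a pointer-sized stand-in in the #else branch, so the struct keeps the same size and member layout no matter which VK_USE_PLATFORM_* or VK_ENABLE_BETA_EXTENSIONS macros are defined; only the real pointer is ever loaded or called. A minimal sketch of that idiom, with a hypothetical USE_FAKE_PLATFORM macro and vkFakePlatformCallEXT command standing in for the real ones:

    // Sketch of the placeholder idiom from the generated header (hypothetical names).
    // The #else arm reserves one unused pointer so every build configuration sees
    // the same member count and offsets.
    #include <cstdint>

    using PFN_dummy = void ( * )();                                // generic stand-in slot
    using PFN_vkFakePlatformCallEXT = void ( * )( std::uint32_t ); // pretend platform command

    struct DispatchTableSketch
    {
    #if defined( USE_FAKE_PLATFORM )
        PFN_vkFakePlatformCallEXT vkFakePlatformCallEXT = nullptr;
    #else
        PFN_dummy placeholder_dont_call_vkFakePlatformCallEXT = nullptr;
    #endif
        // ... hundreds more entries in the real header, kept in one alphabetical list ...
    };

    // Function-pointer members are pointer-sized either way, so the layout is
    // configuration-independent.
    static_assert( sizeof( DispatchTableSketch ) == sizeof( PFN_dummy ),
                   "one slot regardless of configuration" );

    int main()
    {
        DispatchTableSketch table; // every slot starts out null, as in the header
        (void)table;
    }

Keeping the slot in both branches is presumably what lets the generated members stay in one alphabetical list, instead of being split into the per-platform blocks the removed code used.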
+#else + PFN_dummy placeholder_dont_call_vkBindVideoSessionMemoryKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdBeginVideoCodingKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdControlVideoCodingKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdDecodeVideoKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 
0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdEncodeVideoKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCmdEndVideoCodingKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + 
PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateAndroidSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_vkCreateBuffer vkCreateBuffer 
= 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkCreateDevice vkCreateDevice = 0; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateDirectFBSurfaceEXT = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; +#if defined( VK_USE_PLATFORM_IOS_MVK ) + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateIOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + PFN_vkCreateImage vkCreateImage = 0; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateImagePipeSurfaceFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkCreateInstance vkCreateInstance = 0; +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateMacOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateMetalSurfaceEXT = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateScreenSurfaceQNX = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkCreateSharedSwapchainsKHR 
vkCreateSharedSwapchainsKHR = 0; +#if defined( VK_USE_PLATFORM_GGP ) + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateStreamDescriptorSurfaceGGP = 0; +#endif /*VK_USE_PLATFORM_GGP*/ + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; +#if defined( VK_USE_PLATFORM_VI_NN ) + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateViSurfaceNN = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateVideoSessionKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateVideoSessionParametersKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateWaylandSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateWin32SurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateXcbSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkCreateXlibSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + 
PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkDestroyVideoSessionKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkDestroyVideoSessionParametersKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; +#else + PFN_dummy placeholder_dont_call_vkGetAndroidHardwareBufferPropertiesANDROID = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetCalibratedTimestampsEXT 
vkGetCalibratedTimestampsEXT = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetFenceWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; -#ifdef VK_USE_PLATFORM_ANDROID_KHR + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + 
PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryAndroidHardwareBufferANDROID = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR +#if defined( VK_USE_PLATFORM_WIN32_KHR ) PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandleKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR +#if defined( VK_USE_PLATFORM_WIN32_KHR ) PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandleNV = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR +#if defined( VK_USE_PLATFORM_WIN32_KHR ) PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandlePropertiesKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - PFN_vkGetSwapchainCounterEXT 
vkGetSwapchainCounterEXT = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; -#ifdef VK_ENABLE_BETA_EXTENSIONS - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; -#ifdef VK_USE_PLATFORM_IOS_MVK - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ -#ifdef VK_USE_PLATFORM_FUCHSIA - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; 
+#else + PFN_dummy placeholder_dont_call_vkGetMemoryZirconHandleFUCHSIA = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ -#ifdef VK_USE_PLATFORM_MACOS_MVK - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#ifdef VK_USE_PLATFORM_GGP - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -#endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_USE_PLATFORM_VI_NN - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -#endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR 
= 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - 
PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + 
PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; -#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR +#if defined( VK_USE_PLATFORM_WIN32_KHR ) PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR +#if defined( VK_USE_PLATFORM_XCB_KHR ) PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; #endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR +#if defined( VK_USE_PLATFORM_XLIB_KHR ) PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR 
vkGetPipelineExecutableInternalRepresentationsKHR = 0; + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkGetRandROutputDisplayEXT = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkGetSemaphoreZirconHandleFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkGetVideoSessionMemoryRequirementsKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +#else + PFN_dummy placeholder_dont_call_vkGetWinrtDisplayNV = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkImportFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkImportSemaphoreWin32HandleKHR = 0; +#endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy placeholder_dont_call_vkImportSemaphoreZirconHandleFUCHSIA = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; +#else + PFN_dummy placeholder_dont_call_vkReleaseFullScreenExclusiveModeEXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; +#else + PFN_dummy placeholder_dont_call_vkUpdateVideoSessionParametersKHR = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; public: - DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = 
default; + DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; + DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined(VK_NO_PROTOTYPES) - // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library. +#if !defined( VK_NO_PROTOTYPES ) + // This interface is designed to be used for per-device function pointers in combination with a linked vulkan + // library. template <typename DynamicLoader> - void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device, DynamicLoader const& dl) VULKAN_HPP_NOEXCEPT + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Device const & device, + DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT { - PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr"); - PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr"); - init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr); + PFN_vkGetInstanceProcAddr getInstanceProcAddr = + dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ); + PFN_vkGetDeviceProcAddr getDeviceProcAddr = + dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" ); + init( static_cast<VkInstance>( instance ), + getInstanceProcAddr, + static_cast<VkDevice>( device ), + device ? getDeviceProcAddr : nullptr ); } - // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library. + // This interface is designed to be used for per-device function pointers in combination with a linked vulkan + // library. template <typename DynamicLoader - void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + = VULKAN_HPP_NAMESPACE::DynamicLoader +# endif + > + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT { static DynamicLoader dl; - init(instance, device, dl); + init( instance, device, dl ); } -#endif // !defined(VK_NO_PROTOTYPES) +#endif // !defined( VK_NO_PROTOTYPES ) - DispatchLoaderDynamic(PFN_vkGetInstanceProcAddr getInstanceProcAddr) VULKAN_HPP_NOEXCEPT + DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT { - init(getInstanceProcAddr); + init( getInstanceProcAddr ); } void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(getInstanceProcAddr); + VULKAN_HPP_ASSERT( getInstanceProcAddr ); vkGetInstanceProcAddr = getInstanceProcAddr; - vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); - vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); - vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); - vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); + vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); + vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( + vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); + vkEnumerateInstanceLayerProperties = + PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); + vkEnumerateInstanceVersion = + PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); }
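The two init overloads above resolve per-device function pointers through a DynamicLoader combined with a linked Vulkan library. A minimal usage sketch, assuming vulkan.hpp is on the include path and that the instance and device shown were created elsewhere (both variable names and init_dispatch are hypothetical):

    // Sketch: per-device dispatch via vk::DynamicLoader (dlopen/LoadLibrary
    // under the hood). `instance` and `device` are assumed to exist already.
    #include <vulkan/vulkan.hpp>

    void init_dispatch( vk::Instance instance, vk::Device device )
    {
      static vk::DynamicLoader  dl;       // keeps the loader library mapped
      vk::DispatchLoaderDynamic dispatch;
      dispatch.init( instance, device, dl );  // fills every PFN_* member above
    }

Passing the device lets init resolve device-level entry points through vkGetDeviceProcAddr, which skips the loader's per-call dispatch trampoline.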
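The constructors and init overloads that follow take a raw VkInstance plus a vkGetInstanceProcAddr pointer, so the dispatcher can also be filled without linking against the Vulkan library at all. A sketch under that assumption; the translation unit would be built with -DVK_NO_PROTOTYPES, "libvulkan.so.1" is only the usual Linux loader name, and make_dispatch is a hypothetical helper (error handling elided):

    // Sketch: filling the dispatcher without a linked Vulkan library.
    #include <dlfcn.h>
    #include <vulkan/vulkan.hpp>

    vk::DispatchLoaderDynamic make_dispatch( VkInstance instance )
    {
      void * lib  = dlopen( "libvulkan.so.1", RTLD_NOW );
      auto   gipa = reinterpret_cast<PFN_vkGetInstanceProcAddr>(
        dlsym( lib, "vkGetInstanceProcAddr" ) );
      // The VkDevice argument defaults to {}; pass one to also resolve
      // device-level entry points.
      return vk::DispatchLoaderDynamic( instance, gipa );
    }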
// This interface does not require a linked vulkan library. - DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT + DispatchLoaderDynamic( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT { init( instance, getInstanceProcAddr, device, getDeviceProcAddr ); } // This interface does not require a linked vulkan library. - void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT + void init( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(instance && getInstanceProcAddr); + VULKAN_HPP_ASSERT( instance && getInstanceProcAddr ); vkGetInstanceProcAddr = getInstanceProcAddr; - init( VULKAN_HPP_NAMESPACE::Instance(instance) ); - if (device) { - init( VULKAN_HPP_NAMESPACE::Device(device) ); + init( VULKAN_HPP_NAMESPACE::Instance( instance ) ); + if ( device ) + { + init( VULKAN_HPP_NAMESPACE::Device( device ) ); } } void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT { - VkInstance instance = static_cast<VkInstance>(instanceCpp); -#ifdef VK_USE_PLATFORM_ANDROID_KHR - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); + VkInstance instance = static_cast<VkInstance>( instanceCpp ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkAcquireWinrtDisplayNV = + PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + vkAcquireXlibDisplayEXT = + PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + vkCreateAndroidSurfaceKHR = + PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); - vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); -#ifdef VK_USE_PLATFORM_IOS_MVK + vkCreateDebugReportCallbackEXT = + PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkCreateDebugUtilsMessengerEXT = + PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT 
) + vkCreateDirectFBSurfaceEXT = + PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + vkCreateDisplayModeKHR = + PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = + PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + vkCreateHeadlessSurfaceEXT = + PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); +#if defined( VK_USE_PLATFORM_IOS_MVK ) vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); #endif /*VK_USE_PLATFORM_IOS_MVK*/ -#ifdef VK_USE_PLATFORM_FUCHSIA - vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkCreateImagePipeSurfaceFUCHSIA = + PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ -#ifdef VK_USE_PLATFORM_MACOS_MVK - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + vkCreateMacOSSurfaceMVK = + PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); #endif /*VK_USE_PLATFORM_MACOS_MVK*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#if defined( VK_USE_PLATFORM_METAL_EXT ) + vkCreateMetalSurfaceEXT = + PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); #endif /*VK_USE_PLATFORM_METAL_EXT*/ -#ifdef VK_USE_PLATFORM_GGP - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + vkCreateScreenSurfaceQNX = + PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_GGP ) + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( + vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); #endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_USE_PLATFORM_VI_NN +#if defined( VK_USE_PLATFORM_VI_NN ) vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); #endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + vkCreateWaylandSurfaceKHR = + PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkCreateWin32SurfaceKHR = + PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR +#if defined( VK_USE_PLATFORM_XCB_KHR ) vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( 
vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + vkCreateXlibSurfaceKHR = + PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_XLIB_KHR*/ - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkDebugReportMessageEXT = + PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + vkDestroyDebugReportCallbackEXT = + PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = + PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); - 
vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); - vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); - vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); - vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( + vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = + PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkEnumeratePhysicalDeviceGroups = + PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkEnumeratePhysicalDevices = + PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetDisplayModeProperties2KHR = + PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayModePropertiesKHR = + PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = + PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = + PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( + vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr( vkGetInstanceProcAddr( instance, "vkGetInstanceProcAddr" ) ); + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + vkGetPhysicalDeviceFeatures = + PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFeatures2 = + PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceFeatures2KHR = + PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( + 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkGetPhysicalDeviceProperties = + PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceProperties2 = + PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceProperties2KHR = + PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + 
vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); - vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( + vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceToolPropertiesEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#if defined( VK_USE_PLATFORM_XCB_KHR ) + vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + vkGetRandROutputDisplayEXT = + PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); - 
vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); -#ifdef 
VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( 
vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - 
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdWriteAccelerationStructuresPropertiesKHR = 
PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + vkSubmitDebugUtilsMessageEXT = + PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkAcquireNextImage2KHR = + PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); - vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( 
instance, "vkAllocateCommandBuffers" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( + vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkBindBufferMemory2KHR = + PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructureKHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkBindVideoSessionMemoryKHR = + PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkBuildAccelerationStructuresKHR = + PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdBeginVideoCodingKHR = + PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdBindDescriptorSets = + 
+      vkCmdBindDescriptorSets =
+        PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+      vkCmdBindPipelineShaderGroupNV =
+        PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
+      vkCmdBindShadingRateImageNV =
+        PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(
+        vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBindVertexBuffers =
+        PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+      vkCmdBindVertexBuffers2EXT =
+        PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
+      vkCmdBuildAccelerationStructureNV =
+        PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR(
+        vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+      vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR(
+        vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage =
+        PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCmdControlVideoCodingKHR =
+        PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdCopyAccelerationStructureKHR =
+        PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
+      vkCmdCopyAccelerationStructureNV =
+        PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR(
+        vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
+      vkCmdCopyBufferToImage =
+        PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyBufferToImage2KHR =
+        PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
+      vkCmdCopyImageToBuffer =
+        PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+      vkCmdCopyImageToBuffer2KHR =
+        PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
+      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR(
+        vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+      vkCmdCopyQueryPoolResults =
+        PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdCuLaunchKernelNVX =
+        PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) );
+      vkCmdDebugMarkerBeginEXT =
+        PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT =
+        PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT =
+        PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
+      if ( !vkCmdDispatchBase )
+        vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndexedIndirect =
+        PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDrawIndexedIndirectCount =
+        PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
+      vkCmdDrawIndexedIndirectCountAMD =
+        PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndexedIndirectCount )
+        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+      vkCmdDrawIndexedIndirectCountKHR =
+        PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndexedIndirectCount )
+        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndirectByteCountEXT =
+        PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+      vkCmdDrawIndirectCount =
+        PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
+      vkCmdDrawIndirectCountAMD =
+        PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndirectCount )
+        vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+      vkCmdDrawIndirectCountKHR =
+        PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndirectCount )
+        vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+      vkCmdDrawMeshTasksIndirectCountNV =
+        PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+      vkCmdDrawMeshTasksIndirectNV =
+        PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdEndConditionalRenderingEXT =
+        PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT =
+        PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
+      vkCmdEndQueryIndexedEXT =
+        PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
+      vkCmdEndRenderPass2KHR =
+        PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+      if ( !vkCmdEndRenderPass2 )
+        vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+      vkCmdEndTransformFeedbackEXT =
+        PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCmdEndVideoCodingKHR =
+        PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+      vkCmdExecuteGeneratedCommandsNV =
+        PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
+      vkCmdInsertDebugUtilsLabelEXT =
+        PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+      vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
+      if ( !vkCmdNextSubpass2 )
+        vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+      vkCmdPipelineBarrier2KHR =
+        PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
+      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV(
+        vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
+      vkCmdPushDescriptorSetKHR =
+        PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(
+        vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+      vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) );
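+      // Illustrative usage, not part of the generated header: an application fills
+      // this dispatch table once per instance and then calls through its members,
+      // e.g. (assuming a recorded commandBuffer):
+      //   vk::DispatchLoaderDynamic dld( instance, vkGetInstanceProcAddr );
+      //   dld.vkCmdDraw( commandBuffer, 3, 1, 0, 0 );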
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
+      vkCmdSetBlendConstants =
+        PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
+      vkCmdSetCoarseSampleOrderNV =
+        PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+      vkCmdSetColorWriteEnableEXT =
+        PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) );
+      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
+      vkCmdSetDepthBiasEnableEXT =
+        PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
+      vkCmdSetDepthBoundsTestEnableEXT =
+        PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+      vkCmdSetDepthCompareOpEXT =
+        PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
+      vkCmdSetDepthTestEnableEXT =
+        PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
+      vkCmdSetDepthWriteEnableEXT =
+        PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
+      if ( !vkCmdSetDeviceMask )
+        vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+      vkCmdSetDiscardRectangleEXT =
+        PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
+      vkCmdSetExclusiveScissorNV =
+        PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+      vkCmdSetFragmentShadingRateEnumNV =
+        PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
+      vkCmdSetFragmentShadingRateKHR =
+        PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
+      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
+      vkCmdSetLineStippleEXT =
+        PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
+      vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) );
+      vkCmdSetPatchControlPointsEXT =
+        PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) );
+      vkCmdSetPerformanceMarkerINTEL =
+        PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL =
+        PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL(
+        vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetPrimitiveRestartEnableEXT =
+        PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+      vkCmdSetPrimitiveTopologyEXT =
+        PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
+      vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT(
+        vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+      vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR(
+        vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+      vkCmdSetSampleLocationsEXT =
+        PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+      vkCmdSetScissorWithCountEXT =
+        PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
+      vkCmdSetStencilCompareMask =
+        PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
+      vkCmdSetStencilReference =
+        PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+      vkCmdSetStencilTestEnableEXT =
+        PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
+      vkCmdSetStencilWriteMask =
+        PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetVertexInputEXT =
+        PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(
+        vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetViewportWScalingNV =
+        PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+      vkCmdSetViewportWithCountEXT =
+        PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
+      vkCmdTraceRaysIndirectKHR =
+        PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR(
+        vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(
+        vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCmdWriteBufferMarker2AMD =
+        PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
+      vkCmdWriteBufferMarkerAMD =
+        PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+      vkCmdWriteTimestamp2KHR =
+        PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
       vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
-      vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
-      vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
+      vkCopyAccelerationStructureKHR =
+        PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR(
+        vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR(
+        vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
+      vkCreateAccelerationStructureKHR =
+        PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
+      vkCreateAccelerationStructureNV =
+        PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
       vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
-      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
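+      // Unlike the removed lines above, the KHR acceleration-structure,
+      // deferred-operation, and ray-tracing loads are no longer wrapped in
+      // VK_ENABLE_BETA_EXTENSIONS; those extensions left provisional status, so in
+      // this header only the video-coding entry points keep the beta guard.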
-      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
-      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
-      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
-      vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
-      vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
+      vkCreateComputePipelines =
+        PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) );
+      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) );
+      vkCreateDeferredOperationKHR =
+        PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
+      vkCreateDescriptorPool =
+        PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
+      vkCreateDescriptorSetLayout =
+        PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
+      vkCreateDescriptorUpdateTemplate =
+        PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(
+        vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      if ( !vkCreateDescriptorUpdateTemplate )
+        vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
       vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
-      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
-      vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
+      vkCreateGraphicsPipelines =
+        PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
       vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
-      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkCreateIndirectCommandsLayoutNV =
+        PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
       vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
-      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
-      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
+      vkCreatePipelineLayout =
+        PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+      vkCreatePrivateDataSlotEXT =
+        PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
       vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
-      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+      vkCreateRayTracingPipelinesKHR =
+        PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
+      vkCreateRayTracingPipelinesNV =
+        PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
       vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
-      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+      vkCreateRenderPass2KHR =
+        PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+      if ( !vkCreateRenderPass2 )
+        vkCreateRenderPass2 = vkCreateRenderPass2KHR;
       vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
-      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
-      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
-      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
+      vkCreateSamplerYcbcrConversion =
+        PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+      vkCreateSamplerYcbcrConversionKHR =
+        PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+      if ( !vkCreateSamplerYcbcrConversion )
+        vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
       vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
-      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+      vkCreateSharedSwapchainsKHR =
+        PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
       vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
-      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
-      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
-      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+      vkCreateValidationCacheEXT =
+        PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCreateVideoSessionKHR =
+        PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) );
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCreateVideoSessionParametersKHR =
+        PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) );
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
-      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
-      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
+      vkDebugMarkerSetObjectNameEXT =
+        PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkDebugMarkerSetObjectTagEXT =
+        PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDeferredOperationJoinKHR =
+        PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+      vkDestroyAccelerationStructureKHR =
+        PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
+      vkDestroyAccelerationStructureNV =
+        PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
       vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
-      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
-      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
-      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
-      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
-      vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
-      vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
+      vkDestroyCuFunctionNVX =
+        PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) );
+      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) );
+      vkDestroyDeferredOperationKHR =
+        PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
+      vkDestroyDescriptorPool =
+        PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
+      vkDestroyDescriptorSetLayout =
+        PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
+      vkDestroyDescriptorUpdateTemplate =
+        PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(
+        vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate )
+        vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
       vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
-      vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
-      vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
-      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
+      vkDestroyIndirectCommandsLayoutNV =
+        PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
       vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
-      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
-      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
-      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
-      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
+      vkDestroyPipelineCache =
+        PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
+      vkDestroyPipelineLayout =
+        PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+      vkDestroyPrivateDataSlotEXT =
+        PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
       vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
-      vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
-      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
-      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
-      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
+      vkDestroySamplerYcbcrConversion =
+        PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR(
+        vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+      if ( !vkDestroySamplerYcbcrConversion )
+        vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
       vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
       vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
-      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+      vkDestroyValidationCacheEXT =
+        PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkDestroyVideoSessionKHR =
+        PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR(
+        vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
       vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
-      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
-      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+      vkDisplayPowerControlEXT =
+        PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
+      vkFlushMappedMemoryRanges =
+        PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
       vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
       vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
-      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) );
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR(
+        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV(
+        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(
+        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(
+        vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
-      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
-      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
-      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
-      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
-      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
-      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
-      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
-      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
-      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
-      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
-      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+      vkGetBufferDeviceAddress =
+        PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
+      vkGetBufferDeviceAddressEXT =
+        PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+      if ( !vkGetBufferDeviceAddress )
+        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+      vkGetBufferDeviceAddressKHR =
+        PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
+      if ( !vkGetBufferDeviceAddress )
+        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      vkGetBufferMemoryRequirements =
+        PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
+      vkGetBufferMemoryRequirements2 =
+        PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2KHR =
+        PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+      if ( !vkGetBufferMemoryRequirements2 )
+        vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetBufferOpaqueCaptureAddress =
+        PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR(
+        vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress )
+        vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetCalibratedTimestampsEXT =
+        PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
+        vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+      vkGetDeferredOperationResultKHR =
+        PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
+      vkGetDescriptorSetLayoutSupport =
+        PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR(
+        vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      if ( !vkGetDescriptorSetLayoutSupport )
+        vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures )
+        vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
-      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
-      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
-      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetDeviceMemoryCommitment =
+        PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+        vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
+        vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
       vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
-      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
-      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
-      vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
-      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
-      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetFenceWin32HandleKHR =
+        PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
-      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
-      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
-      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
-      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
-      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
-      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
-      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
-      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
-      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
-      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
+        vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(
+        vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+      vkGetImageMemoryRequirements =
+        PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
+      vkGetImageMemoryRequirements2 =
+        PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+      vkGetImageMemoryRequirements2KHR =
+        PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+      if ( !vkGetImageMemoryRequirements2 )
+        vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements(
+        vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(
+        vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(
+        vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 )
+        vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+      vkGetImageSubresourceLayout =
+        PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+      vkGetImageViewAddressNVX =
+        PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
+      vkGetImageViewHandleNVX =
+        PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(
+        vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
       vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
-      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
-      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
+      vkGetMemoryFdPropertiesKHR =
+        PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(
+        vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetMemoryWin32HandleKHR =
+        PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetMemoryWin32HandleNV =
+        PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
-      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
-      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
-      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
-      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
-      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
-      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      vkGetMemoryZirconHandleFUCHSIA =
+        PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA(
+        vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
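+      // New platform coverage in this header revision: VK_USE_PLATFORM_FUCHSIA guards
+      // the zircon-handle external-memory queries just above, mirroring the
+      // Win32-handle queries that precede them.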
+      vkGetPastPresentationTimingGOOGLE =
+        PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
+      vkGetPerformanceParameterINTEL =
+        PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+      vkGetPipelineCacheData =
+        PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
+        vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
+        vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
       vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
-      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
-      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
-      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
-      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+      vkGetQueueCheckpointData2NV =
+        PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
+      vkGetQueueCheckpointDataNV =
+        PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR(
+        vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV(
+        vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR )
+        vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR(
+        vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+      vkGetRefreshCycleDurationGOOGLE =
+        PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetRenderAreaGranularity =
+        PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+      vkGetSemaphoreCounterValue =
+        PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
+      vkGetSemaphoreCounterValueKHR =
+        PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+      if ( !vkGetSemaphoreCounterValue )
+        vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
       vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetSemaphoreWin32HandleKHR =
+        PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      vkGetSemaphoreZirconHandleFUCHSIA =
+        PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
       vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
-      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
-      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
-      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
-      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
-      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
-      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
-      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
-      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
-      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
-      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
-      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
-      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
-      vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
-      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
-      vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
-      vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
-      vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
-      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
-      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
-      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
-      vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
-      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
-      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
-      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
-      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
-      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
-      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
-      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
-      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
-      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
-      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
-      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
-      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
-      vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
-      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
-      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+      vkGetSwapchainCounterEXT =
+        PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
+      vkGetSwapchainImagesKHR =
+        PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
+      vkGetSwapchainStatusKHR =
+        PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance,
"vkGetSwapchainStatusKHR" ) ); + vkGetValidationCacheDataEXT = + PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( + vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkImportSemaphoreFdKHR = + PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( + vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkMergeValidationCachesEXT = + PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = + PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = + PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = + PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); - vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkQueueSetPerformanceConfigurationINTEL = 
PFN_vkQueueSetPerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkReleaseProfilingLockKHR = + PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkUpdateDescriptorSetWithTemplate = + 
PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + vkUpdateDescriptorSets = + PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkUpdateVideoSessionParametersKHR = + PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); } void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT { - VkDevice device = static_cast<VkDevice>(deviceCpp); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr(
device, "vkCmdBlitImage" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureIndirectKHR = PFN_vkCmdBuildAccelerationStructureIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureIndirectKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdBuildAccelerationStructureKHR = PFN_vkCmdBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, 
"vkCmdDrawIndexedIndirectCount" ) ); - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = 
PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilOpEXT = 
PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + VkDevice device = static_cast<VkDevice>( deviceCpp ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device,
"vkAllocateCommandBuffers" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkBindAccelerationStructureMemoryKHR = PFN_vkBindAccelerationStructureMemoryKHR( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkBuildAccelerationStructureKHR = PFN_vkBuildAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructureKHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkBindVideoSessionMemoryKHR = + PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkBuildAccelerationStructuresKHR = + 
PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdBeginVideoCodingKHR = + PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdBindVertexBuffers2EXT = + PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( + vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = + PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, 
"vkCmdClearDepthStencilImage" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdControlVideoCodingKHR = + PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyBufferToImage2KHR = + PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdCopyImageToBuffer2KHR = + PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_ENABLE_BETA_EXTENSIONS - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, 
"vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = + PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkCmdEndVideoCodingKHR = 
PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdPipelineBarrier2KHR = + PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + vkCmdPreprocessGeneratedCommandsNV = + PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetColorWriteEnableEXT = + PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBiasEnableEXT = + PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetDepthBoundsTestEnableEXT = + PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + vkCmdSetDepthCompareOpEXT = + PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + vkCmdSetDepthTestEnableEXT = + PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, 
"vkCmdSetDepthTestEnableEXT" ) ); + vkCmdSetDepthWriteEnableEXT = + PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetFragmentShadingRateEnumNV = + PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + vkCmdSetFragmentShadingRateKHR = + PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPatchControlPointsEXT = + PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPrimitiveRestartEnableEXT = + PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + vkCmdSetPrimitiveTopologyEXT = + PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = + PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( + vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetScissorWithCountEXT = + PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilTestEnableEXT = + 
        PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+      vkCmdSetStencilWriteMask =
+        PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(
+        vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetViewportWScalingNV =
+        PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+      vkCmdSetViewportWithCountEXT =
+        PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+      vkCmdTraceRaysIndirectKHR =
+        PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR(
+        vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(
+        vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCmdWriteBufferMarker2AMD =
+        PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
+      vkCmdWriteBufferMarkerAMD =
+        PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
+      vkCopyAccelerationStructureKHR =
+        PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR(
+        vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR(
+        vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+      vkCreateAccelerationStructureKHR =
+        PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+      vkCreateAccelerationStructureNV =
+        PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
       vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
-      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkCreateComputePipelines =
+        PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
+      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
+      vkCreateDeferredOperationKHR =
+        PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
       vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
-      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
-      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
-      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
-      vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
-      vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+      vkCreateDescriptorSetLayout =
+        PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+      vkCreateDescriptorUpdateTemplate =
+        PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+      vkCreateDescriptorUpdateTemplateKHR =
+        PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      if ( !vkCreateDescriptorUpdateTemplate )
+        vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
       vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
-      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
-      vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+      vkCreateGraphicsPipelines =
+        PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
       vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
-      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
-      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+      vkCreateIndirectCommandsLayoutNV =
+        PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
       vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
-      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
+      vkCreatePrivateDataSlotEXT =
+        PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
       vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
-      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
-      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+      vkCreateRayTracingPipelinesKHR =
+        PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+      vkCreateRayTracingPipelinesNV =
+        PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
       vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
+      if ( !vkCreateRenderPass2 )
+        vkCreateRenderPass2 = vkCreateRenderPass2KHR;
       vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
-      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
-      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
-      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+      vkCreateSamplerYcbcrConversion =
+        PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+      vkCreateSamplerYcbcrConversionKHR =
+        PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+      if ( !vkCreateSamplerYcbcrConversion )
+        vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
       vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
-      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+      vkCreateSharedSwapchainsKHR =
+        PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
       vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
-      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
-      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
-      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+      vkCreateValidationCacheEXT =
+        PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
-      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
-      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
-      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkCreateVideoSessionParametersKHR =
+        PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      vkDebugMarkerSetObjectNameEXT =
+        PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkDebugMarkerSetObjectTagEXT =
+        PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDeferredOperationJoinKHR =
+        PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+      vkDestroyAccelerationStructureKHR =
+        PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+      vkDestroyAccelerationStructureNV =
+        PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
+      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
+      vkDestroyDeferredOperationKHR =
+        PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
       vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
-      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
-      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
-      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
-      vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
-      vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
-      vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+      vkDestroyDescriptorSetLayout =
+        PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+      vkDestroyDescriptorUpdateTemplate =
+        PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(
+        vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate )
+        vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
       vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
-      vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
-      vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
-      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
-      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
-      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+      vkDestroyIndirectCommandsLayoutNV =
+        PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
       vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
-      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
-      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+      vkDestroyPrivateDataSlotEXT =
+        PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
       vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
-      vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
-      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
-      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
-      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+      vkDestroySamplerYcbcrConversion =
+        PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversionKHR =
+        PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+      if ( !vkDestroySamplerYcbcrConversion )
+        vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
       vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
       vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
-      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+      vkDestroyValidationCacheEXT =
+        PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkDestroyVideoSessionKHR =
+        PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkDestroyVideoSessionParametersKHR =
+        PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
       vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
-      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
-      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+      vkDisplayPowerControlEXT =
+        PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+      vkFlushMappedMemoryRanges =
+        PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
       vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
       vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
-      vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetAccelerationStructureMemoryRequirementsKHR = PFN_vkGetAccelerationStructureMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR(
+        vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR(
+        vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+      vkGetAccelerationStructureHandleNV =
+        PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(
+        vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(
+        vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
-      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
-      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
-      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
-      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
-      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
-      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
-      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
-      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
-      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
-      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
-      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+      vkGetBufferDeviceAddress =
+        PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+      vkGetBufferDeviceAddressEXT =
+        PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+      if ( !vkGetBufferDeviceAddress )
+        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+      vkGetBufferDeviceAddressKHR =
+        PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+      if ( !vkGetBufferDeviceAddress )
+        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      vkGetBufferMemoryRequirements =
+        PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+      vkGetBufferMemoryRequirements2 =
+        PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2KHR =
+        PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+      if ( !vkGetBufferMemoryRequirements2 )
+        vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetBufferOpaqueCaptureAddress =
+        PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+      vkGetBufferOpaqueCaptureAddressKHR =
+        PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress )
+        vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetCalibratedTimestampsEXT =
+        PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
+        vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+      vkGetDeferredOperationResultKHR =
+        PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+      vkGetDescriptorSetLayoutSupport =
+        PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+      vkGetDescriptorSetLayoutSupportKHR =
+        PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      if ( !vkGetDescriptorSetLayoutSupport )
+        vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR(
+        vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+      vkGetDeviceGroupPeerMemoryFeatures =
+        PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(
+        vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures )
+        vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(
+        vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT(
+        vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
-      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
-      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
-      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(
+        vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetDeviceMemoryCommitment =
+        PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress(
+        vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+        vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
+        vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
       vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
-      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
-      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
-      vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
-      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
-      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetFenceWin32HandleKHR =
+        PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
-      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
-      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
-      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
-      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
-      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
-      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
-      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
-      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
-      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
+        vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(
+        vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+      vkGetImageMemoryRequirements =
+        PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+      vkGetImageMemoryRequirements2 =
+        PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+      vkGetImageMemoryRequirements2KHR =
+        PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+      if ( !vkGetImageMemoryRequirements2 )
+        vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements =
+        PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements2 =
+        PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(
+        vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 )
+        vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+      vkGetImageSubresourceLayout =
+        PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+      vkGetImageViewAddressNVX =
+        PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
       vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(
+        vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
       vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
-      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
-      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+      vkGetMemoryFdPropertiesKHR =
+        PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+      vkGetMemoryHostPointerPropertiesEXT =
+        PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetMemoryWin32HandleKHR =
+        PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetMemoryWin32HandleNV =
+        PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetMemoryWin32HandlePropertiesKHR =
+        PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
-      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      vkGetMemoryZirconHandleFUCHSIA =
+        PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA(
+        vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      vkGetPastPresentationTimingGOOGLE =
+        PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+      vkGetPerformanceParameterINTEL =
+        PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
       vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
-      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
-      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
-      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
-      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
+        vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
+        vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
+        vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
       vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
-      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
-      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
-      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
-      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+      vkGetQueueCheckpointData2NV =
+        PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
+      vkGetQueueCheckpointDataNV =
+        PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+        vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR(
+        vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesNV =
+        PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR )
+        vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR(
+        vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+      vkGetRefreshCycleDurationGOOGLE =
+        PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetRenderAreaGranularity =
+        PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+      vkGetSemaphoreCounterValue =
+        PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+      vkGetSemaphoreCounterValueKHR =
+        PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+      if ( !vkGetSemaphoreCounterValue )
+        vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
       vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkGetSemaphoreWin32HandleKHR =
+        PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      vkGetSemaphoreZirconHandleFUCHSIA =
+        PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
       vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
-      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+      vkGetSwapchainCounterEXT =
+        PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
       vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
       vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
-      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+      vkGetValidationCacheDataEXT =
+        PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR(
+        vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
       vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkImportFenceWin32HandleKHR =
+        PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
       vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkImportSemaphoreWin32HandleKHR =
+        PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
-      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
-      vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA(
+        vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      vkInitializePerformanceApiINTEL =
+        PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+      vkInvalidateMappedMemoryRanges =
+        PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
       vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
-      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
-      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
-      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
-      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
-      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
-      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
-      vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
-      vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
-      vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
-      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
-      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
-      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
-      vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
-      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
-      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
-      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
-      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
-      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
-      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
-      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
-      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
-      vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
-      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
-      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
-      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
-      vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
-      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
-      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
-      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkMergeValidationCachesEXT =
+        PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+      vkQueueBeginDebugUtilsLabelEXT =
+        PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
       vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
-      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
-      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkQueueEndDebugUtilsLabelEXT =
+        PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT =
+        PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
       vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
-      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
-      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
-      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL(
+        vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+      vkRegisterDeviceEventEXT =
+        PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT =
+        PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      vkReleaseFullScreenExclusiveModeEXT =
+        PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL(
+        vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkReleaseProfilingLockKHR =
+        PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+      vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+      vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
+      if ( !vkResetQueryPool )
+        vkResetQueryPool = vkResetQueryPoolEXT;
+      vkSetDebugUtilsObjectNameEXT =
+        PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
"vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + vkUpdateVideoSessionParametersKHR = + PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( + vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); } }; -} // namespace VULKAN_HPP_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE namespace std { - - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::AccelerationStructureKHR const& accelerationStructureKHR) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const + VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(accelerationStructureKHR)); + return std::hash{}( + static_cast( accelerationStructureKHR ) ); } }; - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::Buffer const& buffer) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const + VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(buffer)); + return std::hash{}( + static_cast( 
accelerationStructureNV ) ); } }; - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::BufferView const& bufferView) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::Buffer const & buffer ) const VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(bufferView)); + return std::hash{}( static_cast( buffer ) ); } }; - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::CommandBuffer const& commandBuffer) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferView const & bufferView ) const VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(commandBuffer)); + return std::hash{}( static_cast( bufferView ) ); } }; - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::CommandPool const& commandPool) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBuffer const & commandBuffer ) const VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(commandPool)); + return std::hash{}( static_cast( commandBuffer ) ); } }; - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::DebugReportCallbackEXT const& debugReportCallbackEXT) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPool const & commandPool ) const VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(debugReportCallbackEXT)); + return std::hash{}( static_cast( commandPool ) ); } }; - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::DebugUtilsMessengerEXT const& debugUtilsMessengerEXT) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionNVX const & cuFunctionNVX ) const VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(debugUtilsMessengerEXT)); + return std::hash{}( static_cast( cuFunctionNVX ) ); } }; -#ifdef VK_ENABLE_BETA_EXTENSIONS - template <> struct hash + template <> + struct hash { - std::size_t operator()(vk::DeferredOperationKHR const& deferredOperationKHR) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleNVX const & cuModuleNVX ) const VULKAN_HPP_NOEXCEPT { - return std::hash{}(static_cast(deferredOperationKHR)); + return std::hash{}( static_cast( cuModuleNVX ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugReportCallbackEXT ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugUtilsMessengerEXT ) ); + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( deferredOperationKHR ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPool const & descriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSet const & descriptorSet ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorSet ) ); + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorSetLayout>{}( static_cast<VkDescriptorSetLayout>( descriptorSetLayout ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDescriptorUpdateTemplate>{}(
+        static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Device>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Device const & device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDevice>{}( static_cast<VkDevice>( device ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemory const & deviceMemory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeviceMemory>{}( static_cast<VkDeviceMemory>( deviceMemory ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DisplayKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayKHR const & displayKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayKHR>{}( static_cast<VkDisplayKHR>( displayKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeKHR const & displayModeKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDisplayModeKHR>{}( static_cast<VkDisplayModeKHR>( displayModeKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Event>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Event const & event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkEvent>{}( static_cast<VkEvent>( event ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Fence>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFence>{}( static_cast<VkFence>( fence ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Framebuffer>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Framebuffer const & framebuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFramebuffer>{}( static_cast<VkFramebuffer>( framebuffer ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Image>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Image const & image ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImage>{}( static_cast<VkImage>( image ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageView>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageView const & imageView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImageView>{}( static_cast<VkImageView>( imageView ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkIndirectCommandsLayoutNV>{}(
+        static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayoutNV ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Instance const & instance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkInstance>{}( static_cast<VkInstance>( instance ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPerformanceConfigurationINTEL>{}(
+        static_cast<VkPerformanceConfigurationINTEL>( performanceConfigurationINTEL ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice const & physicalDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPhysicalDevice>{}( static_cast<VkPhysicalDevice>( physicalDevice ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Pipeline>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Pipeline const & pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipeline>{}( static_cast<VkPipeline>( pipeline ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCache const & pipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineCache>{}( static_cast<VkPipelineCache>( pipelineCache ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineLayout>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayout const & pipelineLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineLayout>{}( static_cast<VkPipelineLayout>( pipelineLayout ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT const & privateDataSlotEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPrivateDataSlotEXT>{}( static_cast<VkPrivateDataSlotEXT>( privateDataSlotEXT ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPool const & queryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueryPool>{}( static_cast<VkQueryPool>( queryPool ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Queue const & queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueue>{}( static_cast<VkQueue>( queue ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::RenderPass>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPass const & renderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkRenderPass>{}( static_cast<VkRenderPass>( renderPass ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Sampler>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Sampler const & sampler ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSampler>{}( static_cast<VkSampler>( sampler ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSamplerYcbcrConversion>{}( static_cast<VkSamplerYcbcrConversion>( samplerYcbcrConversion ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Semaphore const & semaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSemaphore>{}( static_cast<VkSemaphore>( semaphore ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModule const & shaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkShaderModule>{}( static_cast<VkShaderModule>( shaderModule ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceKHR const & surfaceKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSurfaceKHR>{}( static_cast<VkSurfaceKHR>( surfaceKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SwapchainKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainKHR const & swapchainKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSwapchainKHR>{}( static_cast<VkSwapchainKHR>( swapchainKHR ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkValidationCacheEXT>{}( static_cast<VkValidationCacheEXT>( validationCacheEXT ) );
+    }
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::VideoSessionKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionKHR const & videoSessionKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkVideoSessionKHR>{}( static_cast<VkVideoSessionKHR>( videoSessionKHR ) );
     }
   };
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
 
-  template <> struct hash<vk::DescriptorPool>
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>
   {
-    std::size_t operator()(vk::DescriptorPool const& descriptorPool) const VULKAN_HPP_NOEXCEPT
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const
+      VULKAN_HPP_NOEXCEPT
     {
-      return std::hash<VkDescriptorPool>{}(static_cast<VkDescriptorPool>(descriptorPool));
+      return std::hash<VkVideoSessionParametersKHR>{}(
+        static_cast<VkVideoSessionParametersKHR>( videoSessionParametersKHR ) );
     }
   };
-
-  template <> struct hash<vk::DescriptorSet>
-  {
-    std::size_t operator()(vk::DescriptorSet const& descriptorSet) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkDescriptorSet>{}(static_cast<VkDescriptorSet>(descriptorSet));
-    }
-  };
-
-  template <> struct hash<vk::DescriptorSetLayout>
-  {
-    std::size_t operator()(vk::DescriptorSetLayout const& descriptorSetLayout) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkDescriptorSetLayout>{}(static_cast<VkDescriptorSetLayout>(descriptorSetLayout));
-    }
-  };
-
-  template <> struct hash<vk::DescriptorUpdateTemplate>
-  {
-    std::size_t operator()(vk::DescriptorUpdateTemplate const& descriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkDescriptorUpdateTemplate>{}(static_cast<VkDescriptorUpdateTemplate>(descriptorUpdateTemplate));
-    }
-  };
-
-  template <> struct hash<vk::Device>
-  {
-    std::size_t operator()(vk::Device const& device) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkDevice>{}(static_cast<VkDevice>(device));
-    }
-  };
-
-  template <> struct hash<vk::DeviceMemory>
-  {
-    std::size_t operator()(vk::DeviceMemory const& deviceMemory) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkDeviceMemory>{}(static_cast<VkDeviceMemory>(deviceMemory));
-    }
-  };
-
-  template <> struct hash<vk::DisplayKHR>
-  {
-    std::size_t operator()(vk::DisplayKHR const& displayKHR) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkDisplayKHR>{}(static_cast<VkDisplayKHR>(displayKHR));
-    }
-  };
-
-  template <> struct hash<vk::DisplayModeKHR>
-  {
-    std::size_t operator()(vk::DisplayModeKHR const& displayModeKHR) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkDisplayModeKHR>{}(static_cast<VkDisplayModeKHR>(displayModeKHR));
-    }
-  };
-
-  template <> struct hash<vk::Event>
-  {
-    std::size_t operator()(vk::Event const& event) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkEvent>{}(static_cast<VkEvent>(event));
-    }
-  };
-
-  template <> struct hash<vk::Fence>
-  {
-    std::size_t operator()(vk::Fence const& fence) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkFence>{}(static_cast<VkFence>(fence));
-    }
-  };
-
-  template <> struct hash<vk::Framebuffer>
-  {
-    std::size_t operator()(vk::Framebuffer const& framebuffer) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkFramebuffer>{}(static_cast<VkFramebuffer>(framebuffer));
-    }
-  };
-
-  template <> struct hash<vk::Image>
-  {
-    std::size_t operator()(vk::Image const& image) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkImage>{}(static_cast<VkImage>(image));
-    }
-  };
-
-  template <> struct hash<vk::ImageView>
-  {
-    std::size_t operator()(vk::ImageView const& imageView) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkImageView>{}(static_cast<VkImageView>(imageView));
-    }
-  };
-
-  template <> struct hash<vk::IndirectCommandsLayoutNV>
-  {
-    std::size_t operator()(vk::IndirectCommandsLayoutNV const& indirectCommandsLayoutNV) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkIndirectCommandsLayoutNV>{}(static_cast<VkIndirectCommandsLayoutNV>(indirectCommandsLayoutNV));
-    }
-  };
-
-  template <> struct hash<vk::Instance>
-  {
-    std::size_t operator()(vk::Instance const& instance) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkInstance>{}(static_cast<VkInstance>(instance));
-    }
-  };
-
-  template <> struct hash<vk::PerformanceConfigurationINTEL>
-  {
-    std::size_t operator()(vk::PerformanceConfigurationINTEL const& performanceConfigurationINTEL) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkPerformanceConfigurationINTEL>{}(static_cast<VkPerformanceConfigurationINTEL>(performanceConfigurationINTEL));
-    }
-  };
-
-  template <> struct hash<vk::PhysicalDevice>
-  {
-    std::size_t operator()(vk::PhysicalDevice const& physicalDevice) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkPhysicalDevice>{}(static_cast<VkPhysicalDevice>(physicalDevice));
-    }
-  };
-
-  template <> struct hash<vk::Pipeline>
-  {
-    std::size_t operator()(vk::Pipeline const& pipeline) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkPipeline>{}(static_cast<VkPipeline>(pipeline));
-    }
-  };
-
-  template <> struct hash<vk::PipelineCache>
-  {
-    std::size_t operator()(vk::PipelineCache const& pipelineCache) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkPipelineCache>{}(static_cast<VkPipelineCache>(pipelineCache));
-    }
-  };
-
-  template <> struct hash<vk::PipelineLayout>
-  {
-    std::size_t operator()(vk::PipelineLayout const& pipelineLayout) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkPipelineLayout>{}(static_cast<VkPipelineLayout>(pipelineLayout));
-    }
-  };
-
-  template <> struct hash<vk::PrivateDataSlotEXT>
-  {
-    std::size_t operator()(vk::PrivateDataSlotEXT const& privateDataSlotEXT) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkPrivateDataSlotEXT>{}(static_cast<VkPrivateDataSlotEXT>(privateDataSlotEXT));
-    }
-  };
-
-  template <> struct hash<vk::QueryPool>
-  {
-    std::size_t operator()(vk::QueryPool const& queryPool) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkQueryPool>{}(static_cast<VkQueryPool>(queryPool));
-    }
-  };
-
-  template <> struct hash<vk::Queue>
-  {
-    std::size_t operator()(vk::Queue const& queue) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkQueue>{}(static_cast<VkQueue>(queue));
-    }
-  };
-
-  template <> struct hash<vk::RenderPass>
-  {
-    std::size_t operator()(vk::RenderPass const& renderPass) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkRenderPass>{}(static_cast<VkRenderPass>(renderPass));
-    }
-  };
-
-  template <> struct hash<vk::Sampler>
-  {
-    std::size_t operator()(vk::Sampler const& sampler) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkSampler>{}(static_cast<VkSampler>(sampler));
-    }
-  };
-
-  template <> struct hash<vk::SamplerYcbcrConversion>
-  {
-    std::size_t operator()(vk::SamplerYcbcrConversion const& samplerYcbcrConversion) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkSamplerYcbcrConversion>{}(static_cast<VkSamplerYcbcrConversion>(samplerYcbcrConversion));
-    }
-  };
-
-  template <> struct hash<vk::Semaphore>
-  {
-    std::size_t operator()(vk::Semaphore const& semaphore) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkSemaphore>{}(static_cast<VkSemaphore>(semaphore));
-    }
-  };
-
-  template <> struct hash<vk::ShaderModule>
-  {
-    std::size_t operator()(vk::ShaderModule const& shaderModule) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkShaderModule>{}(static_cast<VkShaderModule>(shaderModule));
-    }
-  };
-
-  template <> struct hash<vk::SurfaceKHR>
-  {
-    std::size_t operator()(vk::SurfaceKHR const& surfaceKHR) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkSurfaceKHR>{}(static_cast<VkSurfaceKHR>(surfaceKHR));
-    }
-  };
-
-  template <> struct hash<vk::SwapchainKHR>
-  {
-    std::size_t operator()(vk::SwapchainKHR const& swapchainKHR) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkSwapchainKHR>{}(static_cast<VkSwapchainKHR>(swapchainKHR));
-    }
-  };
-
-  template <> struct hash<vk::ValidationCacheEXT>
-  {
-    std::size_t operator()(vk::ValidationCacheEXT const& validationCacheEXT) const VULKAN_HPP_NOEXCEPT
-    {
-      return std::hash<VkValidationCacheEXT>{}(static_cast<VkValidationCacheEXT>(validationCacheEXT));
-    }
-  };
-}
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+} // namespace std
 
 #endif
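What the specializations above buy in practice: each one forwards to std::hash of the underlying C handle, so vulkan.hpp handle types can be used directly as keys of unordered containers. A minimal editorial sketch, not part of the patch; the map and function names are illustrative, and the default `vk` namespace (VULKAN_HPP_NAMESPACE) is assumed:

#include <cstdint>
#include <unordered_map>
#include <vulkan/vulkan.hpp>

// Hypothetical cache keyed by a Vulkan handle; std::hash<vk::ImageView>
// from the section above makes this compile without a custom hasher.
std::unordered_map<vk::ImageView, std::uint32_t> view_slots;

void remember_slot( vk::ImageView view, std::uint32_t slot )
{
  view_slots[view] = slot; // hashes the raw VkImageView value
}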
diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_android.h b/externals/Vulkan-Headers/include/vulkan/vulkan_android.h
index 50ef85f13..2160e3e7c 100755
--- a/externals/Vulkan-Headers/include/vulkan/vulkan_android.h
+++ b/externals/Vulkan-Headers/include/vulkan/vulkan_android.h
@@ -2,7 +2,7 @@
 #define VULKAN_ANDROID_H_ 1
 
 /*
-** Copyright (c) 2015-2020 The Khronos Group Inc.
+** Copyright 2015-2021 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_beta.h b/externals/Vulkan-Headers/include/vulkan/vulkan_beta.h
index 2904234a7..9aebb1abb 100755
--- a/externals/Vulkan-Headers/include/vulkan/vulkan_beta.h
+++ b/externals/Vulkan-Headers/include/vulkan/vulkan_beta.h
@@ -2,7 +2,7 @@
 #define VULKAN_BETA_H_ 1
 
 /*
-** Copyright (c) 2015-2020 The Khronos Group Inc.
+** Copyright 2015-2021 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,408 +19,684 @@
 extern "C" {
 
-#define VK_KHR_deferred_host_operations 1
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR)
-#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 3
-#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations"
-typedef struct VkDeferredOperationInfoKHR {
+#define VK_KHR_video_queue 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR)
+#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION   1
+#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue"
+
+typedef enum VkQueryResultStatusKHR {
+    VK_QUERY_RESULT_STATUS_ERROR_KHR = -1,
+    VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0,
+    VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1,
+    VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkQueryResultStatusKHR;
+
+typedef enum VkVideoCodecOperationFlagBitsKHR {
+    VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR = 0,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT = 0x00010000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT = 0x00000001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT = 0x00000002,
+#endif
+    VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodecOperationFlagBitsKHR;
+typedef VkFlags VkVideoCodecOperationFlagsKHR;
+
+typedef enum VkVideoChromaSubsamplingFlagBitsKHR {
+    VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR = 0,
+    VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001,
+    VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002,
+    VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004,
+    VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008,
+    VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoChromaSubsamplingFlagBitsKHR;
+typedef VkFlags VkVideoChromaSubsamplingFlagsKHR;
+
+typedef enum VkVideoComponentBitDepthFlagBitsKHR {
+    VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoComponentBitDepthFlagBitsKHR;
+typedef VkFlags VkVideoComponentBitDepthFlagsKHR;
+
+typedef enum VkVideoCapabilitiesFlagBitsKHR {
+    VK_VIDEO_CAPABILITIES_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+    VK_VIDEO_CAPABILITIES_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002,
+    VK_VIDEO_CAPABILITIES_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCapabilitiesFlagBitsKHR;
+typedef VkFlags VkVideoCapabilitiesFlagsKHR;
+
+typedef enum VkVideoSessionCreateFlagBitsKHR {
+    VK_VIDEO_SESSION_CREATE_DEFAULT_KHR = 0,
+    VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+    VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoSessionCreateFlagBitsKHR;
+typedef VkFlags VkVideoSessionCreateFlagsKHR;
+typedef VkFlags VkVideoBeginCodingFlagsKHR;
+typedef VkFlags VkVideoEndCodingFlagsKHR;
+
+typedef enum VkVideoCodingControlFlagBitsKHR {
+    VK_VIDEO_CODING_CONTROL_DEFAULT_KHR = 0,
+    VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001,
+    VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodingControlFlagBitsKHR;
+typedef VkFlags VkVideoCodingControlFlagsKHR;
+
+typedef enum VkVideoCodingQualityPresetFlagBitsKHR {
+    VK_VIDEO_CODING_QUALITY_PRESET_DEFAULT_BIT_KHR = 0,
+    VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR = 0x00000001,
+    VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR = 0x00000002,
+    VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR = 0x00000004,
+    VK_VIDEO_CODING_QUALITY_PRESET_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodingQualityPresetFlagBitsKHR;
+typedef VkFlags VkVideoCodingQualityPresetFlagsKHR;
+typedef struct VkVideoQueueFamilyProperties2KHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkVideoCodecOperationFlagsKHR    videoCodecOperations;
+} VkVideoQueueFamilyProperties2KHR;
+
+typedef struct VkVideoProfileKHR {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkVideoCodecOperationFlagBitsKHR    videoCodecOperation;
+    VkVideoChromaSubsamplingFlagsKHR    chromaSubsampling;
+    VkVideoComponentBitDepthFlagsKHR    lumaBitDepth;
+    VkVideoComponentBitDepthFlagsKHR    chromaBitDepth;
+} VkVideoProfileKHR;
+
+typedef struct VkVideoProfilesKHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    uint32_t                    profileCount;
+    const VkVideoProfileKHR*    pProfiles;
+} VkVideoProfilesKHR;
+
+typedef struct VkVideoCapabilitiesKHR {
+    VkStructureType                sType;
+    void*                          pNext;
+    VkVideoCapabilitiesFlagsKHR    capabilityFlags;
+    VkDeviceSize                   minBitstreamBufferOffsetAlignment;
+    VkDeviceSize                   minBitstreamBufferSizeAlignment;
+    VkExtent2D                     videoPictureExtentGranularity;
+    VkExtent2D                     minExtent;
+    VkExtent2D                     maxExtent;
+    uint32_t                       maxReferencePicturesSlotsCount;
+    uint32_t                       maxReferencePicturesActiveCount;
+} VkVideoCapabilitiesKHR;
+
+typedef struct VkPhysicalDeviceVideoFormatInfoKHR {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkImageUsageFlags            imageUsage;
+    const VkVideoProfilesKHR*    pVideoProfiles;
+} VkPhysicalDeviceVideoFormatInfoKHR;
+
+typedef struct VkVideoFormatPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkFormat           format;
+} VkVideoFormatPropertiesKHR;
+
+typedef struct VkVideoPictureResourceKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkOffset2D         codedOffset;
+    VkExtent2D         codedExtent;
+    uint32_t           baseArrayLayer;
+    VkImageView        imageViewBinding;
+} VkVideoPictureResourceKHR;
+
+typedef struct VkVideoReferenceSlotKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    int8_t                              slotIndex;
+    const VkVideoPictureResourceKHR*    pPictureResource;
+} VkVideoReferenceSlotKHR;
+
+typedef struct VkVideoGetMemoryPropertiesKHR {
     VkStructureType           sType;
     const void*               pNext;
-    VkDeferredOperationKHR    operationHandle;
-} VkDeferredOperationInfoKHR;
+    uint32_t                  memoryBindIndex;
+    VkMemoryRequirements2*    pMemoryRequirements;
+} VkVideoGetMemoryPropertiesKHR;
 
-typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation);
-typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator);
-typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation);
-typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation);
-typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation);
+typedef struct VkVideoBindMemoryKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           memoryBindIndex;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+    VkDeviceSize       memorySize;
+} VkVideoBindMemoryKHR;
+
+typedef struct VkVideoSessionCreateInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    uint32_t                        queueFamilyIndex;
+    VkVideoSessionCreateFlagsKHR    flags;
+    const VkVideoProfileKHR*        pVideoProfile;
+    VkFormat                        pictureFormat;
+    VkExtent2D                      maxCodedExtent;
+    VkFormat                        referencePicturesFormat;
+    uint32_t                        maxReferencePicturesSlotsCount;
+    uint32_t                        maxReferencePicturesActiveCount;
+} VkVideoSessionCreateInfoKHR;
+
+typedef struct VkVideoSessionParametersCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkVideoSessionParametersKHR    videoSessionParametersTemplate;
+    VkVideoSessionKHR              videoSession;
+} VkVideoSessionParametersCreateInfoKHR;
+
+typedef struct VkVideoSessionParametersUpdateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           updateSequenceCount;
+} VkVideoSessionParametersUpdateInfoKHR;
+
+typedef struct VkVideoBeginCodingInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkVideoBeginCodingFlagsKHR            flags;
+    VkVideoCodingQualityPresetFlagsKHR    codecQualityPreset;
+    VkVideoSessionKHR                     videoSession;
+    VkVideoSessionParametersKHR           videoSessionParameters;
+    uint32_t                              referenceSlotCount;
+    const VkVideoReferenceSlotKHR*        pReferenceSlots;
+} VkVideoBeginCodingInfoKHR;
+
+typedef struct VkVideoEndCodingInfoKHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkVideoEndCodingFlagsKHR    flags;
+} VkVideoEndCodingInfoKHR;
+
+typedef struct VkVideoCodingControlInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkVideoCodingControlFlagsKHR    flags;
+} VkVideoCodingControlInfoKHR;
+
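Before the dispatch typedefs and prototypes that follow, a hedged editorial sketch of how the structs above compose into a video session. This is not from the patch: it assumes VK_ENABLE_BETA_EXTENSIONS is defined and prototypes are available, that `physicalDevice`, `device`, and `queueFamilyIndex` come from ordinary setup, and it omits the codec-specific profile struct (e.g. VkVideoDecodeH264ProfileEXT) that a real decoder would chain through pNext, as well as format selection via vkGetPhysicalDeviceVideoFormatPropertiesKHR:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>

// Sketch: query capabilities for a decode profile, then create a session
// sized to what the implementation reports.
VkResult CreateDecodeSession( VkPhysicalDevice physicalDevice, VkDevice device,
                              uint32_t queueFamilyIndex, VkVideoSessionKHR* outSession )
{
  VkVideoProfileKHR profile{};
  profile.sType               = VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR;
  profile.videoCodecOperation = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT;
  profile.chromaSubsampling   = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR;
  profile.lumaBitDepth        = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR;
  profile.chromaBitDepth      = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR;

  VkVideoCapabilitiesKHR caps{};
  caps.sType = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR;
  VkResult result = vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, &profile, &caps );
  if ( result != VK_SUCCESS )
    return result;

  VkVideoSessionCreateInfoKHR createInfo{};
  createInfo.sType                           = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR;
  createInfo.queueFamilyIndex                = queueFamilyIndex;
  createInfo.pVideoProfile                   = &profile;
  createInfo.pictureFormat                   = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM; // common 4:2:0 choice
  createInfo.maxCodedExtent                  = caps.maxExtent;
  createInfo.referencePicturesFormat         = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
  createInfo.maxReferencePicturesSlotsCount  = caps.maxReferencePicturesSlotsCount;
  createInfo.maxReferencePicturesActiveCount = caps.maxReferencePicturesActiveCount;
  return vkCreateVideoSessionKHR( device, &createInfo, nullptr, outSession );
}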
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession);
+typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pVideoSessionMemoryRequirementsCount, VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements);
+typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t videoSessionBindMemoryCount, const VkVideoBindMemoryKHR* pVideoSessionBindMemories);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters);
+typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo);
+typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo);
 
 #ifndef VK_NO_PROTOTYPES
-VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR(
-    VkDevice device,
-    const VkAllocationCallbacks* pAllocator,
-    VkDeferredOperationKHR* pDeferredOperation);
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR(
+    VkPhysicalDevice physicalDevice,
+    const VkVideoProfileKHR* pVideoProfile,
+    VkVideoCapabilitiesKHR* pCapabilities);
 
-VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR(
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+    VkPhysicalDevice physicalDevice,
+    const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo,
+    uint32_t* pVideoFormatPropertyCount,
+    VkVideoFormatPropertiesKHR* pVideoFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR(
     VkDevice device,
-    VkDeferredOperationKHR operation,
+    const VkVideoSessionCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkVideoSessionKHR* pVideoSession);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR(
+    VkDevice device,
+    VkVideoSessionKHR videoSession,
     const VkAllocationCallbacks* pAllocator);
 
-VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR(
+VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR(
     VkDevice device,
-    VkDeferredOperationKHR operation);
+    VkVideoSessionKHR videoSession,
+    uint32_t* pVideoSessionMemoryRequirementsCount,
+    VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements);
 
-VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR(
+VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR(
     VkDevice device,
-    VkDeferredOperationKHR operation);
+    VkVideoSessionKHR videoSession,
+    uint32_t videoSessionBindMemoryCount,
+    const VkVideoBindMemoryKHR* pVideoSessionBindMemories);
 
-VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR(
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR(
     VkDevice device,
-    VkDeferredOperationKHR operation);
+    const VkVideoSessionParametersCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkVideoSessionParametersKHR* pVideoSessionParameters);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR(
+    VkDevice device,
+    VkVideoSessionParametersKHR videoSessionParameters,
+    const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR(
+    VkDevice device,
+    VkVideoSessionParametersKHR videoSessionParameters,
+    const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR(
+    VkCommandBuffer commandBuffer,
+    const VkVideoBeginCodingInfoKHR* pBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR(
+    VkCommandBuffer commandBuffer,
+    const VkVideoEndCodingInfoKHR* pEndCodingInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR(
+    VkCommandBuffer commandBuffer,
+    const VkVideoCodingControlInfoKHR* pCodingControlInfo);
 #endif
 
 
-#define VK_KHR_pipeline_library 1
-#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1
-#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library"
-typedef struct VkPipelineLibraryCreateInfoKHR {
-    VkStructureType      sType;
-    const void*          pNext;
-    uint32_t             libraryCount;
-    const VkPipeline*    pLibraries;
-} VkPipelineLibraryCreateInfoKHR;
+#define VK_KHR_video_decode_queue 1
+#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 1
+#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue"
 
-
-
-#define VK_KHR_ray_tracing 1
-#define
VK_KHR_RAY_TRACING_SPEC_VERSION 8 -#define VK_KHR_RAY_TRACING_EXTENSION_NAME "VK_KHR_ray_tracing" - -typedef enum VkAccelerationStructureBuildTypeKHR { - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkAccelerationStructureBuildTypeKHR; -typedef union VkDeviceOrHostAddressKHR { - VkDeviceAddress deviceAddress; - void* hostAddress; -} VkDeviceOrHostAddressKHR; - -typedef union VkDeviceOrHostAddressConstKHR { - VkDeviceAddress deviceAddress; - const void* hostAddress; -} VkDeviceOrHostAddressConstKHR; - -typedef struct VkAccelerationStructureBuildOffsetInfoKHR { - uint32_t primitiveCount; - uint32_t primitiveOffset; - uint32_t firstVertex; - uint32_t transformOffset; -} VkAccelerationStructureBuildOffsetInfoKHR; - -typedef struct VkRayTracingShaderGroupCreateInfoKHR { +typedef enum VkVideoDecodeFlagBitsKHR { + VK_VIDEO_DECODE_DEFAULT_KHR = 0, + VK_VIDEO_DECODE_RESERVED_0_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeFlagBitsKHR; +typedef VkFlags VkVideoDecodeFlagsKHR; +typedef struct VkVideoDecodeInfoKHR { VkStructureType sType; const void* pNext; - VkRayTracingShaderGroupTypeKHR type; - uint32_t generalShader; - uint32_t closestHitShader; - uint32_t anyHitShader; - uint32_t intersectionShader; - const void* pShaderGroupCaptureReplayHandle; -} VkRayTracingShaderGroupCreateInfoKHR; + VkVideoDecodeFlagsKHR flags; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + VkBuffer srcBuffer; + VkDeviceSize srcBufferOffset; + VkDeviceSize srcBufferRange; + VkVideoPictureResourceKHR dstPictureResource; + const VkVideoReferenceSlotKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; +} VkVideoDecodeInfoKHR; -typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t maxPayloadSize; - uint32_t maxAttributeSize; - uint32_t maxCallableSize; -} VkRayTracingPipelineInterfaceCreateInfoKHR; - -typedef struct VkRayTracingPipelineCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - uint32_t stageCount; - const VkPipelineShaderStageCreateInfo* pStages; - uint32_t groupCount; - const VkRayTracingShaderGroupCreateInfoKHR* pGroups; - uint32_t maxRecursionDepth; - VkPipelineLibraryCreateInfoKHR libraries; - const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkRayTracingPipelineCreateInfoKHR; - -typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { - VkStructureType sType; - const void* pNext; - VkFormat vertexFormat; - VkDeviceOrHostAddressConstKHR vertexData; - VkDeviceSize vertexStride; - VkIndexType indexType; - VkDeviceOrHostAddressConstKHR indexData; - VkDeviceOrHostAddressConstKHR transformData; -} VkAccelerationStructureGeometryTrianglesDataKHR; - -typedef struct VkAccelerationStructureGeometryAabbsDataKHR { - VkStructureType sType; - const void* pNext; - VkDeviceOrHostAddressConstKHR data; - VkDeviceSize stride; -} VkAccelerationStructureGeometryAabbsDataKHR; - -typedef struct VkAccelerationStructureGeometryInstancesDataKHR { - VkStructureType sType; - const void* pNext; - VkBool32 arrayOfPointers; - VkDeviceOrHostAddressConstKHR data; -} VkAccelerationStructureGeometryInstancesDataKHR; - -typedef union 
VkAccelerationStructureGeometryDataKHR { - VkAccelerationStructureGeometryTrianglesDataKHR triangles; - VkAccelerationStructureGeometryAabbsDataKHR aabbs; - VkAccelerationStructureGeometryInstancesDataKHR instances; -} VkAccelerationStructureGeometryDataKHR; - -typedef struct VkAccelerationStructureGeometryKHR { - VkStructureType sType; - const void* pNext; - VkGeometryTypeKHR geometryType; - VkAccelerationStructureGeometryDataKHR geometry; - VkGeometryFlagsKHR flags; -} VkAccelerationStructureGeometryKHR; - -typedef struct VkAccelerationStructureBuildGeometryInfoKHR { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureTypeKHR type; - VkBuildAccelerationStructureFlagsKHR flags; - VkBool32 update; - VkAccelerationStructureKHR srcAccelerationStructure; - VkAccelerationStructureKHR dstAccelerationStructure; - VkBool32 geometryArrayOfPointers; - uint32_t geometryCount; - const VkAccelerationStructureGeometryKHR* const* ppGeometries; - VkDeviceOrHostAddressKHR scratchData; -} VkAccelerationStructureBuildGeometryInfoKHR; - -typedef struct VkAccelerationStructureCreateGeometryTypeInfoKHR { - VkStructureType sType; - const void* pNext; - VkGeometryTypeKHR geometryType; - uint32_t maxPrimitiveCount; - VkIndexType indexType; - uint32_t maxVertexCount; - VkFormat vertexFormat; - VkBool32 allowsTransforms; -} VkAccelerationStructureCreateGeometryTypeInfoKHR; - -typedef struct VkAccelerationStructureCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceSize compactedSize; - VkAccelerationStructureTypeKHR type; - VkBuildAccelerationStructureFlagsKHR flags; - uint32_t maxGeometryCount; - const VkAccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos; - VkDeviceAddress deviceAddress; -} VkAccelerationStructureCreateInfoKHR; - -typedef struct VkAccelerationStructureMemoryRequirementsInfoKHR { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureMemoryRequirementsTypeKHR type; - VkAccelerationStructureBuildTypeKHR buildType; - VkAccelerationStructureKHR accelerationStructure; -} VkAccelerationStructureMemoryRequirementsInfoKHR; - -typedef struct VkPhysicalDeviceRayTracingFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 rayTracing; - VkBool32 rayTracingShaderGroupHandleCaptureReplay; - VkBool32 rayTracingShaderGroupHandleCaptureReplayMixed; - VkBool32 rayTracingAccelerationStructureCaptureReplay; - VkBool32 rayTracingIndirectTraceRays; - VkBool32 rayTracingIndirectAccelerationStructureBuild; - VkBool32 rayTracingHostAccelerationStructureCommands; - VkBool32 rayQuery; - VkBool32 rayTracingPrimitiveCulling; -} VkPhysicalDeviceRayTracingFeaturesKHR; - -typedef struct VkPhysicalDeviceRayTracingPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t shaderGroupHandleSize; - uint32_t maxRecursionDepth; - uint32_t maxShaderGroupStride; - uint32_t shaderGroupBaseAlignment; - uint64_t maxGeometryCount; - uint64_t maxInstanceCount; - uint64_t maxPrimitiveCount; - uint32_t maxDescriptorSetAccelerationStructures; - uint32_t shaderGroupHandleCaptureReplaySize; -} VkPhysicalDeviceRayTracingPropertiesKHR; - -typedef struct VkAccelerationStructureDeviceAddressInfoKHR { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureKHR accelerationStructure; -} VkAccelerationStructureDeviceAddressInfoKHR; - -typedef struct VkAccelerationStructureVersionKHR { - VkStructureType sType; - const void* pNext; - const uint8_t* versionData; -} VkAccelerationStructureVersionKHR; - -typedef struct VkStridedBufferRegionKHR { - VkBuffer 
buffer; - VkDeviceSize offset; - VkDeviceSize stride; - VkDeviceSize size; -} VkStridedBufferRegionKHR; - -typedef struct VkTraceRaysIndirectCommandKHR { - uint32_t width; - uint32_t height; - uint32_t depth; -} VkTraceRaysIndirectCommandKHR; - -typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureKHR src; - VkDeviceOrHostAddressKHR dst; - VkCopyAccelerationStructureModeKHR mode; -} VkCopyAccelerationStructureToMemoryInfoKHR; - -typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceOrHostAddressConstKHR src; - VkAccelerationStructureKHR dst; - VkCopyAccelerationStructureModeKHR mode; -} VkCopyMemoryToAccelerationStructureInfoKHR; - -typedef struct VkCopyAccelerationStructureInfoKHR { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureKHR src; - VkAccelerationStructureKHR dst; - VkCopyAccelerationStructureModeKHR mode; -} VkCopyAccelerationStructureInfoKHR; - -typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); -typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsKHR)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); -typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureIndirectKHR)(VkCommandBuffer commandBuffer, const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, VkBuffer indirectBuffer, VkDeviceSize indirectOffset, uint32_t indirectStride); -typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructureKHR)(VkDevice device, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); -typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, const VkCopyAccelerationStructureInfoKHR* pInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); -typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); -typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); -typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); -typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, 
const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth); -typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, const VkStridedBufferRegionKHR* pMissShaderBindingTable, const VkStridedBufferRegionKHR* pHitShaderBindingTable, const VkStridedBufferRegionKHR* pCallableShaderBindingTable, VkBuffer buffer, VkDeviceSize offset); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionKHR* version); +typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pFrameInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( - VkDevice device, - const VkAccelerationStructureCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkAccelerationStructureKHR* pAccelerationStructure); - -VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsKHR( - VkDevice device, - const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo, - VkMemoryRequirements2* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureKHR( +VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, - const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); - -VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureIndirectKHR( - VkCommandBuffer commandBuffer, - const VkAccelerationStructureBuildGeometryInfoKHR* pInfo, - VkBuffer indirectBuffer, - VkDeviceSize indirectOffset, - uint32_t indirectStride); - -VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructureKHR( - VkDevice device, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, - const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos); - -VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( - VkDevice device, - const VkCopyAccelerationStructureInfoKHR* pInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( - VkDevice device, - const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( - VkDevice device, - const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( - VkDevice device, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR* pAccelerationStructures, - VkQueryType queryType, - size_t dataSize, - void* pData, - size_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( - VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureInfoKHR* 
pInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( - VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( - VkCommandBuffer commandBuffer, - const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( - VkCommandBuffer commandBuffer, - const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, - const VkStridedBufferRegionKHR* pMissShaderBindingTable, - const VkStridedBufferRegionKHR* pHitShaderBindingTable, - const VkStridedBufferRegionKHR* pCallableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); - -VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( - VkDevice device, - const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( - VkDevice device, - VkPipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void* pData); - -VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( - VkCommandBuffer commandBuffer, - const VkStridedBufferRegionKHR* pRaygenShaderBindingTable, - const VkStridedBufferRegionKHR* pMissShaderBindingTable, - const VkStridedBufferRegionKHR* pHitShaderBindingTable, - const VkStridedBufferRegionKHR* pCallableShaderBindingTable, - VkBuffer buffer, - VkDeviceSize offset); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( - VkDevice device, - const VkAccelerationStructureVersionKHR* version); + const VkVideoDecodeInfoKHR* pFrameInfo); #endif + +#define VK_KHR_portability_subset 1 +#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" +typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 constantAlphaColorBlendFactors; + VkBool32 events; + VkBool32 imageViewFormatReinterpretation; + VkBool32 imageViewFormatSwizzle; + VkBool32 imageView2DOn3DImage; + VkBool32 multisampleArrayImage; + VkBool32 mutableComparisonSamplers; + VkBool32 pointPolygons; + VkBool32 samplerMipLodBias; + VkBool32 separateStencilMaskRef; + VkBool32 shaderSampleRateInterpolationFunctions; + VkBool32 tessellationIsolines; + VkBool32 tessellationPointMode; + VkBool32 triangleFans; + VkBool32 vertexAttributeAccessBeyondStride; +} VkPhysicalDevicePortabilitySubsetFeaturesKHR; + +typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t minVertexInputBindingStrideAlignment; +} VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + + +#define VK_KHR_video_encode_queue 1 +#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 2 +#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" + +typedef enum VkVideoEncodeFlagBitsKHR { + VK_VIDEO_ENCODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFlagBitsKHR; +typedef VkFlags VkVideoEncodeFlagsKHR; + +typedef enum VkVideoEncodeRateControlFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR = 0, + 
VK_VIDEO_ENCODE_RATE_CONTROL_RESET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlFlagsKHR; + +typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 1, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 2, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlModeFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; +typedef struct VkVideoEncodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFlagsKHR flags; + uint32_t qualityLevel; + VkExtent2D codedExtent; + VkBuffer dstBitstreamBuffer; + VkDeviceSize dstBitstreamBufferOffset; + VkDeviceSize dstBitstreamBufferMaxRange; + VkVideoPictureResourceKHR srcPictureResource; + const VkVideoReferenceSlotKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; +} VkVideoEncodeInfoKHR; + +typedef struct VkVideoEncodeRateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeRateControlFlagsKHR flags; + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; + uint32_t averageBitrate; + uint16_t peakToAverageBitrateRatio; + uint16_t frameRateNumerator; + uint16_t frameRateDenominator; + uint32_t virtualBufferSizeInMs; +} VkVideoEncodeRateControlInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR* pEncodeInfo); +#endif + + +#define VK_EXT_video_encode_h264 1 +#include "vk_video/vulkan_video_codec_h264std.h" +#include "vk_video/vulkan_video_codec_h264std_encode.h" +#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 1 +#define VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_EXT_video_encode_h264" + +typedef enum VkVideoEncodeH264CapabilitiesFlagBitsEXT { + VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00000008, + VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000010, + VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000020, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00000040, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00000080, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00000200, + VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT = 0x00000400, + VK_VIDEO_ENCODE_H264_CAPABILITIES_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264CapabilitiesFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264CapabilitiesFlagsEXT; + +typedef enum VkVideoEncodeH264InputModeFlagBitsEXT { + VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264InputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264InputModeFlagsEXT; + +typedef enum 
VkVideoEncodeH264OutputModeFlagBitsEXT { + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264OutputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT; + +typedef enum VkVideoEncodeH264CreateFlagBitsEXT { + VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT = 0, + VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264CreateFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264CreateFlagsEXT; +typedef struct VkVideoEncodeH264CapabilitiesEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264CapabilitiesFlagsEXT flags; + VkVideoEncodeH264InputModeFlagsEXT inputModeFlags; + VkVideoEncodeH264OutputModeFlagsEXT outputModeFlags; + VkExtent2D minPictureSizeInMbs; + VkExtent2D maxPictureSizeInMbs; + VkExtent2D inputImageDataAlignment; + uint8_t maxNumL0ReferenceForP; + uint8_t maxNumL0ReferenceForB; + uint8_t maxNumL1Reference; + uint8_t qualityLevelCount; + VkExtensionProperties stdExtensionVersion; +} VkVideoEncodeH264CapabilitiesEXT; + +typedef struct VkVideoEncodeH264SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264CreateFlagsEXT flags; + VkExtent2D maxPictureSizeInMbs; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoEncodeH264SessionCreateInfoEXT; + +typedef struct VkVideoEncodeH264SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t spsStdCount; + const StdVideoH264SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH264PictureParameterSet* pPpsStd; +} VkVideoEncodeH264SessionParametersAddInfoEXT; + +typedef struct VkVideoEncodeH264SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoEncodeH264SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoEncodeH264SessionParametersCreateInfoEXT; + +typedef struct VkVideoEncodeH264DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + int8_t slotIndex; + const StdVideoEncodeH264PictureInfo* pStdPictureInfo; +} VkVideoEncodeH264DpbSlotInfoEXT; + +typedef struct VkVideoEncodeH264NaluSliceEXT { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH264SliceHeader* pSliceHeaderStd; + uint32_t mbCount; + uint8_t refFinalList0EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList0Entries; + uint8_t refFinalList1EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList1Entries; + uint32_t precedingNaluBytes; + uint8_t minQp; + uint8_t maxQp; +} VkVideoEncodeH264NaluSliceEXT; + +typedef struct VkVideoEncodeH264VclFrameInfoEXT { + VkStructureType sType; + const void* pNext; + uint8_t refDefaultFinalList0EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList0Entries; + uint8_t refDefaultFinalList1EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList1Entries; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH264NaluSliceEXT* pNaluSliceEntries; + const VkVideoEncodeH264DpbSlotInfoEXT* pCurrentPictureInfo; +} VkVideoEncodeH264VclFrameInfoEXT; + +typedef struct VkVideoEncodeH264EmitPictureParametersEXT { + VkStructureType sType; + const void* pNext; + uint8_t spsId; + VkBool32 emitSpsEnable; + uint32_t ppsIdEntryCount; + const uint8_t* 
ppsIdEntries; +} VkVideoEncodeH264EmitPictureParametersEXT; + +typedef struct VkVideoEncodeH264ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; +} VkVideoEncodeH264ProfileEXT; + + + +#define VK_EXT_video_decode_h264 1 +#include "vk_video/vulkan_video_codec_h264std_decode.h" +#define VK_EXT_VIDEO_DECODE_H264_SPEC_VERSION 1 +#define VK_EXT_VIDEO_DECODE_H264_EXTENSION_NAME "VK_EXT_video_decode_h264" + +typedef enum VkVideoDecodeH264FieldLayoutFlagBitsEXT { + VK_VIDEO_DECODE_H264_PROGRESSIVE_PICTURES_ONLY_EXT = 0, + VK_VIDEO_DECODE_H264_FIELD_LAYOUT_LINE_INTERLACED_PLANE_BIT_EXT = 0x00000001, + VK_VIDEO_DECODE_H264_FIELD_LAYOUT_SEPARATE_INTERLACED_PLANE_BIT_EXT = 0x00000002, + VK_VIDEO_DECODE_H264_FIELD_LAYOUT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoDecodeH264FieldLayoutFlagBitsEXT; +typedef VkFlags VkVideoDecodeH264FieldLayoutFlagsEXT; +typedef VkFlags VkVideoDecodeH264CreateFlagsEXT; +typedef struct VkVideoDecodeH264ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; + VkVideoDecodeH264FieldLayoutFlagsEXT fieldLayout; +} VkVideoDecodeH264ProfileEXT; + +typedef struct VkVideoDecodeH264CapabilitiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxLevel; + VkOffset2D fieldOffsetGranularity; + VkExtensionProperties stdExtensionVersion; +} VkVideoDecodeH264CapabilitiesEXT; + +typedef struct VkVideoDecodeH264SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoDecodeH264CreateFlagsEXT flags; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoDecodeH264SessionCreateInfoEXT; + +typedef struct VkVideoDecodeH264SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t spsStdCount; + const StdVideoH264SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH264PictureParameterSet* pPpsStd; +} VkVideoDecodeH264SessionParametersAddInfoEXT; + +typedef struct VkVideoDecodeH264SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoDecodeH264SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoDecodeH264SessionParametersCreateInfoEXT; + +typedef struct VkVideoDecodeH264PictureInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264PictureInfo* pStdPictureInfo; + uint32_t slicesCount; + const uint32_t* pSlicesDataOffsets; +} VkVideoDecodeH264PictureInfoEXT; + +typedef struct VkVideoDecodeH264MvcEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264Mvc* pStdMvc; +} VkVideoDecodeH264MvcEXT; + +typedef struct VkVideoDecodeH264DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH264DpbSlotInfoEXT; + + + +#define VK_EXT_video_decode_h265 1 +#include "vk_video/vulkan_video_codec_h265std.h" +#include "vk_video/vulkan_video_codec_h265std_decode.h" +#define VK_EXT_VIDEO_DECODE_H265_SPEC_VERSION 1 +#define VK_EXT_VIDEO_DECODE_H265_EXTENSION_NAME "VK_EXT_video_decode_h265" +typedef VkFlags VkVideoDecodeH265CreateFlagsEXT; +typedef struct VkVideoDecodeH265ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoDecodeH265ProfileEXT; + +typedef struct VkVideoDecodeH265CapabilitiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxLevel; + VkExtensionProperties stdExtensionVersion; +} VkVideoDecodeH265CapabilitiesEXT; + +typedef struct 
VkVideoDecodeH265SessionCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkVideoDecodeH265CreateFlagsEXT    flags;
+    const VkExtensionProperties*       pStdExtensionVersion;
+} VkVideoDecodeH265SessionCreateInfoEXT;
+
+typedef struct VkVideoDecodeH265SessionParametersAddInfoEXT {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   spsStdCount;
+    const StdVideoH265SequenceParameterSet*    pSpsStd;
+    uint32_t                                   ppsStdCount;
+    const StdVideoH265PictureParameterSet*     pPpsStd;
+} VkVideoDecodeH265SessionParametersAddInfoEXT;
+
+typedef struct VkVideoDecodeH265SessionParametersCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxSpsStdCount;
+    uint32_t                                               maxPpsStdCount;
+    const VkVideoDecodeH265SessionParametersAddInfoEXT*    pParametersAddInfo;
+} VkVideoDecodeH265SessionParametersCreateInfoEXT;
+
+typedef struct VkVideoDecodeH265PictureInfoEXT {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    StdVideoDecodeH265PictureInfo*    pStdPictureInfo;
+    uint32_t                          slicesCount;
+    const uint32_t*                   pSlicesDataOffsets;
+} VkVideoDecodeH265PictureInfoEXT;
+
+typedef struct VkVideoDecodeH265DpbSlotInfoEXT {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    const StdVideoDecodeH265ReferenceInfo*    pStdReferenceInfo;
+} VkVideoDecodeH265DpbSlotInfoEXT;
+
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_core.h b/externals/Vulkan-Headers/include/vulkan/vulkan_core.h
index 5f7e85aa9..2e9ef74a9 100755
--- a/externals/Vulkan-Headers/include/vulkan/vulkan_core.h
+++ b/externals/Vulkan-Headers/include/vulkan/vulkan_core.h
@@ -2,7 +2,7 @@
 #define VULKAN_CORE_H_ 1
 
 /*
-** Copyright (c) 2015-2020 The Khronos Group Inc.
+** Copyright 2015-2021 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -25,35 +25,71 @@ extern "C" {
 #define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
 
-#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE)
-#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
-        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
-#else
-        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
-#endif
+#ifndef VK_USE_64_BIT_PTR_DEFINES
+    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+        #define VK_USE_64_BIT_PTR_DEFINES 1
+    #else
+        #define VK_USE_64_BIT_PTR_DEFINES 0
+    #endif
 #endif
+
+#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE
+    #if (VK_USE_64_BIT_PTR_DEFINES==1)
+        #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L))
+            #define VK_NULL_HANDLE nullptr
+        #else
+            #define VK_NULL_HANDLE ((void*)0)
+        #endif
+    #else
+        #define VK_NULL_HANDLE 0ULL
+    #endif
+#endif
+#ifndef VK_NULL_HANDLE
+    #define VK_NULL_HANDLE 0
+#endif
+
+
+#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE
+    #if (VK_USE_64_BIT_PTR_DEFINES==1)
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+    #else
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+    #endif
+#endif
+
+// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead.
 #define VK_MAKE_VERSION(major, minor, patch) \
     ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
 
 // DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
 //#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0
 
+#define VK_MAKE_API_VERSION(variant, major, minor, patch) \
+    ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
 // Vulkan 1.0 version number
-#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0
+#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
 
 // Version of this file
-#define VK_HEADER_VERSION 145
+#define VK_HEADER_VERSION 180
 
 // Complete version of this file
-#define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
+#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 2, VK_HEADER_VERSION)
 
+// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead.
 #define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22)
-#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
-#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
-#define VK_NULL_HANDLE 0
+// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead.
+#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU)
+// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead.
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU)
+
+#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29)
+#define VK_API_VERSION_MAJOR(version)   (((uint32_t)(version) >> 22) & 0x7FU)
+#define VK_API_VERSION_MINOR(version)   (((uint32_t)(version) >> 12) & 0x3FFU)
+#define VK_API_VERSION_PATCH(version)   ((uint32_t)(version) & 0xFFFU)
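Since the deprecation comments steer callers from VK_MAKE_VERSION to the variant-aware macros, a small self-contained round-trip check may help; this is an editorial sketch, not part of the header, and the expected values follow directly from the shifts and masks defined above:

#include <cstdint>
#include <cstdio>
#include <vulkan/vulkan_core.h>

int main()
{
  // VK_HEADER_VERSION_COMPLETE is defined above as VK_MAKE_API_VERSION(0, 1, 2, VK_HEADER_VERSION).
  std::uint32_t version = VK_MAKE_API_VERSION( 0, 1, 2, VK_HEADER_VERSION );
  std::printf( "variant %u, version %u.%u.%u\n",
               VK_API_VERSION_VARIANT( version ),  // 0
               VK_API_VERSION_MAJOR( version ),    // 1
               VK_API_VERSION_MINOR( version ),    // 2
               VK_API_VERSION_PATCH( version ) );  // 180
  return 0;
}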
 typedef uint32_t VkBool32;
 typedef uint64_t VkDeviceAddress;
 typedef uint64_t VkDeviceSize;
@@ -85,20 +121,20 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
 #define VK_ATTACHMENT_UNUSED              (~0U)
-#define VK_FALSE                          0
-#define VK_LOD_CLAMP_NONE                 1000.0f
+#define VK_FALSE                          0U
+#define VK_LOD_CLAMP_NONE                 1000.0F
 #define VK_QUEUE_FAMILY_IGNORED           (~0U)
 #define VK_REMAINING_ARRAY_LAYERS         (~0U)
 #define VK_REMAINING_MIP_LEVELS           (~0U)
 #define VK_SUBPASS_EXTERNAL               (~0U)
-#define VK_TRUE                           1
+#define VK_TRUE                           1U
 #define VK_WHOLE_SIZE                     (~0ULL)
-#define VK_MAX_MEMORY_TYPES               32
-#define VK_MAX_MEMORY_HEAPS               16
-#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE  256
-#define VK_UUID_SIZE                      16
-#define VK_MAX_EXTENSION_NAME_SIZE        256
-#define VK_MAX_DESCRIPTION_SIZE           256
+#define VK_MAX_MEMORY_TYPES               32U
+#define VK_MAX_MEMORY_HEAPS               16U
+#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE  256U
+#define VK_UUID_SIZE                      16U
+#define VK_MAX_EXTENSION_NAME_SIZE        256U
+#define VK_MAX_DESCRIPTION_SIZE           256U
 
 typedef enum VkResult {
     VK_SUCCESS = 0,
@@ -131,7 +167,6 @@ typedef enum VkResult {
     VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
     VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
     VK_ERROR_INVALID_SHADER_NV = -1000012000,
-    VK_ERROR_INCOMPATIBLE_VERSION_KHR = -1000150000,
     VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
     VK_ERROR_NOT_PERMITTED_EXT = -1000174001,
     VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
@@ -335,14 +370,119 @@ typedef enum VkStructureType {
     VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000,
     VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001,
     VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT =
1000022002, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR = 1000023000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR = 1000023002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR = 1000023003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR = 1000023004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR = 1000023011, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000023012, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR = 1000023013, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000, +#endif VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, + VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, + VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, + VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT = 1000038000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT = 1000038001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT = 1000038006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT = 1000038007, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT = 1000038008, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT = 1000040000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT = 1000040001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT = 1000040002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT = 1000040003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT = 1000040004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000040005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000040006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT = 1000040007, +#endif VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, @@ -438,37 +578,43 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, - VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR = 1000165006, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000165007, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000, - VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR = 1000150001, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006, - VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR = 1000150008, - VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR = 1000150009, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009, VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010, VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011, VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR = 1000150013, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001, VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 
1000150016, - VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013, VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000, - VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001, +#endif VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, @@ -478,6 +624,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, @@ -494,6 +642,27 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000, VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT = 1000187000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT = 1000187001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000187002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000187003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT = 1000187004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT = 1000187005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT = 1000187006, +#endif VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001, @@ -520,6 +689,7 @@ typedef enum VkStructureType { 
VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000, VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001, VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = 1000215000, VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, @@ -527,8 +697,14 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = 1000225000, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = 1000225001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = 1000225002, + VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, @@ -546,6 +722,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001, @@ -553,9 +732,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, - VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR = 1000268000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, @@ -571,10 +750,15 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 1000281001, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000, + VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001, VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000, @@ -585,8 +769,64 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = 1000295001, VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = 1000295002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = 1000297000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001, +#endif VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = 1000314000, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = 1000314001, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = 1000314002, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = 1000314003, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = 1000314004, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = 1000314005, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = 1000314006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = 1000314007, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = 1000325000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001, + VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = 1000335000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = 1000337000, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = 1000337001, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = 1000337002, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = 1000337003, + 
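+// Each sType value added above identifies an extension struct on a pNext
+// chain. A minimal sketch querying one of the new feature structs, assuming a
+// valid VkPhysicalDevice named physicalDevice and a Vulkan 1.1+ instance
+// (hypothetical application code):
+//
+//     VkPhysicalDeviceSynchronization2FeaturesKHR sync2 = {0};
+//     sync2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR;
+//     VkPhysicalDeviceFeatures2 features2 = {0};
+//     features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+//     features2.pNext = &sync2;
+//     vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
+//     /* sync2.synchronization2 now reports VK_KHR_synchronization2 support */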
VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = 1000337004, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = 1000337005, + VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = 1000337006, + VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = 1000337007, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = 1000337008, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = 1000337009, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = 1000337010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = 1000351000, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = 1000351002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000, + VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001, + VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, + VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, + VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000, + VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = 1000388000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = 1000388001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, @@ -670,8 +910,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, - VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, @@ -723,11 +961,32 @@ typedef enum VkImageLayout { VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002, VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002, +#endif VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, - VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, + VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, +#endif + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = 1000314000, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = 1000314001, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, @@ -769,16 +1028,24 @@ typedef enum VkObjectType { VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001, +#endif + VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000, + VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001, VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, - VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = 1000295000, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, - VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF } VkObjectType; @@ -788,6 +1055,7 @@ typedef enum VkVendorId { VK_VENDOR_ID_KAZAN = 0x10003, VK_VENDOR_ID_CODEPLAY = 0x10004, VK_VENDOR_ID_MESA = 0x10005, + VK_VENDOR_ID_POCL = 0x10006, VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF } VkVendorId; @@ -1052,6 +1320,12 @@ typedef enum VkFormat { VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = 1000066011, VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = 1000066012, VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = 1000066013, + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = 1000330000, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = 1000330001, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = 1000330002, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = 1000330003, + VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = 1000340000, + 
VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = 1000340001, VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, @@ -1116,12 +1390,18 @@ typedef enum VkQueryType { VK_QUERY_TYPE_OCCLUSION = 0, VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, VK_QUERY_TYPE_TIMESTAMP = 2, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000, +#endif VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, - VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000165000, - VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, - VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR = 1000299000, +#endif VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF } VkQueryType; @@ -1256,9 +1536,11 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000, VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004, VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, + VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000, VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000, VK_DYNAMIC_STATE_CULL_MODE_EXT = 1000267000, VK_DYNAMIC_STATE_FRONT_FACE_EXT = 1000267001, @@ -1272,6 +1554,13 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = 1000267009, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = 1000267010, VK_DYNAMIC_STATE_STENCIL_OP_EXT = 1000267011, + VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000, + VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = 1000377001, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = 1000377002, + VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = 1000377004, + VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000, VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF } VkDynamicState; @@ -1391,8 +1680,9 @@ typedef enum VkDescriptorType { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000, - VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000165000, - VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = 1000351000, VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF } VkDescriptorType; @@ -1464,10 +1754,12 @@ typedef enum VkAccessFlagBits { VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000, VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000, - VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000, 
VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, + VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000, VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000, VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000, + VK_ACCESS_NONE_KHR = 0, + VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -1518,8 +1810,21 @@ typedef enum VkFormatFeatureFlagBits { VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000, +#endif VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, + VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000, +#endif VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, @@ -1582,8 +1887,27 @@ typedef enum VkImageUsageFlagBits { VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, - VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, +#endif VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, + VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, +#endif + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageUsageFlagBits; typedef VkFlags VkImageUsageFlags; @@ -1616,6 +1940,12 @@ typedef enum VkQueueFlagBits { VK_QUEUE_TRANSFER_BIT = 0x00000004, VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, VK_QUEUE_PROTECTED_BIT = 0x00000010, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040, +#endif VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkQueueFlagBits; typedef VkFlags VkQueueFlags; @@ -1647,13 +1977,15 @@ typedef enum VkPipelineStageFlagBits { VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000, 
VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, - VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, - VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000, VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000, VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000, + VK_PIPELINE_STAGE_NONE_KHR = 0, + VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -1681,6 +2013,11 @@ typedef enum VkFenceCreateFlagBits { } VkFenceCreateFlagBits; typedef VkFlags VkFenceCreateFlags; typedef VkFlags VkSemaphoreCreateFlags; + +typedef enum VkEventCreateFlagBits { + VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = 0x00000001, + VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkEventCreateFlagBits; typedef VkFlags VkEventCreateFlags; typedef enum VkQueryPipelineStatisticFlagBits { @@ -1705,6 +2042,9 @@ typedef enum VkQueryResultFlagBits { VK_QUERY_RESULT_WAIT_BIT = 0x00000002, VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010, +#endif VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkQueryResultFlagBits; typedef VkFlags VkQueryResultFlags; @@ -1732,11 +2072,25 @@ typedef enum VkBufferUsageFlagBits { VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000, +#endif VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, - VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR = 0x00000400, - VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, + VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000, +#endif + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -1746,6 +2100,7 @@ typedef VkFlags VkBufferViewCreateFlags; typedef enum VkImageViewCreateFlagBits { VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, 
VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageViewCreateFlagBits; typedef VkFlags VkImageViewCreateFlags; @@ -1782,6 +2137,7 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, @@ -1859,6 +2215,7 @@ typedef VkFlags VkSamplerCreateFlags; typedef enum VkDescriptorPoolCreateFlagBits { VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = 0x00000004, VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkDescriptorPoolCreateFlagBits; @@ -1868,6 +2225,7 @@ typedef VkFlags VkDescriptorPoolResetFlags; typedef enum VkDescriptorSetLayoutCreateFlagBits { VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = 0x00000004, VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkDescriptorSetLayoutCreateFlagBits; @@ -4008,13 +4366,13 @@ VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( #define VK_VERSION_1_1 1 // Vulkan 1.1 version number -#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0 +#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) -#define VK_MAX_DEVICE_GROUP_SIZE 32 -#define VK_LUID_SIZE 8 -#define VK_QUEUE_FAMILY_EXTERNAL (~0U-1) +#define VK_MAX_DEVICE_GROUP_SIZE 32U +#define VK_LUID_SIZE 8U +#define VK_QUEUE_FAMILY_EXTERNAL (~1U) typedef enum VkPointClippingBehavior { VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, @@ -4121,6 +4479,7 @@ typedef enum VkExternalMemoryHandleTypeFlagBits { VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, @@ -4185,6 +4544,8 @@ typedef enum VkExternalSemaphoreHandleTypeFlagBits { VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = 
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, @@ -4869,10 +5230,10 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( #define VK_VERSION_1_2 1 // Vulkan 1.2 version number -#define VK_API_VERSION_1_2 VK_MAKE_VERSION(1, 2, 0)// Patch version should always be set to 0 +#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 -#define VK_MAX_DRIVER_NAME_SIZE 256 -#define VK_MAX_DRIVER_INFO_SIZE 256 +#define VK_MAX_DRIVER_NAME_SIZE 256U +#define VK_MAX_DRIVER_INFO_SIZE 256U typedef enum VkDriverId { VK_DRIVER_ID_AMD_PROPRIETARY = 1, @@ -4888,6 +5249,9 @@ typedef enum VkDriverId { VK_DRIVER_ID_GGP_PROPRIETARY = 11, VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12, VK_DRIVER_ID_MESA_LLVMPIPE = 13, + VK_DRIVER_ID_MOLTENVK = 14, + VK_DRIVER_ID_COREAVI_PROPRIETARY = 15, + VK_DRIVER_ID_JUICE_PROPRIETARY = 16, VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, @@ -6404,7 +6768,7 @@ typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeature #define VK_KHR_incremental_present 1 -#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 #define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" typedef struct VkRectLayerKHR { VkOffset2D offset; @@ -6649,8 +7013,10 @@ typedef enum VkPerformanceCounterStorageKHR { } VkPerformanceCounterStorageKHR; typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { - VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = 0x00000001, - VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = 0x00000002, + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkPerformanceCounterDescriptionFlagBitsKHR; typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; @@ -7167,6 +7533,96 @@ typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryMo +#define VK_KHR_shader_terminate_invocation 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" +typedef struct VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderTerminateInvocation; +} VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; + + + +#define VK_KHR_fragment_shading_rate 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" + +typedef enum VkFragmentShadingRateCombinerOpKHR { + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, + 
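+// The combiner ops in this enum select how the pipeline, primitive and
+// attachment shading rates are merged. A minimal per-draw sketch, assuming a
+// command buffer in the recording state with VK_KHR_fragment_shading_rate
+// enabled (hypothetical application code):
+//
+//     VkExtent2D fragmentSize = {2, 2}; /* shade once per 2x2 pixel block */
+//     VkFragmentShadingRateCombinerOpKHR ops[2] = {
+//         VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+//         VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR};
+//     vkCmdSetFragmentShadingRateKHR(commandBuffer, &fragmentSize, ops);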
VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkFragmentShadingRateCombinerOpKHR; +typedef struct VkFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + const VkAttachmentReference2* pFragmentShadingRateAttachment; + VkExtent2D shadingRateAttachmentTexelSize; +} VkFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D fragmentSize; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineFragmentShadingRate; + VkBool32 primitiveFragmentShadingRate; + VkBool32 attachmentFragmentShadingRate; +} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentShadingRateAttachmentTexelSize; + VkExtent2D maxFragmentShadingRateAttachmentTexelSize; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; + VkBool32 primitiveFragmentShadingRateWithMultipleViewports; + VkBool32 layeredShadingRateAttachments; + VkBool32 fragmentShadingRateNonTrivialCombinerOps; + VkExtent2D maxFragmentSize; + uint32_t maxFragmentSizeAspectRatio; + uint32_t maxFragmentShadingRateCoverageSamples; + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; + VkBool32 fragmentShadingRateWithSampleMask; + VkBool32 fragmentShadingRateWithShaderSampleMask; + VkBool32 fragmentShadingRateWithConservativeRasterization; + VkBool32 fragmentShadingRateWithFragmentShaderInterlock; + VkBool32 fragmentShadingRateWithCustomSampleLocations; + VkBool32 fragmentShadingRateStrictMultiplyCombiner; +} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateKHR { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleCounts; + VkExtent2D fragmentSize; +} VkPhysicalDeviceFragmentShadingRateKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( + VkCommandBuffer commandBuffer, + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + #define VK_KHR_spirv_1_4 1 #define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 #define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" @@ -7233,6 +7689,41 @@ VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( #endif +#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 
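+// A deferred operation lets the host spread expensive driver work (e.g. ray
+// tracing pipeline or acceleration structure builds) across caller threads.
+// A minimal single-threaded join loop, assuming a valid VkDevice named device
+// (hypothetical application code; VK_THREAD_IDLE_KHR is the VkResult a join
+// returns while the remaining work is temporarily unavailable to this thread):
+//
+//     VkDeferredOperationKHR op = VK_NULL_HANDLE;
+//     vkCreateDeferredOperationKHR(device, NULL, &op);
+//     /* pass op to a deferrable command, e.g. vkCreateRayTracingPipelinesKHR */
+//     VkResult join;
+//     do { join = vkDeferredOperationJoinKHR(device, op); } while (join == VK_THREAD_IDLE_KHR);
+//     VkResult result = vkGetDeferredOperationResultKHR(device, op);
+//     vkDestroyDeferredOperationKHR(device, op, NULL);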
+#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); +#endif + + #define VK_KHR_pipeline_executable_properties 1 #define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 #define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" @@ -7323,14 +7814,474 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR #endif +#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; + + + #define VK_KHR_shader_non_semantic_info 1 #define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 #define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" +#define VK_KHR_synchronization2 1 +typedef uint64_t VkFlags64; +#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 +#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" +typedef VkFlags64 VkPipelineStageFlags2KHR; + +// Flag bits for VkPipelineStageFlagBits2KHR +typedef VkFlags64 VkPipelineStageFlagBits2KHR; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const 
VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; +#endif +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL; +static const VkPipelineStageFlagBits2KHR VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; + +typedef VkFlags64 VkAccessFlags2KHR; + +// Flag bits for VkAccessFlagBits2KHR +typedef VkFlags64 VkAccessFlagBits2KHR; +static const VkAccessFlagBits2KHR 
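+// These synchronization2 bits are static const VkFlags64 values rather than
+// enumerators because several of them (VK_PIPELINE_STAGE_2_COPY_BIT_KHR and
+// friends at 0x100000000ULL and above) do not fit in the int-sized constants
+// a C enum requires. They still combine like ordinary flags, e.g.
+// (hypothetical usage):
+//
+//     VkPipelineStageFlags2KHR stages = VK_PIPELINE_STAGE_2_COPY_BIT_KHR |
+//                                       VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR;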
VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2KHR VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; +#endif +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000; +static const VkAccessFlagBits2KHR VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 
0x00400000; +static const VkAccessFlagBits2KHR VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL; +static const VkAccessFlagBits2KHR VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL; + + +typedef enum VkSubmitFlagBitsKHR { + VK_SUBMIT_PROTECTED_BIT_KHR = 0x00000001, + VK_SUBMIT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSubmitFlagBitsKHR; +typedef VkFlags VkSubmitFlagsKHR; +typedef struct VkMemoryBarrier2KHR { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2KHR srcStageMask; + VkAccessFlags2KHR srcAccessMask; + VkPipelineStageFlags2KHR dstStageMask; + VkAccessFlags2KHR dstAccessMask; +} VkMemoryBarrier2KHR; + +typedef struct VkBufferMemoryBarrier2KHR { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2KHR srcStageMask; + VkAccessFlags2KHR srcAccessMask; + VkPipelineStageFlags2KHR dstStageMask; + VkAccessFlags2KHR dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier2KHR; + +typedef struct VkImageMemoryBarrier2KHR { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2KHR srcStageMask; + VkAccessFlags2KHR srcAccessMask; + VkPipelineStageFlags2KHR dstStageMask; + VkAccessFlags2KHR dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier2KHR; + +typedef struct VkDependencyInfoKHR { + VkStructureType sType; + const void* pNext; + VkDependencyFlags dependencyFlags; + uint32_t memoryBarrierCount; + const VkMemoryBarrier2KHR* pMemoryBarriers; + uint32_t bufferMemoryBarrierCount; + const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers; + uint32_t imageMemoryBarrierCount; + const VkImageMemoryBarrier2KHR* pImageMemoryBarriers; +} VkDependencyInfoKHR; + +typedef struct VkSemaphoreSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; + VkPipelineStageFlags2KHR stageMask; + uint32_t deviceIndex; +} VkSemaphoreSubmitInfoKHR; + +typedef struct VkCommandBufferSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + VkCommandBuffer commandBuffer; + uint32_t deviceMask; +} VkCommandBufferSubmitInfoKHR; + +typedef struct VkSubmitInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSubmitFlagsKHR flags; + uint32_t waitSemaphoreInfoCount; + const VkSemaphoreSubmitInfoKHR* pWaitSemaphoreInfos; + uint32_t commandBufferInfoCount; + const VkCommandBufferSubmitInfoKHR* pCommandBufferInfos; + uint32_t signalSemaphoreInfoCount; + const VkSemaphoreSubmitInfoKHR* pSignalSemaphoreInfos; +} VkSubmitInfo2KHR; + +typedef struct VkPhysicalDeviceSynchronization2FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 synchronization2; +} VkPhysicalDeviceSynchronization2FeaturesKHR; + +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2KHR checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2KHR stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR 
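+// A minimal synchronization2 image barrier sketch, assuming a recording
+// command buffer, a VkImage named image with a subresource range named range,
+// and the synchronization2 feature enabled (hypothetical application code):
+//
+//     VkImageMemoryBarrier2KHR barrier = {0};
+//     barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR;
+//     barrier.srcStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;
+//     barrier.srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
+//     barrier.dstStageMask = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
+//     barrier.dstAccessMask = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR;
+//     barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+//     barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+//     barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+//     barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+//     barrier.image = image;
+//     barrier.subresourceRange = range;
+//     VkDependencyInfoKHR dep = {0};
+//     dep.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
+//     dep.imageMemoryBarrierCount = 1;
+//     dep.pImageMemoryBarriers = &barrier;
+//     vkCmdPipelineBarrier2KHR(commandBuffer, &dep);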
stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfoKHR* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2KHR stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfoKHR* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( + VkCommandBuffer commandBuffer, + const VkDependencyInfoKHR* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2KHR* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); +#endif + + +#define VK_KHR_shader_subgroup_uniform_control_flow 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" +typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupUniformControlFlow; +} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + + +#define VK_KHR_zero_initialize_workgroup_memory 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" +typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderZeroInitializeWorkgroupMemory; +} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + + + +#define VK_KHR_workgroup_memory_explicit_layout 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" +typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 workgroupMemoryExplicitLayout; + VkBool32 
workgroupMemoryExplicitLayoutScalarBlockLayout; + VkBool32 workgroupMemoryExplicitLayout8BitAccess; + VkBool32 workgroupMemoryExplicitLayout16BitAccess; +} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + + +#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef struct VkBufferCopy2KHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy2KHR; + +typedef struct VkCopyBufferInfo2KHR { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferCopy2KHR* pRegions; +} VkCopyBufferInfo2KHR; + +typedef struct VkImageCopy2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy2KHR; + +typedef struct VkCopyImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2KHR* pRegions; +} VkCopyImageInfo2KHR; + +typedef struct VkBufferImageCopy2KHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy2KHR; + +typedef struct VkCopyBufferToImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkBufferImageCopy2KHR* pRegions; +} VkCopyBufferToImageInfo2KHR; + +typedef struct VkCopyImageToBufferInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferImageCopy2KHR* pRegions; +} VkCopyImageToBufferInfo2KHR; + +typedef struct VkImageBlit2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit2KHR; + +typedef struct VkBlitImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageBlit2KHR* pRegions; + VkFilter filter; +} VkBlitImageInfo2KHR; + +typedef struct VkImageResolve2KHR { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve2KHR; + +typedef struct VkResolveImageInfo2KHR { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageResolve2KHR* pRegions; +} VkResolveImageInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const 
VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2KHR* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2KHR* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2KHR* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2KHR* pResolveImageInfo); +#endif + + #define VK_EXT_debug_report 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) -#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 9 +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 #define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" typedef enum VkDebugReportObjectTypeEXT { @@ -7368,12 +8319,14 @@ typedef enum VkDebugReportObjectTypeEXT { VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF } VkDebugReportObjectTypeEXT; @@ -7646,6 +8599,77 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( #endif +#define VK_NVX_binary_import 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1 +#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" +typedef struct VkCuModuleCreateInfoNVX { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCuModuleCreateInfoNVX; + +typedef struct VkCuFunctionCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuModuleNVX module; + const char* pName; +} 
VkCuFunctionCreateInfoNVX; + +typedef struct VkCuLaunchInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuFunctionNVX function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCuLaunchInfoNVX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( + VkDevice device, + const VkCuModuleCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuModuleNVX* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( + VkDevice device, + const VkCuFunctionCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuFunctionNVX* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( + VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( + VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( + VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX* pLaunchInfo); +#endif + + #define VK_NVX_image_view_handle 1 #define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 #define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" @@ -7996,7 +9020,8 @@ VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( #define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" typedef enum VkSurfaceCounterFlagBitsEXT { - VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, + VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF } VkSurfaceCounterFlagBitsEXT; typedef VkFlags VkSurfaceCounterFlagsEXT; @@ -8340,7 +9365,7 @@ VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( #define VK_EXT_queue_family_foreign 1 #define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 #define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" -#define VK_QUEUE_FAMILY_FOREIGN_EXT (~0U-2) +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) #define VK_EXT_debug_utils 1 @@ -8963,9 +9988,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( #define VK_NV_ray_tracing 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) -typedef VkAccelerationStructureKHR VkAccelerationStructureNV; - +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) #define VK_NV_RAY_TRACING_SPEC_VERSION 3 #define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" #define VK_SHADER_UNUSED_KHR (~0U) @@ -8986,7 +10009,7 @@ typedef VkRayTracingShaderGroupTypeKHR 
VkRayTracingShaderGroupTypeNV; typedef enum VkGeometryTypeKHR { VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, VK_GEOMETRY_TYPE_AABBS_KHR = 1, - VK_GEOMETRY_TYPE_INSTANCES_KHR = 1000150000, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF @@ -8997,6 +10020,7 @@ typedef VkGeometryTypeKHR VkGeometryTypeNV; typedef enum VkAccelerationStructureTypeKHR { VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF @@ -9016,17 +10040,12 @@ typedef enum VkCopyAccelerationStructureModeKHR { typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; -typedef enum VkAccelerationStructureMemoryRequirementsTypeKHR { - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR = 0, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR = 1, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR = 2, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkAccelerationStructureMemoryRequirementsTypeKHR; -typedef VkAccelerationStructureMemoryRequirementsTypeKHR VkAccelerationStructureMemoryRequirementsTypeNV; - +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; typedef enum VkGeometryFlagBitsKHR { VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, @@ -9155,26 +10174,22 @@ typedef struct VkAccelerationStructureCreateInfoNV { VkAccelerationStructureInfoNV info; } VkAccelerationStructureCreateInfoNV; -typedef struct VkBindAccelerationStructureMemoryInfoKHR { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureKHR accelerationStructure; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; -} VkBindAccelerationStructureMemoryInfoKHR; +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoNV; -typedef VkBindAccelerationStructureMemoryInfoKHR VkBindAccelerationStructureMemoryInfoNV; - -typedef struct VkWriteDescriptorSetAccelerationStructureKHR { - VkStructureType sType; - const void* pNext; - uint32_t accelerationStructureCount; - const VkAccelerationStructureKHR* 
pAccelerationStructures; -} VkWriteDescriptorSetAccelerationStructureKHR; - -typedef VkWriteDescriptorSetAccelerationStructureKHR VkWriteDescriptorSetAccelerationStructureNV; +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { VkStructureType sType; @@ -9225,20 +10240,17 @@ typedef struct VkAccelerationStructureInstanceKHR { typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); -typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryKHR)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); -typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); -typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkBuffer scratch, VkDeviceSize scratchOffset); -typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureKHR dst, VkAccelerationStructureKHR src, VkCopyAccelerationStructureModeKHR mode); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize 
hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); -typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); #ifndef VK_NO_PROTOTYPES @@ -9248,14 +10260,9 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); -VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( - VkDevice device, - VkAccelerationStructureKHR accelerationStructure, - const VkAllocationCallbacks* pAllocator); - VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureKHR accelerationStructure, + VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( @@ -9263,15 +10270,10 @@ VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryKHR( - VkDevice device, - uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); - VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos); + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, @@ -9279,15 +10281,15 @@ VKAPI_ATTR void VKAPI_CALL 
vkCmdBuildAccelerationStructureNV( VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, - VkAccelerationStructureKHR dst, - VkAccelerationStructureKHR src, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureKHR dst, - VkAccelerationStructureKHR src, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( @@ -9333,22 +10335,14 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureKHR accelerationStructure, + VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); -VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( - VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR* pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery); - VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR* pAccelerationStructures, + const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); @@ -9484,7 +10478,7 @@ typedef struct VkPipelineCompilerControlCreateInfoAMD { #define VK_EXT_calibrated_timestamps 1 -#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 #define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" typedef enum VkTimeDomainEXT { @@ -10061,6 +11055,18 @@ typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + + #define VK_EXT_memory_budget 1 #define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 #define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" @@ -10174,7 +11180,7 @@ typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; #define VK_EXT_validation_features 1 -#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 3 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 5 #define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" typedef enum VkValidationFeatureEnableEXT { @@ -10182,6 +11188,7 @@ typedef enum VkValidationFeatureEnableEXT { VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF } VkValidationFeatureEnableEXT; @@ -10193,6 +11200,7 @@ typedef enum VkValidationFeatureDisableEXT { VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, + 
VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7, VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF } VkValidationFeatureDisableEXT; typedef struct VkValidationFeaturesEXT { @@ -10334,6 +11342,37 @@ typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { +#define VK_EXT_provoking_vertex 1 +#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 +#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" + +typedef enum VkProvokingVertexModeEXT { + VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, + VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, + VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkProvokingVertexModeEXT; +typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexLast; + VkBool32 transformFeedbackPreservesProvokingVertex; +} VkPhysicalDeviceProvokingVertexFeaturesEXT; + +typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexModePerPipeline; + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; +} VkPhysicalDeviceProvokingVertexPropertiesEXT; + +typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkProvokingVertexModeEXT provokingVertexMode; +} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + + #define VK_EXT_headless_surface 1 #define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 #define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" @@ -10402,6 +11441,28 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( #endif +#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + #define VK_EXT_host_query_reset 1 #define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 #define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" @@ -10714,6 +11775,25 @@ VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( #endif +#define VK_NV_inherited_viewport_scissor 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" +typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 inheritedViewportScissor2D; +} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + +typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportScissor2D; + uint32_t viewportDepthCount; + const VkViewport* pViewportDepths; +} VkCommandBufferInheritanceViewportScissorInfoNV; + + + #define VK_EXT_texel_buffer_alignment 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" @@ -10735,7 +11815,7 @@ 
typedef struct VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT { #define VK_QCOM_render_pass_transform 1 -#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 2 #define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" typedef struct VkRenderPassTransformBeginInfoQCOM { VkStructureType sType; @@ -10752,6 +11832,51 @@ typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { +#define VK_EXT_device_memory_report 1 +#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" + +typedef enum VkDeviceMemoryReportEventTypeEXT { + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceMemoryReportEventTypeEXT; +typedef VkFlags VkDeviceMemoryReportFlagsEXT; +typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceMemoryReport; +} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + +typedef struct VkDeviceMemoryReportCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + VkDeviceMemoryReportEventTypeEXT type; + uint64_t memoryObjectId; + VkDeviceSize size; + VkObjectType objectType; + uint64_t objectHandle; + uint32_t heapIndex; +} VkDeviceMemoryReportCallbackDataEXT; + +typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback; + void* pUserData; +} VkDeviceDeviceMemoryReportCreateInfoEXT; + + + #define VK_EXT_robustness2 1 #define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 #define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" @@ -10899,8 +12024,739 @@ typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { #define VK_QCOM_render_pass_store_ops 1 -#define VK_QCOM_render_pass_store_ops_SPEC_VERSION 2 -#define VK_QCOM_render_pass_store_ops_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" +#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" + + +#define VK_NV_fragment_shading_rate_enums 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" + +typedef enum VkFragmentShadingRateTypeNV { + VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, + VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, + VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateTypeNV; + +typedef enum VkFragmentShadingRateNV { + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, + 
VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, + VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, + VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, + VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, + VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, + VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShadingRateEnums; + VkBool32 supersampleFragmentShadingRates; + VkBool32 noInvocationFragmentShadingRates; +} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { + VkStructureType sType; + void* pNext; + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; +} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkFragmentShadingRateTypeNV shadingRateType; + VkFragmentShadingRateNV shadingRate; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( + VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +#define VK_EXT_ycbcr_2plane_444_formats 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" +typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcr2plane444Formats; +} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + + +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + + +#define VK_QCOM_rotated_copy_commands 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" +typedef struct VkCopyCommandTransformInfoQCOM { + VkStructureType sType; + const void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkCopyCommandTransformInfoQCOM; + + + +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef struct VkPhysicalDeviceImageRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; +} VkPhysicalDeviceImageRobustnessFeaturesEXT; + + + +#define VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME 
"VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; + + + +#define VK_NV_acquire_winrt_display 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR* pDisplay); +#endif + + +#define VK_VALVE_mutable_descriptor_type 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" +typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE { + VkStructureType sType; + void* pNext; + VkBool32 mutableDescriptorType; +} VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +typedef struct VkMutableDescriptorTypeListVALVE { + uint32_t descriptorTypeCount; + const VkDescriptorType* pDescriptorTypes; +} VkMutableDescriptorTypeListVALVE; + +typedef struct VkMutableDescriptorTypeCreateInfoVALVE { + VkStructureType sType; + const void* pNext; + uint32_t mutableDescriptorTypeListCount; + const VkMutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists; +} VkMutableDescriptorTypeCreateInfoVALVE; + + + +#define VK_EXT_vertex_input_dynamic_state 1 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" +typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexInputDynamicState; +} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +typedef struct VkVertexInputBindingDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; + uint32_t divisor; +} VkVertexInputBindingDescription2EXT; + +typedef struct VkVertexInputAttributeDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription2EXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( + VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#endif + + +#define VK_EXT_extended_dynamic_state2 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" +typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { + 
VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState2; + VkBool32 extendedDynamicState2LogicOp; + VkBool32 extendedDynamicState2PatchControlPoints; +} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( + VkCommandBuffer commandBuffer, + uint32_t patchControlPoints); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT( + VkCommandBuffer commandBuffer, + VkLogicOp logicOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); +#endif + + +#define VK_EXT_color_write_enable 1 +#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 +#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" +typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 colorWriteEnable; +} VkPhysicalDeviceColorWriteEnableFeaturesEXT; + +typedef struct VkPipelineColorWriteCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkBool32* pColorWriteEnables; +} VkPipelineColorWriteCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables); +#endif + + +#define VK_EXT_global_priority_query 1 +#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT 16U +#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" +typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; +} VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; + +typedef struct VkQueueFamilyGlobalPriorityPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t priorityCount; + VkQueueGlobalPriorityEXT priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_EXT]; +} VkQueueFamilyGlobalPriorityPropertiesEXT; + + + +#define VK_KHR_acceleration_structure 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 11 +#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" + +typedef enum VkBuildAccelerationStructureModeKHR { + VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureModeKHR; + 
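+/*
+** Editorial aside, not part of the generated header: the two modes above
+** select between a full build and an incremental refit of an existing
+** acceleration structure. A minimal sketch of a refit follows, assuming a
+** recording command buffer `cmd`, a structure `blas` originally built with
+** VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, geometry and range
+** data `geometry`/`pRanges`, and a scratch buffer address `scratchAddress`
+** (all hypothetical names; the types and entry point used here are declared
+** later in this section):
+**
+**   VkAccelerationStructureBuildGeometryInfoKHR info = {0};
+**   info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR;
+**   info.type  = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR;
+**   info.flags = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR;
+**   info.mode  = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR;
+**   info.srcAccelerationStructure = blas;   // refit reads the previous build
+**   info.dstAccelerationStructure = blas;   // and may write it in place
+**   info.geometryCount = 1;
+**   info.pGeometries = &geometry;           // must match the original build
+**   info.scratchData.deviceAddress = scratchAddress;
+**   vkCmdBuildAccelerationStructuresKHR(cmd, 1, &info, &pRanges);
+*/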
+typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; + +typedef enum VkAccelerationStructureCompatibilityKHR { + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCompatibilityKHR; + +typedef enum VkAccelerationStructureCreateFlagBitsKHR { + VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001, + VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCreateFlagBitsKHR; +typedef VkFlags VkAccelerationStructureCreateFlagsKHR; +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; + +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; + +typedef struct VkAccelerationStructureBuildRangeInfoKHR { + uint32_t primitiveCount; + uint32_t primitiveOffset; + uint32_t firstVertex; + uint32_t transformOffset; +} VkAccelerationStructureBuildRangeInfoKHR; + +typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + uint32_t maxVertex; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceOrHostAddressConstKHR transformData; +} VkAccelerationStructureGeometryTrianglesDataKHR; + +typedef struct VkAccelerationStructureGeometryAabbsDataKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR data; + VkDeviceSize stride; +} VkAccelerationStructureGeometryAabbsDataKHR; + +typedef struct VkAccelerationStructureGeometryInstancesDataKHR { + VkStructureType sType; + const void* pNext; + VkBool32 arrayOfPointers; + VkDeviceOrHostAddressConstKHR data; +} VkAccelerationStructureGeometryInstancesDataKHR; + +typedef union VkAccelerationStructureGeometryDataKHR { + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +} VkAccelerationStructureGeometryDataKHR; + +typedef struct VkAccelerationStructureGeometryKHR { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkAccelerationStructureGeometryDataKHR geometry; + VkGeometryFlagsKHR flags; +} VkAccelerationStructureGeometryKHR; + +typedef struct VkAccelerationStructureBuildGeometryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + VkBuildAccelerationStructureModeKHR mode; + VkAccelerationStructureKHR srcAccelerationStructure; + VkAccelerationStructureKHR dstAccelerationStructure; + uint32_t geometryCount; + const VkAccelerationStructureGeometryKHR* pGeometries; + const VkAccelerationStructureGeometryKHR* const* ppGeometries; + VkDeviceOrHostAddressKHR scratchData; +} VkAccelerationStructureBuildGeometryInfoKHR; + +typedef struct VkAccelerationStructureCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureCreateFlagsKHR createFlags; + 
VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkAccelerationStructureTypeKHR type; + VkDeviceAddress deviceAddress; +} VkAccelerationStructureCreateInfoKHR; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; + +typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 accelerationStructure; + VkBool32 accelerationStructureCaptureReplay; + VkBool32 accelerationStructureIndirectBuild; + VkBool32 accelerationStructureHostCommands; + VkBool32 descriptorBindingAccelerationStructureUpdateAfterBind; +} VkPhysicalDeviceAccelerationStructureFeaturesKHR; + +typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR { + VkStructureType sType; + void* pNext; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxPrimitiveCount; + uint32_t maxPerStageDescriptorAccelerationStructures; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures; + uint32_t maxDescriptorSetAccelerationStructures; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures; + uint32_t minAccelerationStructureScratchOffsetAlignment; +} VkPhysicalDeviceAccelerationStructurePropertiesKHR; + +typedef struct VkAccelerationStructureDeviceAddressInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureDeviceAddressInfoKHR; + +typedef struct VkAccelerationStructureVersionInfoKHR { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkAccelerationStructureVersionInfoKHR; + +typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkDeviceOrHostAddressKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureToMemoryInfoKHR; + +typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyMemoryToAccelerationStructureInfoKHR; + +typedef struct VkCopyAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureInfoKHR; + +typedef struct VkAccelerationStructureBuildSizesInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize accelerationStructureSize; + VkDeviceSize updateScratchSize; + VkDeviceSize buildScratchSize; +} VkAccelerationStructureBuildSizesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef void (VKAPI_PTR 
*PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts); +typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdBuildAccelerationStructuresIndirectKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkDeviceAddress* pIndirectDeviceAddresses, + const uint32_t* pIndirectStrides, + const uint32_t* const* ppMaxPrimitiveCounts); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, + const uint32_t* pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +#endif + + +#define VK_KHR_ray_tracing_pipeline 1 +#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" + +typedef enum VkShaderGroupShaderKHR { + VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0, + VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1, + VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2, + VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3, + VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF +} VkShaderGroupShaderKHR; +typedef struct VkRayTracingShaderGroupCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t 
closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + const void* pShaderGroupCaptureReplayHandle; +} VkRayTracingShaderGroupCreateInfoKHR; + +typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxPipelineRayPayloadSize; + uint32_t maxPipelineRayHitAttributeSize; +} VkRayTracingPipelineInterfaceCreateInfoKHR; + +typedef struct VkRayTracingPipelineCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoKHR* pGroups; + uint32_t maxPipelineRayRecursionDepth; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPipeline; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplay; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed; + VkBool32 rayTracingPipelineTraceRaysIndirect; + VkBool32 rayTraversalPrimitiveCulling; +} VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRayRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint32_t shaderGroupHandleCaptureReplaySize; + uint32_t maxRayDispatchInvocationCount; + uint32_t shaderGroupHandleAlignment; + uint32_t maxRayHitAttributeSize; +} VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + +typedef struct VkStridedDeviceAddressRegionKHR { + VkDeviceAddress deviceAddress; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedDeviceAddressRegionKHR; + +typedef struct VkTraceRaysIndirectCommandKHR { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommandKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress); +typedef VkDeviceSize (VKAPI_PTR 
*PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader); +typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress); + +VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( + VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize); +#endif + + +#define VK_KHR_ray_query 1 +#define VK_KHR_RAY_QUERY_SPEC_VERSION 1 +#define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" +typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayQuery; +} VkPhysicalDeviceRayQueryFeaturesKHR; + #ifdef __cplusplus } diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_directfb.h b/externals/Vulkan-Headers/include/vulkan/vulkan_directfb.h new file mode 100755 index 000000000..8eaac6e48 --- /dev/null +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_directfb.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_DIRECTFB_H_ +#define VULKAN_DIRECTFB_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
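/*
** A minimal sketch of recording a trace-rays dispatch with the entry points
** above, assuming the shader-binding-table buffers already hold group handles
** from vkGetRayTracingShaderGroupHandlesKHR placed at
** shaderGroupBaseAlignment; all identifiers below are hypothetical, and real
** code must also round strides up to shaderGroupHandleAlignment.
*/
static void traceRaysSketch(VkCommandBuffer cmd,
                            VkDeviceAddress raygenAddr,
                            VkDeviceAddress missAddr,
                            VkDeviceAddress hitAddr,
                            const VkPhysicalDeviceRayTracingPipelinePropertiesKHR* props,
                            uint32_t width, uint32_t height)
{
    const VkDeviceSize handle = props->shaderGroupHandleSize;
    /* The raygen region must have size == stride. */
    const VkStridedDeviceAddressRegionKHR raygen = { raygenAddr, handle, handle };
    const VkStridedDeviceAddressRegionKHR miss   = { missAddr,   handle, handle };
    const VkStridedDeviceAddressRegionKHR hit    = { hitAddr,    handle, handle };
    const VkStridedDeviceAddressRegionKHR call   = { 0, 0, 0 }; /* no callables */
    vkCmdTraceRaysKHR(cmd, &raygen, &miss, &hit, &call, width, height, 1);
}
/*
** VK_KHR_ray_query, declared just above this note, adds no commands of its
** own; its feature structure only gates in-shader ray queries against the
** same acceleration structures.
*/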
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_EXT_directfb_surface 1 +#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 +#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" +typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; +typedef struct VkDirectFBSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDirectFBSurfaceCreateFlagsEXT flags; + IDirectFB* dfb; + IDirectFBSurface* surface; +} VkDirectFBSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT( + VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_fuchsia.h b/externals/Vulkan-Headers/include/vulkan/vulkan_fuchsia.h index 03e27cb0a..d55871573 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_fuchsia.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_fuchsia.h @@ -2,7 +2,7 @@ #define VULKAN_FUCHSIA_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -40,6 +40,80 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( VkSurfaceKHR* pSurface); #endif + +#define VK_FUCHSIA_external_memory 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" +typedef struct VkImportMemoryZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportMemoryZirconHandleInfoFUCHSIA; + +typedef struct VkMemoryZirconHandlePropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryZirconHandlePropertiesFUCHSIA; + +typedef struct VkMemoryGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA( + VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); +#endif + + +#define 
VK_FUCHSIA_external_semaphore 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" +typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + zx_handle_t zirconHandle; +} VkImportSemaphoreZirconHandleInfoFUCHSIA; + +typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); +#endif + #ifdef __cplusplus } #endif diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_ggp.h b/externals/Vulkan-Headers/include/vulkan/vulkan_ggp.h index 273c88005..9a6a582c5 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_ggp.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_ggp.h @@ -2,7 +2,7 @@ #define VULKAN_GGP_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_ios.h b/externals/Vulkan-Headers/include/vulkan/vulkan_ios.h index 651945cc7..6e7e6afea 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_ios.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_ios.h @@ -2,7 +2,7 @@ #define VULKAN_IOS_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -20,7 +20,7 @@ extern "C" { #define VK_MVK_ios_surface 1 -#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" typedef VkFlags VkIOSSurfaceCreateFlagsMVK; typedef struct VkIOSSurfaceCreateInfoMVK { diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_macos.h b/externals/Vulkan-Headers/include/vulkan/vulkan_macos.h index 3208b728e..c49b123d0 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_macos.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_macos.h @@ -2,7 +2,7 @@ #define VULKAN_MACOS_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. 
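/*
** Both Fuchsia extensions above mirror the opaque-fd external-object model,
** with a Zircon zx_handle_t in place of the POSIX file descriptor. A minimal
** export sketch, assuming 'memory' was allocated with the matching
** VkExportMemoryAllocateInfo handle type (identifiers are placeholders):
*/
static zx_handle_t exportVmoSketch(VkDevice device, VkDeviceMemory memory)
{
    const VkMemoryGetZirconHandleInfoFUCHSIA getInfo = {
        .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA,
        .pNext      = NULL,
        .memory     = memory,
        .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA,
    };
    zx_handle_t vmo = ZX_HANDLE_INVALID;
    if (vkGetMemoryZirconHandleFUCHSIA(device, &getInfo, &vmo) != VK_SUCCESS)
        return ZX_HANDLE_INVALID;
    return vmo;
}
/*
** Import runs the other way, chaining VkImportMemoryZirconHandleInfoFUCHSIA
** into VkMemoryAllocateInfo; the semaphore import/get pair works analogously.
*/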
** ** SPDX-License-Identifier: Apache-2.0 */ @@ -20,7 +20,7 @@ extern "C" { #define VK_MVK_macos_surface 1 -#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; typedef struct VkMacOSSurfaceCreateInfoMVK { diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_metal.h b/externals/Vulkan-Headers/include/vulkan/vulkan_metal.h index 99f097d95..5cf4a703a 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_metal.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_metal.h @@ -2,7 +2,7 @@ #define VULKAN_METAL_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_screen.h b/externals/Vulkan-Headers/include/vulkan/vulkan_screen.h new file mode 100755 index 000000000..92ad9bfab --- /dev/null +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_screen.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_SCREEN_H_ +#define VULKAN_SCREEN_H_ 1 + +/* +** Copyright 2015-2021 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_QNX_screen_surface 1 +#define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 +#define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" +typedef VkFlags VkScreenSurfaceCreateFlagsQNX; +typedef struct VkScreenSurfaceCreateInfoQNX { + VkStructureType sType; + const void* pNext; + VkScreenSurfaceCreateFlagsQNX flags; + struct _screen_context* context; + struct _screen_window* window; +} VkScreenSurfaceCreateInfoQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateScreenSurfaceQNX)(VkInstance instance, const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateScreenSurfaceQNX( + VkInstance instance, + const VkScreenSurfaceCreateInfoQNX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window* window); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_vi.h b/externals/Vulkan-Headers/include/vulkan/vulkan_vi.h index 2e62d7d3a..9e0dcca20 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_vi.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_vi.h @@ -2,7 +2,7 @@ #define VULKAN_VI_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_wayland.h b/externals/Vulkan-Headers/include/vulkan/vulkan_wayland.h index f7b307e51..2a329be9d 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_wayland.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_wayland.h @@ -2,7 +2,7 @@ #define VULKAN_WAYLAND_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. 
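/*
** The two platform headers introduced by this patch (vulkan_directfb.h
** earlier, vulkan_screen.h here) share the canonical WSI shape: a create-info
** struct wrapping the native window objects, a surface constructor, and a
** presentation-support query. A sketch for the QNX case, with hypothetical
** identifiers; the DirectFB path is identical in shape, substituting
** IDirectFB/IDirectFBSurface:
*/
static VkResult createScreenSurfaceSketch(VkInstance instance,
                                          struct _screen_context* ctx,
                                          struct _screen_window* win,
                                          VkSurfaceKHR* pSurfaceOut)
{
    const VkScreenSurfaceCreateInfoQNX createInfo = {
        .sType   = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX,
        .pNext   = NULL,
        .flags   = 0,
        .context = ctx,
        .window  = win,
    };
    return vkCreateScreenSurfaceQNX(instance, &createInfo, NULL, pSurfaceOut);
}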
** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_win32.h b/externals/Vulkan-Headers/include/vulkan/vulkan_win32.h index 4b561ea10..1b680f0b1 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_win32.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_win32.h @@ -2,7 +2,7 @@ #define VULKAN_WIN32_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_xcb.h b/externals/Vulkan-Headers/include/vulkan/vulkan_xcb.h index c5441b239..5ba2ad850 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_xcb.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_xcb.h @@ -2,7 +2,7 @@ #define VULKAN_XCB_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_xlib.h b/externals/Vulkan-Headers/include/vulkan/vulkan_xlib.h index c54628a7e..75c75dc2e 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_xlib.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_xlib.h @@ -2,7 +2,7 @@ #define VULKAN_XLIB_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/include/vulkan/vulkan_xlib_xrandr.h b/externals/Vulkan-Headers/include/vulkan/vulkan_xlib_xrandr.h index 436432f84..fa2749342 100755 --- a/externals/Vulkan-Headers/include/vulkan/vulkan_xlib_xrandr.h +++ b/externals/Vulkan-Headers/include/vulkan/vulkan_xlib_xrandr.h @@ -2,7 +2,7 @@ #define VULKAN_XLIB_XRANDR_H_ 1 /* -** Copyright (c) 2015-2020 The Khronos Group Inc. +** Copyright 2015-2021 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/externals/Vulkan-Headers/registry/cgenerator.py b/externals/Vulkan-Headers/registry/cgenerator.py index 11d54683b..561460213 100755 --- a/externals/Vulkan-Headers/registry/cgenerator.py +++ b/externals/Vulkan-Headers/registry/cgenerator.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 -i # -# Copyright (c) 2013-2020 The Khronos Group Inc. +# Copyright 2013-2021 The Khronos Group Inc. # # SPDX-License-Identifier: Apache-2.0 @@ -32,6 +32,8 @@ class CGeneratorOptions(GeneratorOptions): genEnumBeginEndRange=False, genAliasMacro=False, aliasMacro='', + misracstyle=False, + misracppstyle=False, **kwargs ): """Constructor. 
@@ -68,7 +70,10 @@ class CGeneratorOptions(GeneratorOptions): be generated for enumerated types - genAliasMacro - True if the OpenXR alias macro should be generated for aliased types (unclear what other circumstances this is useful) - - aliasMacro - alias macro to inject when genAliasMacro is True""" + - aliasMacro - alias macro to inject when genAliasMacro is True + - misracstyle - generate MISRA C-friendly headers + - misracppstyle - generate MISRA C++-friendly headers""" + GeneratorOptions.__init__(self, **kwargs) self.prefixText = prefixText @@ -116,6 +121,12 @@ class CGeneratorOptions(GeneratorOptions): self.aliasMacro = aliasMacro """alias macro to inject when genAliasMacro is True""" + self.misracstyle = misracstyle + """generate MISRA C-friendly headers""" + + self.misracppstyle = misracppstyle + """generate MISRA C++-friendly headers""" + self.codeGenerator = True """True if this generator makes compilable code""" @@ -380,13 +391,11 @@ class COutputGenerator(OutputGenerator): self.appendSection(section, "\n" + body) def genEnum(self, enuminfo, name, alias): - """Generate enumerants. + """Generate the C declaration for a constant (a single value).""" - tags may specify their values in several ways, but are usually - just integers.""" OutputGenerator.genEnum(self, enuminfo, name, alias) - (_, strVal) = self.enumToValue(enuminfo.elem, False) - body = '#define ' + name.ljust(33) + ' ' + strVal + + body = self.buildConstantCDecl(enuminfo, name, alias) self.appendSection('enum', body) def genCmd(self, cmdinfo, name, alias): @@ -403,3 +412,9 @@ class COutputGenerator(OutputGenerator): self.appendSection('command', prefix + decls[0] + '\n') if self.genOpts.genFuncPointers: self.appendSection('commandPointer', decls[1]) + + def misracstyle(self): + return self.genOpts.misracstyle; + + def misracppstyle(self): + return self.genOpts.misracppstyle; diff --git a/externals/Vulkan-Headers/registry/conventions.py b/externals/Vulkan-Headers/registry/conventions.py index 6de7348bd..edf90596d 100755 --- a/externals/Vulkan-Headers/registry/conventions.py +++ b/externals/Vulkan-Headers/registry/conventions.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 -i # -# Copyright (c) 2013-2020 The Khronos Group Inc. +# Copyright 2013-2021 The Khronos Group Inc. # # SPDX-License-Identifier: Apache-2.0 diff --git a/externals/Vulkan-Headers/registry/generator.py b/externals/Vulkan-Headers/registry/generator.py index 22269f2d0..19bbc3c16 100755 --- a/externals/Vulkan-Headers/registry/generator.py +++ b/externals/Vulkan-Headers/registry/generator.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 -i # -# Copyright (c) 2013-2020 The Khronos Group Inc. +# Copyright 2013-2021 The Khronos Group Inc. # # SPDX-License-Identifier: Apache-2.0 """Base class for source/header/doc generators, as well as some utility functions.""" @@ -118,6 +118,7 @@ class GeneratorOptions: addExtensions=None, removeExtensions=None, emitExtensions=None, + emitSpirv=None, reparentEnums=True, sortProcedure=regSortFeatures): """Constructor. @@ -148,6 +149,9 @@ class GeneratorOptions: - emitExtensions - regex matching names of extensions to actually emit interfaces for (though all requested versions are considered when deciding which interfaces to generate). + to None. + - emitSpirv - regex matching names of extensions and capabilities + to actually emit interfaces for. - reparentEnums - move elements which extend an enumerated type from or elements to the target element. 
This is required for almost all purposes, but the @@ -209,6 +213,10 @@ class GeneratorOptions: interfaces for (though all requested versions are considered when deciding which interfaces to generate).""" + self.emitSpirv = self.emptyRegex(emitSpirv) + """regex matching names of extensions and capabilities + to actually emit interfaces for.""" + self.reparentEnums = reparentEnums """boolean specifying whether to remove elements from or when extending an type.""" @@ -298,7 +306,7 @@ class OutputGenerator: raise UserWarning( '*** FATAL ERROR in Generator.logMsg: unknown level:' + level) - def enumToValue(self, elem, needsNum): + def enumToValue(self, elem, needsNum, bitwidth = 32, forceSuffix = False): """Parse and convert an `` tag into a value. Returns a list: @@ -334,6 +342,11 @@ class OutputGenerator: # t = enuminfo.elem.get('type') # if t is not None and t != '' and t != 'i' and t != 's': # value += enuminfo.type + if forceSuffix: + if bitwidth == 64: + value = value + 'ULL' + else: + value = value + 'U' self.logMsg('diag', 'Enum', name, '-> value [', numVal, ',', value, ']') return [numVal, value] if 'bitpos' in elem.keys(): @@ -341,8 +354,10 @@ class OutputGenerator: bitpos = int(value, 0) numVal = 1 << bitpos value = '0x%08x' % numVal - if bitpos >= 32: - value = value + 'ULL' + if bitwidth == 64: + value = value + 'ULL' + elif forceSuffix: + value = value + 'U' self.logMsg('diag', 'Enum', name, '-> bitpos [', numVal, ',', value, ']') return [numVal, value] if 'offset' in elem.keys(): @@ -370,7 +385,7 @@ class OutputGenerator: return [None, None] def checkDuplicateEnums(self, enums): - """Sanity check enumerated values. + """Check enumerated values for duplicates. - enums - list of `` Elements @@ -425,6 +440,12 @@ class OutputGenerator: # Return the list return stripped + def misracstyle(self): + return False; + + def misracppstyle(self): + return False; + def buildEnumCDecl(self, expand, groupinfo, groupName): """Generate the C declaration for an enum""" groupElem = groupinfo.elem @@ -432,11 +453,11 @@ class OutputGenerator: # Determine the required bit width for the enum group. # 32 is the default, which generates C enum types for the values. bitwidth = 32 - + # If the constFlagBits preference is set, 64 is the default for bitmasks if self.genOpts.conventions.constFlagBits and groupElem.get('type') == 'bitmask': bitwidth = 64 - + # Check for an explicitly defined bitwidth, which will override any defaults. if groupElem.get('bitwidth'): try: @@ -444,56 +465,107 @@ class OutputGenerator: except ValueError as ve: self.logMsg('error', 'Invalid value for bitwidth attribute (', groupElem.get('bitwidth'), ') for ', groupName, ' - must be an integer value\n') exit(1) - - # Bitmask types support 64-bit flags, so have different handling + + usebitmask = False + usedefine = False + + # Bitmask flags can be generated as either "static const uint{32,64}_t" values, + # or as 32-bit C enums. 64-bit types must use uint64_t values. 
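# Illustrative renderings of a single 64-bit flag bit under each mode (using
# a hypothetical VkAccessFlagBits2KHR-style value; not captured generator
# output):
#   default:        static const VkAccessFlagBits2KHR VK_ACCESS_2_NONE_KHR = 0ULL;
#   -misracppstyle: static constexpr VkAccessFlagBits2KHR VK_ACCESS_2_NONE_KHR {0ULL};
#   -misracstyle:   #define VK_ACCESS_2_NONE_KHR 0ULL
# 32-bit flag types with neither MISRA option still fall through to the
# C-enum path below.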
if groupElem.get('type') == 'bitmask': - + if bitwidth > 32 or self.misracppstyle(): + usebitmask = True + if self.misracstyle(): + usedefine = True + + if usedefine or usebitmask: # Validate the bitwidth and generate values appropriately - # Bitmask flags up to 64-bit are generated as static const uint64_t values - # Bitmask flags up to 32-bit are generated as C enum values if bitwidth > 64: self.logMsg('error', 'Invalid value for bitwidth attribute (', groupElem.get('bitwidth'), ') for bitmask type ', groupName, ' - must be less than or equal to 64\n') exit(1) - elif bitwidth > 32: - return self.buildEnumCDecl_Bitmask(groupinfo, groupName) else: - return self.buildEnumCDecl_Enum(expand, groupinfo, groupName) + return self.buildEnumCDecl_BitmaskOrDefine(groupinfo, groupName, bitwidth, usedefine) else: # Validate the bitwidth and generate values appropriately - # Enum group types up to 32-bit are generated as C enum values if bitwidth > 32: self.logMsg('error', 'Invalid value for bitwidth attribute (', groupElem.get('bitwidth'), ') for enum type ', groupName, ' - must be less than or equal to 32\n') exit(1) else: return self.buildEnumCDecl_Enum(expand, groupinfo, groupName) - def buildEnumCDecl_Bitmask(self, groupinfo, groupName): + def buildEnumCDecl_BitmaskOrDefine(self, groupinfo, groupName, bitwidth, usedefine): """Generate the C declaration for an "enum" that is actually a set of flag bits""" groupElem = groupinfo.elem - flagTypeName = groupinfo.flagType.elem.get('name') + flagTypeName = groupElem.get('name') # Prefix body = "// Flag bits for " + flagTypeName + "\n" - + + if bitwidth == 64: + body += "typedef VkFlags64 %s;\n" % flagTypeName; + else: + body += "typedef VkFlags %s;\n" % flagTypeName; + # Maximum allowable value for a flag (unsigned 64-bit integer) maxValidValue = 2**(64) - 1 minValidValue = 0 + # Get a list of nested 'enum' tags. + enums = groupElem.findall('enum') + + # Check for and report duplicates, and return a list with them + # removed. + enums = self.checkDuplicateEnums(enums) + + # Accumulate non-numeric enumerant values separately and append + # them following the numeric values, to allow for aliases. + # NOTE: this doesn't do a topological sort yet, so aliases of + # aliases can still get in the wrong order. + aliasText = '' + # Loop over the nested 'enum' tags. - for elem in groupElem.findall('enum'): + for elem in enums: # Convert the value to an integer and use that to track min/max. # Values of form -(number) are accepted but nothing more complex. # Should catch exceptions here for more complex constructs. Not yet. 
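# Worked values for the widened enumToValue() call below: bitpos="3" in a
# 64-bit group now renders as '0x00000008ULL', while the same bit forced
# through the 32-bit path renders as '0x00000008U' (assuming the unchanged
# '0x%08x' formatting in enumToValue).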
- (numVal, strVal) = self.enumToValue(elem, True) + (numVal, strVal) = self.enumToValue(elem, True, bitwidth, True) name = elem.get('name') - + # Range check for the enum value if numVal is not None and (numVal > maxValidValue or numVal < minValidValue): self.logMsg('error', 'Allowable range for flag types in C is [', minValidValue, ',', maxValidValue, '], but', name, 'flag has a value outside of this (', strVal, ')\n') exit(1) - - body += "static const {} {} = {};\n".format(flagTypeName, name, strVal) + + decl = self.genRequirements(name, mustBeFound = False) + + if self.isEnumRequired(elem): + protect = elem.get('protect') + if protect is not None: + body += '#ifdef {}\n'.format(protect) + + if usedefine: + decl += "#define {} {}\n".format(name, strVal) + elif self.misracppstyle(): + decl += "static constexpr {} {} {{{}}};\n".format(flagTypeName, name, strVal) + else: + # Some C compilers only allow initializing a 'static const' variable with a literal value. + # So initializing an alias from another 'static const' value would fail to compile. + # Work around this by chasing the aliases to get the actual value. + while numVal is None: + alias = self.registry.tree.find("enums/enum[@name='" + strVal + "']") + (numVal, strVal) = self.enumToValue(alias, True) + decl += "static const {} {} = {};\n".format(flagTypeName, name, strVal) + + if numVal is not None: + body += decl + else: + aliasText += decl + + if protect is not None: + body += '#endif\n' + + # Now append the non-numeric enumerant values + body += aliasText # Postfix @@ -505,7 +577,7 @@ class OutputGenerator: # Break the group name into prefix and suffix portions for range # enum generation - expandName = re.sub(r'([0-9a-z_])([A-Z0-9])', r'\1_\2', groupName).upper() + expandName = re.sub(r'([0-9]+|[a-z_])([A-Z0-9])', r'\1_\2', groupName).upper() expandPrefix = expandName expandSuffix = '' expandSuffixMatch = re.search(r'[A-Z][A-Z]+$', groupName) @@ -519,11 +591,11 @@ class OutputGenerator: # @@ Should use the type="bitmask" attribute instead isEnum = ('FLAG_BITS' not in expandPrefix) - + # Allowable range for a C enum - which is that of a signed 32-bit integer maxValidValue = 2**(32 - 1) - 1 minValidValue = (maxValidValue * -1) - 1 - + # Get a list of nested 'enum' tags. enums = groupElem.findall('enum') @@ -553,7 +625,22 @@ class OutputGenerator: # Extension enumerants are only included if they are required if self.isEnumRequired(elem): - decl = " {} = {},".format(name, strVal) + decl = '' + + protect = elem.get('protect') + if protect is not None: + decl += '#ifdef {}\n'.format(protect) + + # Indent requirements comment, if there is one + requirements = self.genRequirements(name, mustBeFound = False) + if requirements != '': + requirements = ' ' + requirements + decl += requirements + decl += ' {} = {},'.format(name, strVal) + + if protect is not None: + decl += '\n#endif' + if numVal is not None: body.append(decl) else: @@ -563,7 +650,6 @@ class OutputGenerator: if numVal is not None and (numVal > maxValidValue or numVal < minValidValue): self.logMsg('error', 'Allowable range for C enum types is [', minValidValue, ',', maxValidValue, '], but', name, 'has a value outside of this (', strVal, ')\n') exit(1) - # Don't track min/max for non-numbers (numVal is None) if isEnum and numVal is not None and elem.get('extends') is None: @@ -605,6 +691,47 @@ class OutputGenerator: return (section, '\n'.join(body)) + def buildConstantCDecl(self, enuminfo, name, alias): + """Generate the C declaration for a constant (a single + value). 
+ + tags may specify their values in several ways, but are + usually just integers or floating-point numbers.""" + + (_, strVal) = self.enumToValue(enuminfo.elem, False) + + if self.misracppstyle() and enuminfo.elem.get('type') and not alias: + # Generate e.g.: static constexpr uint32_t x = ~static_cast(1U); + # This appeases MISRA "underlying type" rules. + typeStr = enuminfo.elem.get('type'); + invert = '~' in strVal + number = strVal.strip("()~UL") + if typeStr != "float": + number += 'U' + strVal = "~" if invert else "" + strVal += "static_cast<" + typeStr + ">(" + number + ")" + body = 'static constexpr ' + typeStr.ljust(9) + name.ljust(33) + ' {' + strVal + '};' + elif enuminfo.elem.get('type') and not alias: + # Generate e.g.: #define x (~0ULL) + typeStr = enuminfo.elem.get('type'); + invert = '~' in strVal + paren = '(' in strVal + number = strVal.strip("()~UL") + if typeStr != "float": + if typeStr == "uint64_t": + number += 'ULL' + else: + number += 'U' + strVal = "~" if invert else "" + strVal += number + if paren: + strVal = "(" + strVal + ")"; + body = '#define ' + name.ljust(33) + ' ' + strVal; + else: + body = '#define ' + name.ljust(33) + ' ' + strVal + + return body + def makeDir(self, path): """Create a directory, if not already done. @@ -683,6 +810,20 @@ class OutputGenerator: self.featureName = None self.featureExtraProtect = None + def genRequirements(self, name, mustBeFound = True): + """Generate text showing what core versions and extensions introduce + an API. This exists in the base Generator class because it's used by + the shared enumerant-generating interfaces (buildEnumCDecl, etc.). + Here it returns an empty string for most generators, but can be + overridden by e.g. DocGenerator. + + - name - name of the API + - mustBeFound - If True, when requirements for 'name' cannot be + determined, a warning comment is generated. + """ + + return '' + def validateFeature(self, featureType, featureName): """Validate we're generating something only inside a `` tag""" if self.featureName is None: @@ -738,6 +879,14 @@ class OutputGenerator: Extend to generate as desired in your derived class.""" self.validateFeature('command', cmdinfo) + def genSpirv(self, spirv, spirvinfo, alias): + """Generate interface for a spirv element. + + - spirvinfo - SpirvInfo for a command + + Extend to generate as desired in your derived class.""" + return + def makeProtoName(self, name, tail): """Turn a `` `` into C-language prototype and typedef declarations for that name. @@ -759,7 +908,9 @@ class OutputGenerator: - aligncol - if non-zero, attempt to align the nested `` element at this column""" indent = ' ' - paramdecl = indent + noneStr(param.text) + paramdecl = indent + prefix = noneStr(param.text) + for elem in param: text = noneStr(elem.text) tail = noneStr(elem.tail) @@ -778,7 +929,16 @@ class OutputGenerator: paramdecl = paramdecl.ljust(aligncol - 1) + ' ' newLen = len(paramdecl) self.logMsg('diag', 'Adjust length of parameter decl from', oldLen, 'to', newLen, ':', paramdecl) - paramdecl += text + tail + + if (self.misracppstyle() and prefix.find('const ') != -1): + # Change pointer type order from e.g. "const void *" to "void const *". + # If the string starts with 'const', reorder it to be after the first type. 
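# Worked example of the reordering: a registry parameter spelled
# "const void* pData" arrives here with prefix == 'const ', so the branch
# below drops the leading qualifier and re-appends it after the type text,
# emitting "void const* pData" -- the "east const" form MISRA C++ checkers
# expect.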
+ paramdecl += prefix.replace('const ', '') + text + ' const' + tail + else: + paramdecl += prefix + text + tail + + # Clear prefix for subsequent iterations + prefix = '' if aligncol == 0: # Squeeze out multiple spaces other than the indentation paramdecl = indent + ' '.join(paramdecl.split()) @@ -985,8 +1145,28 @@ class OutputGenerator: # Non-indented parameters paramdecl = '(' if n > 0: - paramnames = (''.join(t for t in p.itertext()) - for p in params) + paramnames = [] + if self.misracppstyle(): + for p in params: + param = '' + firstIter = True; + for t in p.itertext(): + if (firstIter): + prefix = t + firstIter = False + else: + # Change pointer type order from e.g. "const void *" to "void const *". + # If the string starts with 'const', reorder it to be after the first type. + if (prefix.find('const ') != -1): + param += prefix.replace('const ', '') + t + ' const ' + else: + param += prefix + t + # Clear prefix for subsequent iterations + prefix = '' + paramnames.append(param); + else: + paramnames = (''.join(t for t in p.itertext()) + for p in params) paramdecl += ', '.join(paramnames) else: paramdecl += 'void' diff --git a/externals/Vulkan-Headers/registry/genvk.py b/externals/Vulkan-Headers/registry/genvk.py index 288b43e16..069ee68bf 100755 --- a/externals/Vulkan-Headers/registry/genvk.py +++ b/externals/Vulkan-Headers/registry/genvk.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 # -# Copyright (c) 2013-2020 The Khronos Group Inc. +# Copyright 2013-2021 The Khronos Group Inc. # # SPDX-License-Identifier: Apache-2.0 @@ -17,6 +17,7 @@ from extensionmetadocgenerator import (ExtensionMetaDocGeneratorOptions, ExtensionMetaDocOutputGenerator) from interfacedocgenerator import InterfaceDocGenerator from generator import write +from spirvcapgenerator import SpirvCapabilityOutputGenerator from hostsyncgenerator import HostSynchronizationOutputGenerator from pygenerator import PyOutputGenerator from reflib import logDiag, logWarn, setLogFile @@ -73,6 +74,9 @@ def makeGenOpts(args): # Extensions to emit (list of extensions) emitExtensions = args.emitExtensions + # SPIR-V capabilities / features to emit (list of extensions & capabilities) + emitSpirv = args.emitSpirv + # Features to include (list of features) features = args.feature @@ -85,21 +89,28 @@ def makeGenOpts(args): # Path to generated files, particularly api.py genpath = args.genpath + # Generate MISRA C-friendly headers + misracstyle = args.misracstyle; + + # Generate MISRA C++-friendly headers + misracppstyle = args.misracppstyle; + # Descriptive names for various regexp patterns used to select # versions and extensions - allFeatures = allExtensions = r'.*' + allSpirv = allFeatures = allExtensions = r'.*' # Turn lists of names/patterns into matching regular expressions addExtensionsPat = makeREstring(extensions, None) removeExtensionsPat = makeREstring(removeExtensions, None) emitExtensionsPat = makeREstring(emitExtensions, allExtensions) + emitSpirvPat = makeREstring(emitSpirv, allSpirv) featuresPat = makeREstring(features, allFeatures) # Copyright text prefixing all headers (list of strings). 
# The SPDX formatting below works around constraints of the 'reuse' tool prefixStrings = [ '/*', - '** Copyright (c) 2015-2020 The Khronos Group Inc.', + '** Copyright 2015-2021 The Khronos Group Inc.', '**', '** SPDX' + '-License-Identifier: Apache-2.0', '*/', @@ -249,6 +260,25 @@ def makeGenOpts(args): reparentEnums = False) ] + genOpts['spirvcapinc'] = [ + SpirvCapabilityOutputGenerator, + DocGeneratorOptions( + conventions = conventions, + filename = 'timeMarker', + directory = directory, + genpath = None, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = featuresPat, + defaultExtensions = None, + addExtensions = addExtensionsPat, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat, + emitSpirv = emitSpirvPat, + reparentEnums = False) + ] + # Platform extensions, in their own header files # Each element of the platforms[] array defines information for # generating a single platform: @@ -269,14 +299,25 @@ def makeGenOpts(args): # Extensions required and suppressed for beta "platform". This can # probably eventually be derived from the requires= attributes of # the extension blocks. - betaRequireExtensions = [ 'VK_KHR_ray_tracing', 'VK_KHR_deferred_host_operations', 'VK_KHR_pipeline_library' ] - betaSuppressExtensions = [ 'VK_NV_ray_tracing' ] + betaRequireExtensions = [ + 'VK_KHR_portability_subset', + 'VK_KHR_video_queue', + 'VK_KHR_video_decode_queue', + 'VK_KHR_video_encode_queue', + 'VK_EXT_video_decode_h264', + 'VK_EXT_video_decode_h265', + 'VK_EXT_video_encode_h264', + ] + + betaSuppressExtensions = [] platforms = [ [ 'vulkan_android.h', [ 'VK_KHR_android_surface', 'VK_ANDROID_external_memory_android_hardware_buffer' ], commonSuppressExtensions ], - [ 'vulkan_fuchsia.h', [ 'VK_FUCHSIA_imagepipe_surface'], commonSuppressExtensions ], + [ 'vulkan_fuchsia.h', [ 'VK_FUCHSIA_imagepipe_surface', + 'VK_FUCHSIA_external_memory', + 'VK_FUCHSIA_external_semaphore' ], commonSuppressExtensions ], [ 'vulkan_ggp.h', [ 'VK_GGP_stream_descriptor_surface', 'VK_GGP_frame_token' ], commonSuppressExtensions ], [ 'vulkan_ios.h', [ 'VK_MVK_ios_surface' ], commonSuppressExtensions ], @@ -294,8 +335,10 @@ def makeGenOpts(args): ] ], [ 'vulkan_xcb.h', [ 'VK_KHR_xcb_surface' ], commonSuppressExtensions ], [ 'vulkan_xlib.h', [ 'VK_KHR_xlib_surface' ], commonSuppressExtensions ], + [ 'vulkan_directfb.h', [ 'VK_EXT_directfb_surface' ], commonSuppressExtensions ], [ 'vulkan_xlib_xrandr.h', [ 'VK_EXT_acquire_xlib_display' ], commonSuppressExtensions ], [ 'vulkan_metal.h', [ 'VK_EXT_metal_surface' ], commonSuppressExtensions ], + [ 'vulkan_screen.h', [ 'VK_QNX_screen_surface' ], commonSuppressExtensions ], [ 'vulkan_beta.h', betaRequireExtensions, betaSuppressExtensions ], ] @@ -331,7 +374,9 @@ def makeGenOpts(args): apicall = 'VKAPI_ATTR ', apientry = 'VKAPI_CALL ', apientryp = 'VKAPI_PTR *', - alignFuncParam = 48) + alignFuncParam = 48, + misracstyle = misracstyle, + misracppstyle = misracppstyle) genOpts[headername] = [ COutputGenerator, opts ] @@ -370,7 +415,9 @@ def makeGenOpts(args): apicall = 'VKAPI_ATTR ', apientry = 'VKAPI_CALL ', apientryp = 'VKAPI_PTR *', - alignFuncParam = 48) + alignFuncParam = 48, + misracstyle = misracstyle, + misracppstyle = misracppstyle) ] # Unused - vulkan10.h target. 
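# The new switches thread through makeGenOpts() to every header target. A
# hypothetical invocation (paths and the regex are placeholders, not taken
# from this patch), generating a MISRA C-friendly core header with all
# SPIR-V elements marked for emission:
#
#     python3 genvk.py -registry vk.xml -misracstyle -emitSpirv '.*' \
#         -o gen/include/vulkan vulkan_core.h
#
# -emitSpirv chiefly feeds the new spirvcapinc target; for the C headers the
# two MISRA flags only select the alternate declaration styles added in
# cgenerator.py and generator.py.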
@@ -402,7 +449,9 @@ def makeGenOpts(args): apicall = 'VKAPI_ATTR ', apientry = 'VKAPI_CALL ', apientryp = 'VKAPI_PTR *', - alignFuncParam = 48) + alignFuncParam = 48, + misracstyle = misracstyle, + misracppstyle = misracppstyle) ] # Unused - vulkan11.h target. @@ -434,7 +483,9 @@ def makeGenOpts(args): apicall = 'VKAPI_ATTR ', apientry = 'VKAPI_CALL ', apientryp = 'VKAPI_PTR *', - alignFuncParam = 48) + alignFuncParam = 48, + misracstyle = misracstyle, + misracppstyle = misracppstyle) ] genOpts['alias.h'] = [ @@ -481,6 +532,8 @@ def genTarget(args): # Create generator options with parameters specified on command line makeGenOpts(args) + # pdb.set_trace() + # Select a generator matching the requested target if args.target in genOpts: createGenerator = genOpts[args.target][0] @@ -522,6 +575,9 @@ if __name__ == '__main__': parser.add_argument('-emitExtensions', action='append', default=[], help='Specify an extension or extensions to emit in targets') + parser.add_argument('-emitSpirv', action='append', + default=[], + help='Specify a SPIR-V extension or capability to emit in targets') parser.add_argument('-feature', action='append', default=[], help='Specify a core API feature name or names to add to targets') @@ -557,6 +613,10 @@ if __name__ == '__main__': help='Suppress script output during normal execution.') parser.add_argument('-verbose', action='store_false', dest='quiet', default=True, help='Enable script output during normal execution.') + parser.add_argument('-misracstyle', dest='misracstyle', action='store_true', + help='generate MISRA C-friendly headers') + parser.add_argument('-misracppstyle', dest='misracppstyle', action='store_true', + help='generate MISRA C++-friendly headers') args = parser.parse_args() diff --git a/externals/Vulkan-Headers/registry/reg.py b/externals/Vulkan-Headers/registry/reg.py index c63804b11..afb374df2 100755 --- a/externals/Vulkan-Headers/registry/reg.py +++ b/externals/Vulkan-Headers/registry/reg.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 -i # -# Copyright 2013-2020 The Khronos Group Inc. +# Copyright 2013-2021 The Khronos Group Inc. # # SPDX-License-Identifier: Apache-2.0 @@ -12,7 +12,7 @@ import sys import xml.etree.ElementTree as etree from collections import defaultdict, namedtuple from generator import OutputGenerator, GeneratorOptions, write - +import pdb def apiNameMatch(str, supported): """Return whether a required api name matches a pattern specified for an @@ -253,6 +253,12 @@ class FeatureInfo(BaseInfo): self.number = 0 self.supported = elem.get('supported') +class SpirvInfo(BaseInfo): + """Registry information about an API + or .""" + + def __init__(self, elem): + BaseInfo.__init__(self, elem) class Registry: """Object representing an API registry, loaded from an XML file.""" @@ -299,6 +305,12 @@ class Registry: self.extdict = {} "dictionary of FeatureInfo objects for `` elements keyed by extension name" + self.spirvextdict = {} + "dictionary of FeatureInfo objects for `` elements keyed by spirv extension name" + + self.spirvcapdict = {} + "dictionary of FeatureInfo objects for `` elements keyed by spirv capability name" + self.emitFeatures = False """True to actually emit features for a version / extension, or False to just treat them as emitted""" @@ -344,10 +356,10 @@ class Registry: Intended for internal use only. 
- - elem - ``/``/``/``/``/`` Element - - info - corresponding {Type|Group|Enum|Cmd|Feature}Info object - - infoName - 'type' / 'group' / 'enum' / 'command' / 'feature' / 'extension' - - dictionary - self.{type|group|enum|cmd|api|ext}dict + - elem - ``/``/``/``/``/``/``/`` Element + - info - corresponding {Type|Group|Enum|Cmd|Feature|Spirv}Info object + - infoName - 'type' / 'group' / 'enum' / 'command' / 'feature' / 'extension' / 'spirvextension' / 'spirvcapability' + - dictionary - self.{type|group|enum|cmd|api|ext|spirvext|spirvcap}dict If the Element has an 'api' attribute, the dictionary key is the tuple (name,api). If not, the key is the name. 'name' is an @@ -591,6 +603,15 @@ class Registry: for parent in self.validextensionstructs: self.validextensionstructs[parent].sort() + # Parse out all spirv tags in dictionaries + # Use addElementInfo to catch duplicates + for spirv in self.reg.findall('spirvextensions/spirvextension'): + spirvInfo = SpirvInfo(spirv) + self.addElementInfo(spirv, spirvInfo, 'spirvextension', self.spirvextdict) + for spirv in self.reg.findall('spirvcapabilities/spirvcapability'): + spirvInfo = SpirvInfo(spirv) + self.addElementInfo(spirv, spirvInfo, 'spirvcapability', self.spirvcapdict) + def dumpReg(self, maxlen=120, filehandle=sys.stdout): """Dump all the dictionaries constructed from the Registry object. @@ -623,6 +644,13 @@ class Registry: for key in self.extdict: write(' Extension', key, '->', etree.tostring(self.extdict[key].elem)[0:maxlen], file=filehandle) + write('// SPIR-V', file=filehandle) + for key in self.spirvextdict: + write(' SPIR-V Extension', key, '->', + etree.tostring(self.spirvextdict[key].elem)[0:maxlen], file=filehandle) + for key in self.spirvcapdict: + write(' SPIR-V Capability', key, '->', + etree.tostring(self.spirvcapdict[key].elem)[0:maxlen], file=filehandle) def markTypeRequired(self, typename, required): """Require (along with its dependencies) or remove (but not its dependencies) a type. @@ -1126,6 +1154,19 @@ class Registry: for c in features.findall('command'): self.generateFeature(c.get('name'), 'command', self.cmddict) + def generateSpirv(self, spirv, dictionary): + if spirv is None: + self.gen.logMsg('diag', 'No entry found for element', name, + 'returning!') + return + + name = spirv.elem.get('name') + # No known alias for spirv elements + alias = None + if spirv.emit: + genProc = self.gen.genSpirv + genProc(spirv, name, alias) + def apiGen(self): """Generate interface for specified versions using the current generator and generator options""" @@ -1145,6 +1186,7 @@ class Registry: regAddExtensions = re.compile(self.genOpts.addExtensions) regRemoveExtensions = re.compile(self.genOpts.removeExtensions) regEmitExtensions = re.compile(self.genOpts.emitExtensions) + regEmitSpirv = re.compile(self.genOpts.emitSpirv) # Get all matching API feature names & add to list of FeatureInfo # Note we used to select on feature version attributes, not names. @@ -1202,10 +1244,16 @@ class Registry: # the regexp specified in the generator options. This allows # forcing extensions into an interface even if they're not # tagged appropriately in the registry. + # However we still respect the 'supported' attribute. 
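# Net effect of the guard below: an extension whose XML is marked
# supported="disabled" can no longer be forced into the output via
# -addExtensions alone; the request is logged and skipped instead of
# emitting interfaces the target API does not support.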
if regAddExtensions.match(extName) is not None: - self.gen.logMsg('diag', 'Including extension', - extName, '(matches explicitly requested extensions to add)') - include = True + if not apiNameMatch(self.genOpts.apiname, ei.elem.get('supported')): + self.gen.logMsg('diag', 'NOT including extension', + extName, '(matches explicitly requested, but does not match the \'supported\' attribute)') + include = False + else: + self.gen.logMsg('diag', 'Including extension', + extName, '(matches explicitly requested extensions to add)') + include = True # Remove extensions if the name matches the regexp specified # in generator options. This allows forcing removal of # extensions from an interface even if they're tagged that @@ -1233,6 +1281,20 @@ class Registry: self.gen.logMsg('diag', 'NOT including extension', extName, '(does not match api attribute or explicitly requested extensions)') + # Add all spirv elements to list + # generators decide to emit them all or not + # Currently no filtering as no client of these elements needs filtering + spirvexts = [] + for key in self.spirvextdict: + si = self.spirvextdict[key] + si.emit = (regEmitSpirv.match(key) is not None) + spirvexts.append(si) + spirvcaps = [] + for key in self.spirvcapdict: + si = self.spirvcapdict[key] + si.emit = (regEmitSpirv.match(key) is not None) + spirvcaps.append(si) + # Sort the features list, if a sort procedure is defined if self.genOpts.sortProcedure: self.genOpts.sortProcedure(features) @@ -1271,6 +1333,11 @@ class Registry: self.gen.beginFeature(f.elem, emit) self.generateRequiredInterface(f.elem) self.gen.endFeature() + # Generate spirv elements + for s in spirvexts: + self.generateSpirv(s, self.spirvextdict) + for s in spirvcaps: + self.generateSpirv(s, self.spirvcapdict) self.gen.endFile() def apiReset(self): diff --git a/externals/Vulkan-Headers/registry/spec_tools/util.py b/externals/Vulkan-Headers/registry/spec_tools/util.py index ce11fd74f..b89067986 100755 --- a/externals/Vulkan-Headers/registry/spec_tools/util.py +++ b/externals/Vulkan-Headers/registry/spec_tools/util.py @@ -1,6 +1,6 @@ """Utility functions not closely tied to other spec_tools types.""" -# Copyright (c) 2018-2019 Collabora, Ltd. -# Copyright (c) 2013-2020 The Khronos Group Inc. +# Copyright 2018-2019 Collabora, Ltd. +# Copyright 2013-2021 The Khronos Group Inc. 
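# For orientation: the SPIR-V plumbing added to reg.py above terminates in
# the generator's genSpirv() hook. A minimal downstream override might look
# like this (sketch only; the class name is hypothetical, and
# SpirvCapabilityOutputGenerator in the spec toolchain is the real client):
#
#     class SpirvListingGenerator(OutputGenerator):
#         def genSpirv(self, spirv, name, alias):
#             # 'spirv' is a SpirvInfo wrapping the <spirvextension> or
#             # <spirvcapability> element; reg.py always passes alias=None.
#             write('// SPIR-V requirement:', name, file=self.outFile)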
# # SPDX-License-Identifier: Apache-2.0 diff --git a/externals/Vulkan-Headers/registry/validusage.json b/externals/Vulkan-Headers/registry/validusage.json index 7c30a66ef..f9864c07d 100755 --- a/externals/Vulkan-Headers/registry/validusage.json +++ b/externals/Vulkan-Headers/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.2.145", - "comment": "from git branch: github-master commit: 1e4e9cad5c761f05463ab65aa95e38b0dac534bc", - "date": "2020-06-20 14:48:00Z" + "api version": "1.2.180", + "comment": "from git branch: github-main commit: b4e8cd820b2487bc892b391fb26b49501473a6a6", + "date": "2021-06-06 12:24:39Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -70,7 +70,7 @@ }, { "vuid": "VUID-VkInstanceCreateInfo-sType-unique", - "text": " The sType value of each struct in the pNext chain must be unique" + "text": " The sType value of each struct in the pNext chain must be unique, with the exception of structures of type VkDebugUtilsMessengerCreateInfoEXT" }, { "vuid": "VUID-VkInstanceCreateInfo-flags-zerobitmask", @@ -134,7 +134,7 @@ "core": [ { "vuid": "VUID-VkApplicationInfo-apiVersion-04010", - "text": " If apiVersion is not 0, then it must be greater or equal to VK_API_VERSION_1_0" + "text": " If apiVersion is not 0, then it must be greater than or equal to VK_API_VERSION_1_0" }, { "vuid": "VUID-VkApplicationInfo-sType-sType", @@ -226,7 +226,7 @@ }, { "vuid": "VUID-VkPhysicalDeviceProperties2-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceCustomBorderColorPropertiesEXT, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceInlineUniformBlockPropertiesEXT, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePointClippingProperties, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceRobustness2PropertiesEXT, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, or VkPhysicalDeviceVulkan12Properties" + "text": " Each pNext member of any structure (including this one) in the pNext chain 
must be either NULL or a pointer to a valid instance of VkPhysicalDeviceAccelerationStructurePropertiesKHR, VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceCustomBorderColorPropertiesEXT, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, VkPhysicalDeviceFragmentShadingRatePropertiesKHR, VkPhysicalDeviceIDProperties, VkPhysicalDeviceInlineUniformBlockPropertiesEXT, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePointClippingProperties, VkPhysicalDevicePortabilitySubsetPropertiesKHR, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDeviceProvokingVertexPropertiesEXT, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingPipelinePropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceRobustness2PropertiesEXT, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, or VkPhysicalDeviceVulkan12Properties" }, { "vuid": "VUID-VkPhysicalDeviceProperties2-sType-unique", @@ -314,7 +314,7 @@ }, { "vuid": "VUID-VkQueueFamilyProperties2-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkQueueFamilyCheckpointPropertiesNV" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkQueueFamilyCheckpointProperties2NV, VkQueueFamilyCheckpointPropertiesNV, VkQueueFamilyGlobalPriorityPropertiesEXT, or VkVideoQueueFamilyProperties2KHR" }, { "vuid": "VUID-VkQueueFamilyProperties2-sType-unique", @@ -322,6 +322,26 @@ } ] }, + "VkQueueFamilyGlobalPriorityPropertiesEXT": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_global_priority_query)": [ + { + "vuid": "VUID-VkQueueFamilyGlobalPriorityPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT" + }, + { + "vuid": "VUID-VkQueueFamilyGlobalPriorityPropertiesEXT-priorities-parameter", + "text": " Any given element of priorities must be a valid VkQueueGlobalPriorityEXT value" + } + ] + }, + "VkQueueFamilyCheckpointProperties2NV": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_NV_device_diagnostic_checkpoints)+(VK_KHR_synchronization2)": [ + { + "vuid": 
"VUID-VkQueueFamilyCheckpointProperties2NV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV" + } + ] + }, "VkQueueFamilyCheckpointPropertiesNV": { "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_NV_device_diagnostic_checkpoints)": [ { @@ -463,6 +483,12 @@ "text": " ppEnabledExtensionNames must not contain both VK_KHR_buffer_device_address and VK_EXT_buffer_device_address" } ], + "(VK_VERSION_1_2)+(VK_EXT_buffer_device_address)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-pNext-04748", + "text": " if the pNext chain includes a VkPhysicalDeviceVulkan12Features structure and VkPhysicalDeviceVulkan12Features::bufferDeviceAddress is VK_TRUE, ppEnabledExtensionNames must not contain VK_EXT_buffer_device_address" + } + ], "(VK_VERSION_1_2)": [ { "vuid": "VUID-VkDeviceCreateInfo-pNext-02829", @@ -473,6 +499,12 @@ "text": " If the pNext chain includes a VkPhysicalDeviceVulkan12Features structure, then it must not include a VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceBufferDeviceAddressFeatures, or VkPhysicalDeviceVulkanMemoryModelFeatures structure" } ], + "(VK_VERSION_1_2)+(VK_KHR_shader_draw_parameters)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensions-04476", + "text": " If ppEnabledExtensions contains \"VK_KHR_shader_draw_parameters\" and the pNext chain includes a VkPhysicalDeviceVulkan11Features structure, then VkPhysicalDeviceVulkan11Features::shaderDrawParameters must be VK_TRUE" + } + ], "(VK_VERSION_1_2)+(VK_KHR_draw_indirect_count)": [ { "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensions-02831", @@ -503,6 +535,56 @@ "text": " If ppEnabledExtensions contains \"VK_EXT_shader_viewport_index_layer\" and the pNext chain includes a VkPhysicalDeviceVulkan12Features structure, then VkPhysicalDeviceVulkan12Features::shaderOutputViewportIndex and VkPhysicalDeviceVulkan12Features::shaderOutputLayer must both be VK_TRUE" } ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-pProperties-04451", + "text": " If the [VK_KHR_portability_subset] extension is included in pProperties of vkEnumerateDeviceExtensionProperties, ppEnabledExtensions must include \"VK_KHR_portability_subset\"." 
+ } + ], + "(VK_KHR_fragment_shading_rate,VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-shadingRateImage-04478", + "text": " If shadingRateImage is enabled, pipelineFragmentShadingRate must not be enabled" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-shadingRateImage-04479", + "text": " If shadingRateImage is enabled, primitiveFragmentShadingRate must not be enabled" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-shadingRateImage-04480", + "text": " If shadingRateImage is enabled, attachmentFragmentShadingRate must not be enabled" + } + ], + "(VK_KHR_fragment_shading_rate,VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-fragmentDensityMap-04481", + "text": " If fragmentDensityMap is enabled, pipelineFragmentShadingRate must not be enabled" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-fragmentDensityMap-04482", + "text": " If fragmentDensityMap is enabled, primitiveFragmentShadingRate must not be enabled" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-fragmentDensityMap-04483", + "text": " If fragmentDensityMap is enabled, attachmentFragmentShadingRate must not be enabled" + } + ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-None-04896", + "text": " If sparseImageInt64Atomics is enabled, shaderImageInt64Atomics must be enabled" + } + ], + "(VK_EXT_shader_atomic_float)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-None-04897", + "text": " If sparseImageFloat32Atomics is enabled, shaderImageFloat32Atomics must be enabled" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-None-04898", + "text": " If sparseImageFloat32AtomicAdd is enabled, shaderImageFloat32AtomicAdd must be enabled" + } + ], "core": [ { "vuid": "VUID-VkDeviceCreateInfo-sType-sType", @@ -510,11 +592,11 @@ }, { "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfoEXT, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, 
VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePrivateDataFeaturesEXT, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRayTracingFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkanMemoryModelFeatures, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDeviceMemoryReportCreateInfoEXT, VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfoEXT, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice4444FormatsFeaturesEXT, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceAccelerationStructureFeaturesKHR, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceColorWriteEnableFeaturesEXT, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, VkPhysicalDeviceFragmentShadingRateFeaturesKHR, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImageRobustnessFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, 
VkPhysicalDeviceInheritedViewportScissorFeaturesNV, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePortabilitySubsetFeaturesKHR, VkPhysicalDevicePrivateDataFeaturesEXT, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceProvokingVertexFeaturesEXT, VkPhysicalDeviceRayQueryFeaturesKHR, VkPhysicalDeviceRayTracingPipelineFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceSynchronization2FeaturesKHR, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkanMemoryModelFeatures, VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, or VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR" }, { "vuid": "VUID-VkDeviceCreateInfo-sType-unique", - "text": " The sType value of each struct in the pNext chain must be unique, with the exception of structures of type VkDevicePrivateDataCreateInfoEXT" + "text": " The sType value of each struct in the pNext chain must be unique, with the exception of structures of type VkDeviceDeviceMemoryReportCreateInfoEXT or VkDevicePrivateDataCreateInfoEXT" }, { "vuid": "VUID-VkDeviceCreateInfo-flags-zerobitmask", @@ -590,6 +672,38 @@ } ] }, + "VkDeviceDeviceMemoryReportCreateInfoEXT": { + "(VK_EXT_device_memory_report)": [ + { + "vuid": "VUID-VkDeviceDeviceMemoryReportCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkDeviceDeviceMemoryReportCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkDeviceDeviceMemoryReportCreateInfoEXT-pfnUserCallback-parameter", + "text": " pfnUserCallback must be a valid PFN_vkDeviceMemoryReportCallbackEXT value" + }, + { + "vuid": 
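The VkDeviceDeviceMemoryReportCreateInfoEXT requirements added here (flags must be 0, pfnUserCallback must be a valid PFN_vkDeviceMemoryReportCallbackEXT) are easier to see in use; below is a hypothetical sketch, assuming VK_EXT_device_memory_report is enabled and the resulting structure is chained into VkDeviceCreateInfo::pNext.

#include <vulkan/vulkan.h>
#include <cstdio>

// Sketch: a driver memory-report callback plus the create-info that would be
// chained at device creation. Names are illustrative.
static VKAPI_ATTR void VKAPI_CALL MemoryReportCallback(
    const VkDeviceMemoryReportCallbackDataEXT* data, void* user_data) {
    (void)user_data;
    std::printf("memory event type=%d size=%llu\n", static_cast<int>(data->type),
                static_cast<unsigned long long>(data->size));
}

VkDeviceDeviceMemoryReportCreateInfoEXT MakeMemoryReportInfo(void* user_data) {
    VkDeviceDeviceMemoryReportCreateInfoEXT info{};
    info.sType = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT;
    info.flags = 0;                              // flags must be 0
    info.pfnUserCallback = MemoryReportCallback; // must be a valid callback
    info.pUserData = user_data;                  // must be a pointer value
    return info;
}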
"VUID-VkDeviceDeviceMemoryReportCreateInfoEXT-pUserData-parameter", + "text": " pUserData must be a pointer value" + } + ] + }, + "VkDeviceMemoryReportCallbackDataEXT": { + "(VK_EXT_device_memory_report)": [ + { + "vuid": "VUID-VkDeviceMemoryReportCallbackDataEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT" + }, + { + "vuid": "VUID-VkDeviceMemoryReportCallbackDataEXT-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, "VkDevicePrivateDataCreateInfoEXT": { "(VK_EXT_private_data)": [ { @@ -882,16 +996,12 @@ }, { "vuid": "VUID-vkAllocateCommandBuffers-pAllocateInfo::commandBufferCount-arraylength", - "text": " The value referenced by pAllocateInfo->commandBufferCount must be greater than 0" + "text": " pAllocateInfo->commandBufferCount must be greater than 0" } ] }, "VkCommandBufferAllocateInfo": { "core": [ - { - "vuid": "VUID-VkCommandBufferAllocateInfo-commandBufferCount-00044", - "text": " commandBufferCount must be greater than 0" - }, { "vuid": "VUID-VkCommandBufferAllocateInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO" @@ -978,7 +1088,7 @@ }, { "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00052", - "text": " If commandBuffer is a secondary command buffer and either the occlusionQueryEnable member of the pInheritanceInfo member of pBeginInfo is VK_FALSE, or the precise occlusion queries feature is not enabled, the queryFlags member of the pInheritanceInfo member pBeginInfo must not contain VK_QUERY_CONTROL_PRECISE_BIT" + "text": " If commandBuffer is a secondary command buffer and either the occlusionQueryEnable member of the pInheritanceInfo member of pBeginInfo is VK_FALSE, or the precise occlusion queries feature is not enabled, then pBeginInfo->pInheritanceInfo->queryFlags must not contain VK_QUERY_CONTROL_PRECISE_BIT" }, { "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-02840", @@ -1054,7 +1164,7 @@ }, { "vuid": "VUID-VkCommandBufferInheritanceInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkCommandBufferInheritanceConditionalRenderingInfoEXT or VkCommandBufferInheritanceRenderPassTransformInfoQCOM" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkCommandBufferInheritanceConditionalRenderingInfoEXT, VkCommandBufferInheritanceRenderPassTransformInfoQCOM, or VkCommandBufferInheritanceViewportScissorInfoNV" }, { "vuid": "VUID-VkCommandBufferInheritanceInfo-sType-unique", @@ -1090,6 +1200,34 @@ } ] }, + "VkCommandBufferInheritanceViewportScissorInfoNV": { + "(VK_NV_inherited_viewport_scissor)": [ + { + "vuid": "VUID-VkCommandBufferInheritanceViewportScissorInfoNV-viewportScissor2D-04782", + "text": " If the inherited viewport scissor feature is not enabled, viewportScissor2D must be VK_FALSE" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceViewportScissorInfoNV-viewportScissor2D-04783", + "text": " If the multiple viewports feature is not enabled and viewportScissor2D is VK_TRUE, then viewportDepthCount must be 1" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceViewportScissorInfoNV-viewportScissor2D-04784", + "text": " If viewportScissor2D is VK_TRUE, then viewportDepthCount must be greater than 0" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceViewportScissorInfoNV-viewportScissor2D-04785", + "text": " If viewportScissor2D is VK_TRUE, then pViewportDepths must 
be a valid pointer to an array of viewportDepthCount valid VkViewport structures, except any requirements on x, y, width, and height do not apply." + }, + { + "vuid": "VUID-VkCommandBufferInheritanceViewportScissorInfoNV-viewportScissor2D-04786", + "text": " If viewportScissor2D is VK_TRUE, then the command buffer must be recorded with the VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT." + }, + { + "vuid": "VUID-VkCommandBufferInheritanceViewportScissorInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV" + } + ] + }, "vkEndCommandBuffer": { "core": [ { @@ -1128,6 +1266,260 @@ } ] }, + "vkQueueSubmit2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkQueueSubmit2KHR-fence-04894", + "text": " If fence is not VK_NULL_HANDLE, fence must be unsignaled" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-fence-04895", + "text": " If fence is not VK_NULL_HANDLE, fence must not be associated with any other queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-synchronization2-03866", + "text": " The synchronization2 feature must be enabled" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03867", + "text": " If a command recorded into the commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits referenced an VkEvent, that event must not be referenced by a command that has been submitted to another queue and is still in the pending state" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03868", + "text": " The semaphore member of any binary semaphore element of the pSignalSemaphoreInfos member of any element of pSubmits must be unsignaled when the semaphore signal operation it defines is executed on the device" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-stageMask-03869", + "text": " The stageMask member of any element of the pSignalSemaphoreInfos member of any element of pSubmits must only include pipeline stages that are supported by the queue family which queue belongs to" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-stageMask-03870", + "text": " The stageMask member of any element of the pWaitSemaphoreInfos member of any element of pSubmits must only include pipeline stages that are supported by the queue family which queue belongs to" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03871", + "text": " When a semaphore wait operation for a binary semaphore is executed, as defined by the semaphore member of any element of the pWaitSemaphoreInfos member of any element of pSubmits, there must be no other queues waiting on the same semaphore" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03872", + "text": " The semaphore member of any element of the pWaitSemaphoreInfos member of any element of pSubmits must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03874", + "text": " The commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits must be in the pending or executable state" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03875", + "text": " If a command recorded into the commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, it must not be in the pending state" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03876", + "text": " Any 
secondary command buffers recorded into the commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits must be in the pending or executable state" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03877", + "text": " If any secondary command buffers recorded into the commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, it must not be in the pending state" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03878", + "text": " The commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits must have been allocated from a VkCommandPool that was created for the same queue family queue belongs to" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03879", + "text": " If a command recorded into the commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits includes a Queue Family Transfer Acquire Operation, there must exist a previously submitted Queue Family Transfer Release Operation on a queue in the queue family identified by the acquire operation, with parameters matching the acquire operation as defined in the definition of such acquire operations, and which happens before the acquire operation" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-queue-parameter", + "text": " queue must be a valid VkQueue handle" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-pSubmits-parameter", + "text": " If submitCount is not 0, pSubmits must be a valid pointer to an array of submitCount valid VkSubmitInfo2KHR structures" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-fence-parameter", + "text": " If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-vkQueueSubmit2KHR-commonparent", + "text": " Both of fence, and queue that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_timeline_semaphore)": [ + { + "vuid": "VUID-vkQueueSubmit2KHR-semaphore-03873", + "text": " Any semaphore member of any element of the pWaitSemaphoreInfos member of any element of pSubmits that was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR must reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) must have also been submitted for execution" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_performance_query)": [ + { + "vuid": "VUID-vkQueueSubmit2KHR-commandBuffer-03880", + "text": " If a command recorded into the commandBuffer member of any element of the pCommandBufferInfos member of any element of pSubmits was a vkCmdBeginQuery whose queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the profiling lock must have been held continuously on the VkDevice that queue was retrieved from, throughout recording of those command buffers" + } + ] + }, + "VkSubmitInfo2KHR": { + "(VK_KHR_synchronization2)+(VK_KHR_timeline_semaphore)": [ + { + "vuid": "VUID-VkSubmitInfo2KHR-semaphore-03881", + "text": " If the same semaphore is used as the semaphore member of both an element of pSignalSemaphoreInfos and pWaitSemaphoreInfos, and that semaphore is a timeline semaphore, the value member of the pSignalSemaphoreInfos element must be greater than the value member of the pWaitSemaphoreInfos element" + }, + { + "vuid": 
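To make the vkQueueSubmit2KHR and VkSubmitInfo2KHR requirements above concrete, a sketch of a single submission follows. It assumes the synchronization2 feature was enabled at device creation, all handles are valid and from the same VkDevice, and the vkQueueSubmit2KHR entry point resolves (in practice it is fetched via vkGetDeviceProcAddr); the chosen stage masks are illustrative.

#include <vulkan/vulkan.h>

VkResult SubmitOne(VkQueue queue, VkCommandBuffer cmd,
                   VkSemaphore wait, VkSemaphore signal, VkFence fence) {
    VkSemaphoreSubmitInfoKHR wait_info{};
    wait_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR;
    wait_info.semaphore = wait;
    wait_info.stageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;
    wait_info.deviceIndex = 0;  // must be 0 unless created on a device group

    VkSemaphoreSubmitInfoKHR signal_info = wait_info;
    signal_info.semaphore = signal;  // binary semaphore: must be unsignaled when signaled
    signal_info.stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR;

    VkCommandBufferSubmitInfoKHR cmd_info{};
    cmd_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR;
    cmd_info.commandBuffer = cmd;  // must not be VK_COMMAND_BUFFER_LEVEL_SECONDARY

    VkSubmitInfo2KHR submit{};
    submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR;
    submit.waitSemaphoreInfoCount = 1;
    submit.pWaitSemaphoreInfos = &wait_info;
    submit.commandBufferInfoCount = 1;
    submit.pCommandBufferInfos = &cmd_info;
    submit.signalSemaphoreInfoCount = 1;
    submit.pSignalSemaphoreInfos = &signal_info;

    return vkQueueSubmit2KHR(queue, 1, &submit, fence);  // fence must be unsignaled
}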
"VUID-VkSubmitInfo2KHR-semaphore-03882", + "text": " If the semaphore member of any element of pSignalSemaphoreInfos is a timeline semaphore, the value member of that element must have a value greater than the current value of the semaphore when the semaphore signal operation is executed" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-semaphore-03883", + "text": " If the semaphore member of any element of pSignalSemaphoreInfos is a timeline semaphore, the value member of that element must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-semaphore-03884", + "text": " If the semaphore member of any element of pWaitSemaphoreInfos is a timeline semaphore, the value member of that element must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkSubmitInfo2KHR-flags-03885", + "text": " If the protected memory feature is not enabled, flags must not include VK_SUBMIT_PROTECTED_BIT_KHR" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-flags-03886", + "text": " If flags includes VK_SUBMIT_PROTECTED_BIT_KHR, all elements of pCommandBuffers must be protected command buffers" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-flags-03887", + "text": " If flags does not include VK_SUBMIT_PROTECTED_BIT_KHR, each element of pCommandBuffers must not be a protected command buffer" + } + ], + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkSubmitInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPerformanceQuerySubmitInfoKHR, VkWin32KeyedMutexAcquireReleaseInfoKHR, or VkWin32KeyedMutexAcquireReleaseInfoNV" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-flags-parameter", + "text": " flags must be a valid combination of VkSubmitFlagBitsKHR values" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-pWaitSemaphoreInfos-parameter", + "text": " If waitSemaphoreInfoCount is not 0, pWaitSemaphoreInfos must be a valid pointer to an array of waitSemaphoreInfoCount valid VkSemaphoreSubmitInfoKHR structures" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-pCommandBufferInfos-parameter", + "text": " If commandBufferInfoCount is not 0, pCommandBufferInfos must be a valid pointer to an array of commandBufferInfoCount valid VkCommandBufferSubmitInfoKHR structures" + }, + { + "vuid": "VUID-VkSubmitInfo2KHR-pSignalSemaphoreInfos-parameter", + "text": " If signalSemaphoreInfoCount is not 0, pSignalSemaphoreInfos must be a valid pointer to an array of signalSemaphoreInfoCount valid VkSemaphoreSubmitInfoKHR structures" + } + ] + }, + "VkSemaphoreSubmitInfoKHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03930", + 
"text": " If the tessellation shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-device-03888", + "text": " If the device that semaphore was created on is not a device group, deviceIndex must be 0" + }, + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR" + }, + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-semaphore-parameter", + "text": " semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-parameter", + "text": " stageMask must be a valid combination of VkPipelineStageFlagBits2KHR values" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03931", + "text": " If the conditional rendering feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03935", + "text": " If the task shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-stageMask-03936", + "text": " If the shading rate image feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_device_group_creation,VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkSemaphoreSubmitInfoKHR-device-03889", + "text": " If the device that semaphore was created on is a device group, deviceIndex must be a valid device index" + } + ] + }, + "VkCommandBufferSubmitInfoKHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkCommandBufferSubmitInfoKHR-commandBuffer-03890", + "text": " commandBuffer must not have been allocated with VK_COMMAND_BUFFER_LEVEL_SECONDARY" + }, + { + "vuid": "VUID-VkCommandBufferSubmitInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR" + }, + { + "vuid": "VUID-VkCommandBufferSubmitInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCommandBufferSubmitInfoKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_device_group_creation,VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkCommandBufferSubmitInfoKHR-deviceMask-03891", + "text": " If 
deviceMask is not 0, it must be a valid device mask" + } + ] + }, "vkQueueSubmit": { "core": [ { @@ -1148,7 +1540,7 @@ }, { "vuid": "VUID-vkQueueSubmit-pSignalSemaphores-00067", - "text": " Each element of the pSignalSemaphores member of any element of pSubmits must be unsignaled when the semaphore signal operation it defines is executed on the device" + "text": " Each binary semaphore element of the pSignalSemaphores member of any element of pSubmits must be unsignaled when the semaphore signal operation it defines is executed on the device" }, { "vuid": "VUID-vkQueueSubmit-pWaitSemaphores-00068", @@ -1182,6 +1574,10 @@ "vuid": "VUID-vkQueueSubmit-pSubmits-02808", "text": " Any resource created with VK_SHARING_MODE_EXCLUSIVE that is read by an operation specified by pSubmits must not be owned by any queue family other than the one which queue belongs to, at the time it is executed" }, + { + "vuid": "VUID-vkQueueSubmit-pSubmits-04626", + "text": " Any resource created with VK_SHARING_MODE_CONCURRENT that is accessed by an operation specified by pSubmits must have included the queue family of queue at resource creation time" + }, { "vuid": "VUID-vkQueueSubmit-queue-parameter", "text": " queue must be a valid VkQueue handle" @@ -1288,15 +1684,15 @@ }, { "vuid": "VUID-VkSubmitInfo-pSignalSemaphores-03242", - "text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value greater than the current value of the semaphore when the semaphore signal operation is executed" + "text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value greater than the current value of the semaphore when the semaphore signal operation is executed" }, { "vuid": "VUID-VkSubmitInfo-pWaitSemaphores-03243", - "text": " For each element of pWaitSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pWaitSemaphoreValues must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" + "text": " For each element of pWaitSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pWaitSemaphoreValues must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" }, { "vuid": "VUID-VkSubmitInfo-pSignalSemaphores-03244", - "text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" + "text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value which does not differ from the current value of the semaphore or 
the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" } ], "(VK_NV_mesh_shader)": [ @@ -1308,6 +1704,16 @@ "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-02090", "text": " If the task shaders feature is not enabled, each element of pWaitDstStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV" } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkSubmitInfo-pNext-04120", + "text": " If the pNext chain of this structure does not include a VkProtectedSubmitInfo structure with protectedSubmit set to VK_TRUE, then each element of the pCommandBuffers array must be an unprotected command buffer" + }, + { + "vuid": "VUID-VkSubmitInfo-pNext-04148", + "text": " If the pNext chain of this structure includes a VkProtectedSubmitInfo structure with protectedSubmit set to VK_TRUE, then each element of the pCommandBuffers array must be a protected command buffer" + } ] }, "VkTimelineSemaphoreSubmitInfo": { @@ -1330,11 +1736,11 @@ "(VK_KHR_external_semaphore_win32)": [ { "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-waitSemaphoreValuesCount-00079", - "text": " waitSemaphoreValuesCount must be the same value as VkSubmitInfo::waitSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure" + "text": " waitSemaphoreValuesCount must be the same value as VkSubmitInfo::waitSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure." }, { "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-signalSemaphoreValuesCount-00080", - "text": " signalSemaphoreValuesCount must be the same value as VkSubmitInfo::signalSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure" + "text": " signalSemaphoreValuesCount must be the same value as VkSubmitInfo::signalSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure." 
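The two VkSubmitInfo rules added just above (VUID-VkSubmitInfo-pNext-04120/04148) restate the protected/unprotected pairing that the removed VkProtectedSubmitInfo VUs used to carry. A minimal sketch of a conforming protected submission, assuming the protectedMemory feature is enabled and protected_cmd was allocated from a protected command pool (names illustrative):

#include <vulkan/vulkan.h>

VkResult SubmitProtected(VkQueue queue, VkCommandBuffer protected_cmd) {
    VkProtectedSubmitInfo protected_info{};
    protected_info.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
    protected_info.protectedSubmit = VK_TRUE;  // so every pCommandBuffers element
                                               // must be a protected command buffer

    VkSubmitInfo submit{};
    submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit.pNext = &protected_info;
    submit.commandBufferCount = 1;
    submit.pCommandBuffers = &protected_cmd;

    return vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);
}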
}, { "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-sType-sType", @@ -1424,18 +1830,6 @@ "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01816", "text": " If the protected memory feature is not enabled, protectedSubmit must not be VK_TRUE" }, - { - "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01817", - "text": " If protectedSubmit is VK_TRUE, then each element of the pCommandBuffers array must be a protected command buffer" - }, - { - "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01818", - "text": " If protectedSubmit is VK_FALSE, then each element of the pCommandBuffers array must be an unprotected command buffer" - }, - { - "vuid": "VUID-VkProtectedSubmitInfo-pNext-01819", - "text": " If the VkSubmitInfo::pNext chain does not include a VkProtectedSubmitInfo structure, then each element of the command buffer of the pCommandBuffers array must be an unprotected command buffer" - }, { "vuid": "VUID-VkProtectedSubmitInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO" @@ -2026,6 +2420,10 @@ }, "vkImportFenceWin32HandleKHR": { "(VK_KHR_external_fence_win32)": [ + { + "vuid": "VUID-vkImportFenceWin32HandleKHR-fence-04448", + "text": " fence must not be associated with any queue command that has not yet completed execution on that queue" + }, { "vuid": "VUID-vkImportFenceWin32HandleKHR-device-parameter", "text": " device must be a valid VkDevice handle" @@ -2048,11 +2446,11 @@ }, { "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01460", - "text": " If handleType is not 0 and handle is NULL, name must name a valid synchronization primitive of the type specified by handleType" + "text": " If handle is NULL, name must name a valid synchronization primitive of the type specified by handleType" }, { "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01461", - "text": " If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType" + "text": " If name is NULL, handle must be a valid handle of the type specified by handleType" }, { "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handle-01462", @@ -2081,10 +2479,6 @@ { "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-flags-parameter", "text": " flags must be a valid combination of VkFenceImportFlagBits values" - }, - { - "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-parameter", - "text": " If handleType is not 0, handleType must be a valid VkExternalFenceHandleTypeFlagBits value" } ] }, @@ -2178,14 +2572,6 @@ }, "VkSemaphoreTypeCreateInfo": { "(VK_VERSION_1_2,VK_KHR_timeline_semaphore)": [ - { - "vuid": "VUID-VkSemaphoreTypeCreateInfo-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO" - }, - { - "vuid": "VUID-VkSemaphoreTypeCreateInfo-semaphoreType-parameter", - "text": " semaphoreType must be a valid VkSemaphoreType value" - }, { "vuid": "VUID-VkSemaphoreTypeCreateInfo-timelineSemaphore-03252", "text": " If the timelineSemaphore feature is not enabled, semaphoreType must not equal VK_SEMAPHORE_TYPE_TIMELINE" @@ -2193,6 +2579,14 @@ { "vuid": "VUID-VkSemaphoreTypeCreateInfo-semaphoreType-03279", "text": " If semaphoreType is VK_SEMAPHORE_TYPE_BINARY, initialValue must be zero" + }, + { + "vuid": "VUID-VkSemaphoreTypeCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO" + }, + { + "vuid": "VUID-VkSemaphoreTypeCreateInfo-semaphoreType-parameter", + "text": " semaphoreType must be a valid VkSemaphoreType value" } ] }, @@ -2354,6 +2748,66 @@ } ] }, + 
"vkGetSemaphoreZirconHandleFUCHSIA": { + "(VK_FUCHSIA_external_semaphore)": [ + { + "vuid": "VUID-vkGetSemaphoreZirconHandleFUCHSIA-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetSemaphoreZirconHandleFUCHSIA-pGetZirconHandleInfo-parameter", + "text": " pGetZirconHandleInfo must be a valid pointer to a valid VkSemaphoreGetZirconHandleInfoFUCHSIA structure" + }, + { + "vuid": "VUID-vkGetSemaphoreZirconHandleFUCHSIA-pZirconHandle-parameter", + "text": " pZirconHandle must be a valid pointer to a zx_handle_t value" + } + ] + }, + "VkSemaphoreGetZirconHandleInfoFUCHSIA": { + "(VK_FUCHSIA_external_semaphore)": [ + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04758", + "text": " handleType must have been included in VkExportSemaphoreCreateInfo::handleTypes when semaphore’s current payload was created." + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-semaphore-04759", + "text": " semaphore must not currently have its payload replaced by an imported payload as described below in Importing Semaphore Payloads unless that imported payload’s handle type was included in VkExternalSemaphoreProperties::exportFromImportedHandleTypes for handleType." + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04760", + "text": " If handleType refers to a handle type with copy payload transference semantics, as defined below in Importing Semaphore Payloads, there must be no queue waiting on semaphore." + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04761", + "text": " If handleType refers to a handle type with copy payload transference semantics, semaphore must be signaled, or have an associated semaphore signal operation pending execution." + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04762", + "text": " handleType must be defined as a Zircon event handle." + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-semaphore-04763", + "text": " semaphore must have been created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_BINARY." 
+ }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA" + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-semaphore-parameter", + "text": " semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-parameter", + "text": " handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" + } + ] + }, "vkDestroySemaphore": { "core": [ { @@ -2522,11 +2976,11 @@ }, { "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01467", - "text": " If handleType is not 0 and handle is NULL, name must name a valid synchronization primitive of the type specified by handleType" + "text": " If handle is NULL, name must name a valid synchronization primitive of the type specified by handleType" }, { "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01468", - "text": " If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType" + "text": " If name is NULL, handle must be a valid handle of the type specified by handleType" }, { "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01469", @@ -2559,10 +3013,6 @@ { "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter", "text": " flags must be a valid combination of VkSemaphoreImportFlagBits values" - }, - { - "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-parameter", - "text": " If handleType is not 0, handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" } ], "(VK_KHR_external_semaphore_win32)+(VK_VERSION_1_2,VK_KHR_timeline_semaphore)": [ @@ -2638,7 +3088,71 @@ } ] }, + "vkImportSemaphoreZirconHandleFUCHSIA": { + "(VK_FUCHSIA_external_semaphore)": [ + { + "vuid": "VUID-vkImportSemaphoreZirconHandleFUCHSIA-semaphore-04764", + "text": " semaphore must not be associated with any queue command that has not yet completed execution on that queue." + }, + { + "vuid": "VUID-vkImportSemaphoreZirconHandleFUCHSIA-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkImportSemaphoreZirconHandleFUCHSIA-pImportSemaphoreZirconHandleInfo-parameter", + "text": " pImportSemaphoreZirconHandleInfo must be a valid pointer to a valid VkImportSemaphoreZirconHandleInfoFUCHSIA structure" + } + ] + }, + "VkImportSemaphoreZirconHandleInfoFUCHSIA": { + "(VK_FUCHSIA_external_semaphore)": [ + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-handleType-04765", + "text": " handleType must be a value included in the Handle Types Supported by VkImportSemaphoreZirconHandleInfoFUCHSIA table." + }, + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-zirconHandle-04766", + "text": " zirconHandle must obey any requirements listed for handleType in external semaphore handle types compatibility." + }, + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-zirconHandle-04767", + "text": " zirconHandle must have ZX_RIGHTS_BASIC and ZX_RIGHTS_SIGNAL rights." 
+ }, + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA" + }, + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-semaphore-parameter", + "text": " semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-flags-parameter", + "text": " flags must be a valid combination of VkSemaphoreImportFlagBits values" + }, + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-handleType-parameter", + "text": " handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" + } + ], + "(VK_FUCHSIA_external_semaphore)+(VK_VERSION_1_2,VK_KHR_timeline_semaphore)": [ + { + "vuid": "VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-semaphoreType-04768", + "text": " The VkSemaphoreTypeCreateInfo::semaphoreType field must not be VK_SEMAPHORE_TYPE_TIMELINE." + } + ] + }, "vkCreateEvent": { + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-vkCreateEvent-events-04468", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::events is VK_FALSE, then the implementation does not support events, and vkCreateEvent must not be used." + } + ], "core": [ { "vuid": "VUID-vkCreateEvent-device-parameter", @@ -2669,8 +3183,8 @@ "text": " pNext must be NULL" }, { - "vuid": "VUID-VkEventCreateInfo-flags-zerobitmask", - "text": " flags must be 0" + "vuid": "VUID-VkEventCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkEventCreateFlagBits values" } ] }, @@ -2707,6 +3221,12 @@ ] }, "vkGetEventStatus": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkGetEventStatus-event-03940", + "text": " event must not have been created with VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR" + } + ], "core": [ { "vuid": "VUID-vkGetEventStatus-device-parameter", @@ -2723,6 +3243,12 @@ ] }, "vkSetEvent": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkSetEvent-event-03941", + "text": " event must not have been created with VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR" + } + ], "core": [ { "vuid": "VUID-vkSetEvent-device-parameter", @@ -2741,8 +3267,8 @@ "vkResetEvent": { "core": [ { - "vuid": "VUID-vkResetEvent-event-01148", - "text": " event must not be waited on by a vkCmdWaitEvents command that is currently executing" + "vuid": "VUID-vkResetEvent-event-03821", + "text": " There must be an execution dependency between vkCmdResetEvent and the execution of any vkCmdWaitEvents that includes event in its pEvents parameter" }, { "vuid": "VUID-vkResetEvent-device-parameter", @@ -2756,6 +3282,98 @@ "vuid": "VUID-vkResetEvent-event-parent", "text": " event must have been created, allocated, or retrieved from device" } + ], + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkResetEvent-event-03822", + "text": " There must be an execution dependency between vkCmdResetEvent and the execution of any vkCmdWaitEvents2KHR that includes event in its pEvents parameter" + }, + { + "vuid": "VUID-vkResetEvent-event-03823", + "text": " event must not have been created with VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR" + } + ] + }, + "vkCmdSetEvent2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdSetEvent2KHR-synchronization2-03824", + "text": " The synchronization2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-dependencyFlags-03825", + "text": " The 
dependencyFlags member of pDependencyInfo must be 0" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-srcStageMask-03827", + "text": " The srcStageMask member of any element of the pMemoryBarriers, pBufferMemoryBarriers, or pImageMemoryBarriers members of pDependencyInfo must only include pipeline stages valid for the queue family that was used to create the command pool that commandBuffer was allocated from" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-dstStageMask-03828", + "text": " The dstStageMask member of any element of the pMemoryBarriers, pBufferMemoryBarriers, or pImageMemoryBarriers members of pDependencyInfo must only include pipeline stages valid for the queue family that was used to create the command pool that commandBuffer was allocated from" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-event-parameter", + "text": " event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-pDependencyInfo-parameter", + "text": " pDependencyInfo must be a valid pointer to a valid VkDependencyInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdSetEvent2KHR-commonparent", + "text": " Both of commandBuffer, and event must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCmdSetEvent2KHR-commandBuffer-03826", + "text": " The current device mask of commandBuffer must include exactly one physical device." 
+ } + ] + }, + "VkDependencyInfoKHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkDependencyInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR" + }, + { + "vuid": "VUID-VkDependencyInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDependencyInfoKHR-dependencyFlags-parameter", + "text": " dependencyFlags must be a valid combination of VkDependencyFlagBits values" + }, + { + "vuid": "VUID-VkDependencyInfoKHR-pMemoryBarriers-parameter", + "text": " If memoryBarrierCount is not 0, pMemoryBarriers must be a valid pointer to an array of memoryBarrierCount valid VkMemoryBarrier2KHR structures" + }, + { + "vuid": "VUID-VkDependencyInfoKHR-pBufferMemoryBarriers-parameter", + "text": " If bufferMemoryBarrierCount is not 0, pBufferMemoryBarriers must be a valid pointer to an array of bufferMemoryBarrierCount valid VkBufferMemoryBarrier2KHR structures" + }, + { + "vuid": "VUID-VkDependencyInfoKHR-pImageMemoryBarriers-parameter", + "text": " If imageMemoryBarrierCount is not 0, pImageMemoryBarriers must be a valid pointer to an array of imageMemoryBarrierCount valid VkImageMemoryBarrier2KHR structures" + } ] }, "vkCmdSetEvent": { @@ -2843,6 +3461,12 @@ "text": " If the shading rate image feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV" } ], + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdSetEvent-stageMask-03937", + "text": " If the synchronization2 feature is not enabled, pname:stageMask must not be 0" + } + ], "(VK_VERSION_1_1,VK_KHR_device_group)": [ { "vuid": "VUID-vkCmdSetEvent-commandBuffer-01152", @@ -2850,6 +3474,106 @@ } ] }, + "vkCmdResetEvent2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03930", + "text": " If the tessellation shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-synchronization2-03829", + "text": " The synchronization2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03830", + "text": " stageMask must not include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-event-03831", + "text": " There must be an execution dependency between vkCmdResetEvent2KHR and the execution of any vkCmdWaitEvents that includes event in its pEvents parameter" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-event-03832", + "text": " There must be an execution dependency between vkCmdResetEvent2KHR and the execution of any vkCmdWaitEvents2KHR that includes event in its pEvents parameter" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-event-parameter", + "text": " event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-parameter", + "text": " stageMask must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-requiredbitmask", + "text": " stageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-recording", + "text": " commandBuffer 
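A sketch tying together vkCmdSetEvent2KHR and VkDependencyInfoKHR as specified above: dependencyFlags is 0 for the set, and the identical VkDependencyInfoKHR is passed to the matching vkCmdWaitEvents2KHR, per VUID-vkCmdWaitEvents2KHR-pEvents-03838. Assumes synchronization2 is enabled and the handles are valid; the barrier scopes are illustrative.

#include <vulkan/vulkan.h>

// Sketch: signal an event after transfer writes, wait on it before fragment
// reads, reusing one VkDependencyInfoKHR so set and wait match exactly.
void SetThenWaitEvent(VkCommandBuffer cmd, VkEvent event) {
    VkMemoryBarrier2KHR barrier{};
    barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR;
    barrier.srcStageMask = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR;
    barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
    barrier.dstStageMask = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
    barrier.dstAccessMask = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR;

    VkDependencyInfoKHR dep{};
    dep.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
    dep.dependencyFlags = 0;        // must be 0 for vkCmdSetEvent2KHR
    dep.memoryBarrierCount = 1;
    dep.pMemoryBarriers = &barrier;

    vkCmdSetEvent2KHR(cmd, event, &dep);
    // ... work that does not touch the guarded resources ...
    vkCmdWaitEvents2KHR(cmd, 1, &event, &dep);  // same dependency info as the set
}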
must be in the recording state" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-commonparent", + "text": " Both of commandBuffer, and event must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03931", + "text": " If the conditional rendering feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03935", + "text": " If the task shaders feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdResetEvent2KHR-stageMask-03936", + "text": " If the shading rate image feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCmdResetEvent2KHR-commandBuffer-03833", + "text": " commandBuffer’s current device mask must include exactly one physical device." 
+ } + ] + }, "vkCmdResetEvent": { "core": [ { @@ -2869,8 +3593,8 @@ "text": " stageMask must not include VK_PIPELINE_STAGE_HOST_BIT" }, { - "vuid": "VUID-vkCmdResetEvent-event-01156", - "text": " When this command executes, event must not be waited on by a vkCmdWaitEvents command that is currently executing" + "vuid": "VUID-vkCmdResetEvent-event-03834", + "text": " There must be an execution dependency between vkCmdResetEvent and the execution of any vkCmdWaitEvents that includes event in its pEvents parameter" }, { "vuid": "VUID-vkCmdResetEvent-commandBuffer-parameter", @@ -2939,6 +3663,16 @@ "text": " If the shading rate image feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV" } ], + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdResetEvent-stageMask-03937", + "text": " If the synchronization2 feature is not enabled, pname:stageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdResetEvent-event-03835", + "text": " There must be an execution dependency between vkCmdResetEvent and the execution of any vkCmdWaitEvents2KHR that includes event in its pEvents parameter" + } + ], "(VK_VERSION_1_1,VK_KHR_device_group)": [ { "vuid": "VUID-vkCmdResetEvent-commandBuffer-01157", @@ -2946,6 +3680,82 @@ } ] }, + "vkCmdWaitEvents2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdWaitEvents2KHR-synchronization2-03836", + "text": " The synchronization2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03837", + "text": " Members of pEvents must not have been signaled by vkCmdSetEvent" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03838", + "text": " For any element i of pEvents, if that event is signaled by vkCmdSetEvent2KHR, that command’s dependencyInfo parameter must be exactly equal to the ith element of pDependencyInfos" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03839", + "text": " For any element i of pEvents, if that event is signaled by vkSetEvent, barriers in the ith element of pDependencyInfos must include only host operations in their first synchronization scope" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03840", + "text": " For any element i of pEvents, if barriers in the ith element of pDependencyInfos include only host operations, the ith element of pEvents must be signaled before vkCmdWaitEvents2KHR is executed" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03841", + "text": " For any element i of pEvents, if barriers in the ith element of pDependencyInfos do not include host operations, the ith element of pEvents must be signaled by a corresponding vkCmdSetEvent2KHR that occurred earlier in submission order" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-srcStageMask-03842", + "text": " The srcStageMask member of any element of the pMemoryBarriers, pBufferMemoryBarriers, or pImageMemoryBarriers members of pDependencyInfos must either include only pipeline stages valid for the queue family that was used to create the command pool that commandBuffer was allocated from, or include only VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-dstStageMask-03843", + "text": " The dstStageMask member of any element of the pMemoryBarriers, pBufferMemoryBarriers, or pImageMemoryBarriers members of pDependencyInfos must only include pipeline stages valid for the queue family that was used to create the command pool that commandBuffer was allocated from" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-dependencyFlags-03844", + "text": " The 
dependencyFlags member of any element of pDependencyInfo must be 0" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-03845", + "text": " If pEvents includes one or more events that will be signaled by vkSetEvent after commandBuffer has been submitted to a queue, then vkCmdWaitEvents2KHR must not be called inside a render pass instance" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-03846", + "text": " commandBuffer’s current device mask must include exactly one physical device" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pEvents-parameter", + "text": " pEvents must be a valid pointer to an array of eventCount valid VkEvent handles" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-pDependencyInfos-parameter", + "text": " pDependencyInfos must be a valid pointer to an array of eventCount valid VkDependencyInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-eventCount-arraylength", + "text": " eventCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdWaitEvents2KHR-commonparent", + "text": " Both of commandBuffer, and the elements of pEvents must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, "vkCmdWaitEvents": { "core": [ { @@ -2998,7 +3808,7 @@ }, { "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01158", - "text": " srcStageMask must be the bitwise OR of the stageMask parameter used in previous calls to vkCmdSetEvent with any of the members of pEvents and VK_PIPELINE_STAGE_HOST_BIT if any of the members of pEvents was set using vkSetEvent" + "text": " srcStageMask must be the bitwise OR of the stageMask parameter used in previous calls to vkCmdSetEvent with any of the elements of pEvents and VK_PIPELINE_STAGE_HOST_BIT if any of the elements of pEvents was set using vkSetEvent" }, { "vuid": "VUID-vkCmdWaitEvents-pEvents-01163", @@ -3020,18 +3830,10 @@ "vuid": "VUID-vkCmdWaitEvents-srcStageMask-parameter", "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits values" }, - { - "vuid": "VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask", - "text": " srcStageMask must not be 0" - }, { "vuid": "VUID-vkCmdWaitEvents-dstStageMask-parameter", "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits values" }, - { - "vuid": "VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask", - "text": " dstStageMask must not be 0" - }, { "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-parameter", "text": " If memoryBarrierCount is not 0, pMemoryBarriers must be a valid pointer to an array of memoryBarrierCount valid VkMemoryBarrier structures" @@ -3119,6 +3921,20 @@ "text": " If the shading rate image feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV" } ], + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdWaitEvents-srcStageMask-03937", + "text": " If the synchronization2 feature is not enabled, pname:srcStageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdWaitEvents-dstStageMask-03937", + "text": " If the synchronization2 feature is not enabled, pname:dstStageMask must not be 0" + }, + { + "vuid": 
"VUID-vkCmdWaitEvents-pEvents-03847", + "text": " Elements of pEvents must not have been signaled by vkCmdSetEvent2KHR" + } + ], "(VK_VERSION_1_1,VK_KHR_device_group)": [ { "vuid": "VUID-vkCmdWaitEvents-commandBuffer-01167", @@ -3126,6 +3942,64 @@ } ] }, + "vkCmdPipelineBarrier2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-pDependencies-02285", + "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the render pass must have been created with at least one VkSubpassDependency instance in VkRenderPassCreateInfo::pDependencies that expresses a dependency from the current subpass to itself, with synchronization scopes and access scopes that are all supersets of the scopes defined in this command" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-bufferMemoryBarrierCount-01178", + "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, it must not include any buffer memory barriers" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-image-04073", + "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the image member of any image memory barrier included in this command must be an attachment used in the current subpass both as an input attachment, and as either a color or depth/stencil attachment" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-oldLayout-01181", + "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the oldLayout and newLayout members of any image memory barrier included in this command must be equal" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-srcQueueFamilyIndex-01182", + "text": " If fname:vkCmdPipelineBarrier2KHR is called within a render pass instance, the srcQueueFamilyIndex and dstQueueFamilyIndex members of any image memory barrier included in this command must be equal" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-synchronization2-03848", + "text": " The synchronization2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-srcStageMask-03849", + "text": " The srcStageMask member of any element of the pMemoryBarriers, pBufferMemoryBarriers, or pImageMemoryBarriers members of pDependencyInfo must only include pipeline stages valid for the queue family that was used to create the command pool that commandBuffer was allocated from" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-dstStageMask-03850", + "text": " The dstStageMask member of any element of the pMemoryBarriers, pBufferMemoryBarriers, or pImageMemoryBarriers members of pDependencyInfo must only include pipeline stages valid for the queue family that was used to create the command pool that commandBuffer was allocated from" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-pDependencyInfo-parameter", + "text": " pDependencyInfo must be a valid pointer to a valid VkDependencyInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdPipelineBarrier2KHR-dependencyFlags-01186", + "text": " If 
fname:vkCmdPipelineBarrier2KHR is called outside of a render pass instance, VK_DEPENDENCY_VIEW_LOCAL_BIT must not be included in the dependency flags" + } + ] + }, "vkCmdPipelineBarrier": { "core": [ { @@ -3299,6 +4173,16 @@ "text": " If the shading rate image feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV" } ], + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-03937", + "text": " If the synchronization2 feature is not enabled, pname:srcStageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-03937", + "text": " If the synchronization2 feature is not enabled, pname:dstStageMask must not be 0" + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdPipelineBarrier-dependencyFlags-01186", @@ -3306,6 +4190,378 @@ } ] }, + "VkMemoryBarrier2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03930", + "text": " If the tessellation shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03900", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03901", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INDEX_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03902", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03903", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03904", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_UNIFORM_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03905", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03906", + "text": " If pname:srcAccessMask 
includes VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03907", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03908", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03909", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03910", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03911", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03912", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03913", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03914", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFER_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03915", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": 
"VUID-VkMemoryBarrier2KHR-srcAccessMask-03916", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_HOST_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03917", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_HOST_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03930", + "text": " If the tessellation shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03900", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03901", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INDEX_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03902", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03903", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03904", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_UNIFORM_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03905", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03906", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03907", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the 
VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03908", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03909", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03910", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03911", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03912", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03913", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03914", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFER_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03915", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03916", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_HOST_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03917", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_HOST_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-sType-sType", + "text": " sType must be 
VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-parameter", + "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-parameter", + "text": " srcAccessMask must be a valid combination of VkAccessFlagBits2KHR values" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-parameter", + "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-parameter", + "text": " dstAccessMask must be a valid combination of VkAccessFlagBits2KHR values" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03931", + "text": " If the conditional rendering feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03918", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03931", + "text": " If the conditional rendering feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03918", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03919", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03919", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03920", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, 
VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-04747", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03922", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03920", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-04747", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03922", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03935", + "text": " If the task shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03935", + "text": " If the task shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcStageMask-03936", + "text": " If the shading rate image feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03923", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstStageMask-03936", + 
"text": " If the shading rate image feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03923", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03924", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03925", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03924", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03925", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_blend_operation_advanced)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03926", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03926", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_acceleration_structure,VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03927", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-03928", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03927", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-03928", + "text": " If 
pname:dstAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-04858", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-04859", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-04858", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-04859", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-04860", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-srcAccessMask-04861", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-04860", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkMemoryBarrier2KHR-dstAccessMask-04861", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + } + ] + }, "VkMemoryBarrier": { "core": [ { @@ -3326,6 +4582,426 @@ } ] }, + "VkBufferMemoryBarrier2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03930", + "text": " If the tessellation shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03900", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03901", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INDEX_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": 
"VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03902", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03903", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03904", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_UNIFORM_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03905", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03906", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03907", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03908", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03909", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03910", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03911", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03912", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include 
VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03913", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03914", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFER_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03915", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03916", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_HOST_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03917", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_HOST_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03930", + "text": " If the tessellation shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03900", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03901", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INDEX_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03902", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": 
"VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03903", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03904", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_UNIFORM_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03905", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03906", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03907", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03908", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03909", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03910", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03911", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03912", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03913", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, pname:dstStageMask must include 
VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03914", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFER_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03915", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03916", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_HOST_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03917", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_HOST_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-offset-01187", + "text": " offset must be less than the size of buffer" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-size-01188", + "text": " If size is not equal to VK_WHOLE_SIZE, size must be greater than 0" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-size-01189", + "text": " If size is not equal to VK_WHOLE_SIZE, size must be less than or equal to the size of buffer minus offset" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-01931", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03851", + "text": " If either srcStageMask or dstStageMask includes VK_PIPELINE_STAGE_2_HOST_BIT_KHR, srcQueueFamilyIndex and dstQueueFamilyIndex must be equal" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-parameter", + "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-parameter", + "text": " srcAccessMask must be a valid combination of VkAccessFlagBits2KHR values" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-parameter", + "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-parameter", + "text": " dstAccessMask must be a valid combination of VkAccessFlagBits2KHR values" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03931", + "text": 
" If the conditional rendering feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03918", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03931", + "text": " If the conditional rendering feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03918", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03919", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03919", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03920", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-04747", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03922", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or 
VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03920", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-04747", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03922", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03935", + "text": " If the task shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03935", + "text": " If the task shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcStageMask-03936", + "text": " If the shading rate image feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03923", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstStageMask-03936", + "text": " If the shading rate image feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03923", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03924", + "text": " If pname:srcAccessMask includes 
VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03925", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03924", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03925", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_blend_operation_advanced)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03926", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03926", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_acceleration_structure,VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03927", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-03928", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03927", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-03928", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-04858", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-04859", + "text": " If 
pname:srcAccessMask includes VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-04858", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-04859", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-04860", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcAccessMask-04861", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-04860", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-dstAccessMask-04861", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+!(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-04086", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must be valid queue families" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-srcQueueFamilyIndex-04087", + "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one must not be a special queue family reserved for external memory ownership transfers, as described in Queue Family Ownership Transfer" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-04088", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2KHR-buffer-04089", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" + } + ] + }, "VkBufferMemoryBarrier": { "core": [ { @@ -3363,8 +5039,8 @@ "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must be valid queue families" }, { - "vuid": "VUID-VkBufferMemoryBarrier-buffer-01190", - "text": " If buffer was created with a sharing mode of 
VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkBufferMemoryBarrier-synchronization2-03852", + "text": " If the synchronization2 feature is not enabled, and buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED" } ], "(VK_VERSION_1_1,VK_KHR_external_memory)": [ @@ -3374,15 +5050,571 @@ }, { "vuid": "VUID-VkBufferMemoryBarrier-buffer-04088", - "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is a special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" }, { "vuid": "VUID-VkBufferMemoryBarrier-buffer-04089", "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" }, { - "vuid": "VUID-VkBufferMemoryBarrier-buffer-01191", - "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkBufferMemoryBarrier-synchronization2-03853", + "text": " If the synchronization2 feature is not enabled, and buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED" + } + ] + }, + "VkImageMemoryBarrier2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03930", + "text": " If the tessellation shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03900", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03901", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INDEX_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03902", + "text": " If pname:srcAccessMask includes 
VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03903", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03904", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_UNIFORM_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03905", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03906", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03907", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03908", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03909", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADER_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03910", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03911", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03912", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR,
VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03913", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03914", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFER_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03915", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03916", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_HOST_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03917", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_HOST_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03929", + "text": " If the geometry shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03930", + "text": " If the tessellation shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03900", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03901", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INDEX_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03902", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03903", + "text": " If pname:dstAccessMask includes 
VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03904", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_UNIFORM_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03905", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03906", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03907", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03908", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03909", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADER_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR, or one of the VK_PIPELINE_STAGE_*_SHADER_BIT stages" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03910", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03911", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03912", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03913", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR, VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR,
VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03914", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFER_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03915", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COPY_BIT_KHR, VK_PIPELINE_STAGE_2_BLIT_BIT_KHR, VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR, VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR, VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03916", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_HOST_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03917", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_HOST_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_HOST_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01486", + "text": " subresourceRange.baseMipLevel must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01724", + "text": " If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01488", + "text": " subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-01725", + "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01932", + "text": " If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01208", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01209", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01210", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family 
ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01211", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01212", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01213", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01197", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, oldLayout must be VK_IMAGE_LAYOUT_UNDEFINED or the current layout of the image subresources affected by the barrier" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-newLayout-01198", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, newLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03854", + "text": " If either srcStageMask or dstStageMask includes VK_PIPELINE_STAGE_2_HOST_BIT_KHR, srcQueueFamilyIndex and dstQueueFamilyIndex must be equal" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03855", + "text": " If srcStageMask includes VK_PIPELINE_STAGE_2_HOST_BIT_KHR, and srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, oldLayout must be one of VK_IMAGE_LAYOUT_PREINITIALIZED, VK_IMAGE_LAYOUT_UNDEFINED, or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkSampleLocationsInfoEXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-parameter", + "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-parameter", + "text": " srcAccessMask must be a valid combination of VkAccessFlagBits2KHR values" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-parameter", + "text": " dstStageMask must be a 
valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-parameter", + "text": " dstAccessMask must be a valid combination of VkAccessFlagBits2KHR values" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-parameter", + "text": " oldLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-newLayout-parameter", + "text": " newLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-subresourceRange-parameter", + "text": " subresourceRange must be a valid VkImageSubresourceRange structure" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03931", + "text": " If the conditional rendering feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03918", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03931", + "text": " If the conditional rendering feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03918", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03919", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03932", + "text": " If the fragment density map feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03919", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03920", + "text": " If pname:srcAccessMask includes 
VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-04747", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03922", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03933", + "text": " If the transform feedback feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03920", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-04747", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03922", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03935", + "text": " If the task shaders feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03934", + "text": " If the mesh shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03935", + "text": " If the task shaders feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcStageMask-03936", + "text": " If the shading rate image feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03923", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, pname:srcStageMask must include 
VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstStageMask-03936", + "text": " If the shading rate image feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03923", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03924", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03925", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03924", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03925", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_blend_operation_advanced)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03926", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03926", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_acceleration_structure,VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03927", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-03928", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03927", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, pname:dstStageMask
must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-03928", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-04858", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-04859", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-04858", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-04859", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-04860", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcAccessMask-04861", + "text": " If pname:srcAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, pname:srcStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-04860", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-dstAccessMask-04861", + "text": " If pname:dstAccessMask includes VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, pname:dstStageMask must include VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01658", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-01659", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_2,VK_EXT_separate_depth_stencil_layouts)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04065", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue 
family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL then image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04066", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04067", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL then image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04068", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-03938", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, image must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT or VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-03939", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" + } + ], + "(VK_KHR_synchronization2)+(VK_KHR_fragment_shading_rate,VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-oldLayout-02088", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR then image must have been created with VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR set" + } + ], + "(VK_KHR_synchronization2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-02902", + "text": " If image has a color format, then the aspectMask member of subresourceRange must be VK_IMAGE_ASPECT_COLOR_BIT" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01671", + "text": " If image has a single-plane color 
format or is not disjoint, then the aspectMask member of subresourceRange must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01672", + "text": " If image has a multi-planar format and the image is disjoint, then the aspectMask member of subresourceRange must include either at least one of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, and VK_IMAGE_ASPECT_PLANE_2_BIT; or must include VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01673", + "text": " If image has a multi-planar format with only two planes, then the aspectMask member of subresourceRange must not include VK_IMAGE_ASPECT_PLANE_2_BIT" + } + ], + "(VK_KHR_synchronization2)+!(VK_VERSION_1_2,VK_KHR_separate_depth_stencil_layouts)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-01207", + "text": " If image has a depth/stencil format with both depth and stencil components, then the aspectMask member of subresourceRange must include both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_2,VK_KHR_separate_depth_stencil_layouts)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-03319", + "text": " If image has a depth/stencil format with both depth and stencil and the separateDepthStencilLayouts feature is enabled, then the aspectMask member of subresourceRange must include either or both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-03320", + "text": " If image has a depth/stencil format with both depth and stencil and the separateDepthStencilLayouts feature is not enabled, then the aspectMask member of subresourceRange must include both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT" + } + ], + "(VK_KHR_synchronization2)+!(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-04069", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must be valid queue families" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-srcQueueFamilyIndex-04070", + "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one must not be a special queue family reserved for external memory ownership transfers, as described in Queue Family Ownership Transfer" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-04071", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2KHR-image-04072", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" } ] }, @@ -3410,35 +5642,35 @@ }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01208", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and 
newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01209", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01210", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01211", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01212", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01213", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and 
oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01197", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, oldLayout must be VK_IMAGE_LAYOUT_UNDEFINED or the current layout of the image subresources affected by the barrier" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, oldLayout must be VK_IMAGE_LAYOUT_UNDEFINED or the current layout of the image subresources affected by the barrier" }, { "vuid": "VUID-VkImageMemoryBarrier-newLayout-01198", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, newLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, newLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED" }, { "vuid": "VUID-VkImageMemoryBarrier-sType-sType", @@ -3472,35 +5704,45 @@ "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01658", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01659", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" } ], "(VK_VERSION_1_2,VK_EXT_separate_depth_stencil_layouts)": [ { "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04065", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR then 
image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL then image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04066", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" }, { "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04067", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR then image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL then image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04068", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" } ], - "(VK_NV_shading_rate_image)": [ + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-03938", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, image must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT or VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-03939", + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout 
and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, image must have been created with at least one of VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_SAMPLED_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" + } + ], + "(VK_KHR_fragment_shading_rate,VK_NV_shading_rate_image)": [ { "vuid": "VUID-VkImageMemoryBarrier-oldLayout-02088", - "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define a image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV then image must have been created with VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV set" + "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR then image must have been created with VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR set" } ], "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ @@ -3545,8 +5787,8 @@ "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must be valid queue families" }, { - "vuid": "VUID-VkImageMemoryBarrier-image-01199", - "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkImageMemoryBarrier-synchronization2-03856", + "text": " If the synchronization2 feature is not enabled, and image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED" } ], "(VK_VERSION_1_1,VK_KHR_external_memory)": [ @@ -3556,15 +5798,15 @@ }, { "vuid": "VUID-VkImageMemoryBarrier-image-04071", - "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is a special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" }, { "vuid": "VUID-VkImageMemoryBarrier-image-04072", "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" }, { - "vuid": "VUID-VkImageMemoryBarrier-image-01381", - "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkImageMemoryBarrier-synchronization2-03857", + "text": " If the synchronization2 feature is not enabled, and image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be 
VK_QUEUE_FAMILY_IGNORED" } ] }, @@ -3726,7 +5968,7 @@ }, { "vuid": "VUID-VkRenderPassCreateInfo-pNext-01927", - "text": " If the pNext chain includes a VkRenderPassInputAttachmentAspectCreateInfo structure, the inputAttachmentIndex member of each element of its pAspectReferences member must be less than the value of inputAttachmentCount in the member of pSubpasses identified by its subpass member" + "text": " If the pNext chain includes a VkRenderPassInputAttachmentAspectCreateInfo structure, the inputAttachmentIndex member of each element of its pAspectReferences member must be less than the value of inputAttachmentCount in the element of pSubpasses identified by its subpass member" }, { "vuid": "VUID-VkRenderPassCreateInfo-pNext-01963", @@ -3761,10 +6003,6 @@ { "vuid": "VUID-VkRenderPassCreateInfo-pNext-02515", "text": " If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, and each element of its pViewMasks member is 0, correlatedViewMaskCount must be 0" - }, - { - "vuid": "VUID-VkRenderPassCreateInfo-pNext-02516", - "text": " If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, each element of its pViewMask member must not have a bit set at an index greater than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers" } ] }, @@ -4012,15 +6250,15 @@ }, { "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-02650", - "text": " If pDepthStencilAttachment is not NULL and the attachment is not VK_ATTACHMENT_UNUSED then it must have a image format whose potential format features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT" + "text": " If pDepthStencilAttachment is not NULL and the attachment is not VK_ATTACHMENT_UNUSED then it must have an image format whose potential format features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT" }, { "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-01418", - "text": " If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count" + "text": " If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count" }, { "vuid": "VUID-VkSubpassDescription-attachment-00853", - "text": " The attachment member of each element of pPreserveAttachments must not be VK_ATTACHMENT_UNUSED" + "text": " Each element of pPreserveAttachments must not be VK_ATTACHMENT_UNUSED" }, { "vuid": "VUID-VkSubpassDescription-pPreserveAttachments-00854", @@ -4030,6 +6268,14 @@ "vuid": "VUID-VkSubpassDescription-layout-02519", "text": " If any attachment is used by more than one VkAttachmentReference member, then each use must use the same layout" }, + { + "vuid": "VUID-VkSubpassDescription-None-04437", + "text": " Each attachment must follow the image layout requirements specified for its attachment type" + }, + { + "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-04438", + "text": " pDepthStencilAttachment and pColorAttachments must not contain references to the same attachment" + }, { "vuid": "VUID-VkSubpassDescription-flags-parameter", "text": " flags must be a valid combination of VkSubpassDescriptionFlagBits values" @@ -4062,7 +6308,7 @@ "(VK_AMD_mixed_attachment_samples)": [ { 
"vuid": "VUID-VkSubpassDescription-pColorAttachments-01506", - "text": " If the VK_AMD_mixed_attachment_samples extension is enabled, and all attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have a sample count that is smaller than or equal to the sample count of pDepthStencilAttachment if it is not VK_ATTACHMENT_UNUSED" + "text": " If the VK_AMD_mixed_attachment_samples extension is enabled, and all attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have a sample count that is smaller than or equal to the sample count of pDepthStencilAttachment if it is not VK_ATTACHMENT_UNUSED" } ], "(VK_NVX_multiview_per_view_attributes)": [ @@ -4083,14 +6329,6 @@ { "vuid": "VUID-VkSubpassDescription-flags-03343", "text": " If flags includes VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, then the subpass must be the last subpass in a subpass dependency chain" - }, - { - "vuid": "VUID-VkSubpassDescription-flags-03344", - "text": " If flags includes VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, then the sample count of the input attachments must equal rasterizationSamples" - }, - { - "vuid": "VUID-VkSubpassDescription-flags-03345", - "text": " If flags includes VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, and if sampleShadingEnable is enabled (explicitly or implicitly) then minSampleShading must equal 0.0" } ], "(VK_QCOM_render_pass_transform)": [ @@ -4104,7 +6342,7 @@ "core": [ { "vuid": "VUID-VkAttachmentReference-layout-00857", - "text": " If attachment is not VK_ATTACHMENT_UNUSED, layout must not be VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR" + "text": " If attachment is not VK_ATTACHMENT_UNUSED, layout must not be VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL" }, { "vuid": "VUID-VkAttachmentReference-layout-parameter", @@ -4158,18 +6396,10 @@ "vuid": "VUID-VkSubpassDependency-srcStageMask-parameter", "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits values" }, - { - "vuid": "VUID-VkSubpassDependency-srcStageMask-requiredbitmask", - "text": " srcStageMask must not be 0" - }, { "vuid": "VUID-VkSubpassDependency-dstStageMask-parameter", "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits values" }, - { - "vuid": "VUID-VkSubpassDependency-dstStageMask-requiredbitmask", - "text": " dstStageMask must not be 0" - }, { "vuid": "VUID-VkSubpassDependency-srcAccessMask-parameter", "text": " srcAccessMask must be a valid combination of VkAccessFlagBits values" @@ -4286,10 +6516,6 @@ "vuid": "VUID-VkRenderPassCreateInfo2-pDependencies-03060", "text": " For any element of pDependencies where its srcSubpass member equals its dstSubpass member, if the viewMask member of the corresponding element of pSubpasses includes more than one bit, its dependencyFlags member must include VK_DEPENDENCY_VIEW_LOCAL_BIT" }, - { - "vuid": "VUID-VkRenderPassCreateInfo2-viewMask-02524", - "text": " The viewMask member must not have a bit set at an index greater than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers" - }, { "vuid": 
"VUID-VkRenderPassCreateInfo2-attachment-02525", "text": " If the attachment member of any element of the pInputAttachments member of any element of pSubpasses is not VK_ATTACHMENT_UNUSED, the aspectMask member of that element of pInputAttachments must only include aspects that are present in images of the format specified by the element of pAttachments specified by attachment" @@ -4338,6 +6564,44 @@ "vuid": "VUID-VkRenderPassCreateInfo2-subpassCount-arraylength", "text": " subpassCount must be greater than 0" } + ], + "(VK_VERSION_1_2,VK_KHR_create_renderpass2)+(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkRenderPassCreateInfo2-pAttachments-04585", + "text": " If any element of pAttachments is used as a fragment shading rate attachment in any subpass, it must not be used as any other attachment in the render pass" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo2-pAttachments-04586", + "text": " If any element of pAttachments is used as a fragment shading rate attachment in any subpass, it must have an image format whose potential format features contain VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR" + } + ], + "(VK_VERSION_1_2,VK_KHR_create_renderpass2)+(VK_KHR_fragment_shading_rate)+(VK_QCOM_render_pass_transform)": [ + { + "vuid": "VUID-VkRenderPassCreateInfo2-flags-04521", + "text": " If flags includes VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM, an element of pSubpasses includes an instance of VkFragmentShadingRateAttachmentInfoKHR in its pNext chain, and the pFragmentShadingRateAttachment member of that structure is not equal to NULL, the attachment member of pFragmentShadingRateAttachment must be VK_ATTACHMENT_UNUSED" + } + ], + "(VK_VERSION_1_2,VK_KHR_create_renderpass2)+(VK_QCOM_render_pass_shader_resolve)": [ + { + "vuid": "VUID-VkRenderPassCreateInfo2-rasterizationSamples-04905", + "text": " If the pipeline is being created with fragment shader state, and the VK_QCOM_render_pass_shader_resolve extension is enabled, and if subpass has any input attachments, and if the subpass description contains VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, then the sample count of the input attachments must equal rasterizationSamples" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo2-sampleShadingEnable-04906", + "text": " If the pipeline is being created with fragment shader state, and the VK_QCOM_render_pass_shader_resolve extension is enabled, and if the subpass description contains VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, then sampleShadingEnable must be false" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo2-flags-04907", + "text": " If flags includes VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, and if pResolveAttachments is not NULL, then each resolve attachment must be VK_ATTACHMENT_UNUSED" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo2-flags-04908", + "text": " If flags includes VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, and if pDepthStencilResolveAttachmentKHR is not NULL, then the depth/stencil resolve attachment must be VK_ATTACHMENT_UNUSED" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo2-flags-04909", + "text": " If flags includes VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, then the subpass must be the last subpass in a subpass dependency chain" + } ] }, "VkAttachmentDescription2": { @@ -4366,6 +6630,14 @@ "vuid": "VUID-VkAttachmentDescription2-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2" }, + { + "vuid": "VUID-VkAttachmentDescription2-pNext-pNext", + "text": " pNext must be NULL or a pointer to a 
valid instance of VkAttachmentDescriptionStencilLayout" + }, + { + "vuid": "VUID-VkAttachmentDescription2-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, { "vuid": "VUID-VkAttachmentDescription2-flags-parameter", "text": " flags must be a valid combination of VkAttachmentDescriptionFlagBits values" @@ -4522,15 +6794,15 @@ }, { "vuid": "VUID-VkSubpassDescription2-pDepthStencilAttachment-02900", - "text": " If pDepthStencilAttachment is not NULL and the attachment is not VK_ATTACHMENT_UNUSED then it must have a image format whose potential format features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT" + "text": " If pDepthStencilAttachment is not NULL and the attachment is not VK_ATTACHMENT_UNUSED then it must have an image format whose potential format features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT" }, { "vuid": "VUID-VkSubpassDescription2-pDepthStencilAttachment-03071", - "text": " If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count" + "text": " If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count" }, { "vuid": "VUID-VkSubpassDescription2-attachment-03073", - "text": " The attachment member of any element of pPreserveAttachments must not be VK_ATTACHMENT_UNUSED" + "text": " Each element of pPreserveAttachments must not be VK_ATTACHMENT_UNUSED" }, { "vuid": "VUID-VkSubpassDescription2-pPreserveAttachments-03074", @@ -4538,7 +6810,11 @@ }, { "vuid": "VUID-VkSubpassDescription2-layout-02528", - "text": " If any attachment is used by more than one VkAttachmentReference member, then each use must use the same layout" + "text": " If any attachment is used by more than one VkAttachmentReference2 member, then each use must use the same layout" + }, + { + "vuid": "VUID-VkSubpassDescription2-None-04439", + "text": " Attachments must follow the image layout requirements based on the type of attachment it is being used as" }, { "vuid": "VUID-VkSubpassDescription2-attachment-02799", @@ -4552,10 +6828,22 @@ "vuid": "VUID-VkSubpassDescription2-attachment-02801", "text": " If the attachment member of any element of pInputAttachments is not VK_ATTACHMENT_UNUSED, then the aspectMask member must not include VK_IMAGE_ASPECT_METADATA_BIT" }, + { + "vuid": "VUID-VkSubpassDescription2-pDepthStencilAttachment-04440", + "text": " An attachment must not be used in both pDepthStencilAttachment and pColorAttachments" + }, { "vuid": "VUID-VkSubpassDescription2-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2" }, + { + "vuid": "VUID-VkSubpassDescription2-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkFragmentShadingRateAttachmentInfoKHR or VkSubpassDescriptionDepthStencilResolve" + }, + { + "vuid": "VUID-VkSubpassDescription2-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, { "vuid": "VUID-VkSubpassDescription2-flags-parameter", "text": " flags must be a valid combination of VkSubpassDescriptionFlagBits values" @@ 
-4588,7 +6876,7 @@ "(VK_VERSION_1_2,VK_KHR_create_renderpass2)+(VK_AMD_mixed_attachment_samples)": [ { "vuid": "VUID-VkSubpassDescription2-pColorAttachments-03070", - "text": " If the VK_AMD_mixed_attachment_samples extension is enabled, all attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have a sample count that is smaller than or equal to the sample count of pDepthStencilAttachment if it is not VK_ATTACHMENT_UNUSED" + "text": " If the VK_AMD_mixed_attachment_samples extension is enabled, all attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have a sample count that is smaller than or equal to the sample count of pDepthStencilAttachment if it is not VK_ATTACHMENT_UNUSED" } ], "(VK_VERSION_1_2,VK_KHR_create_renderpass2)+(VK_NVX_multiview_per_view_attributes)": [ @@ -4596,6 +6884,12 @@ "vuid": "VUID-VkSubpassDescription2-flags-03076", "text": " If flags includes VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, it must also include VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX" } + ], + "(VK_VERSION_1_2,VK_KHR_create_renderpass2)+(VK_EXT_image_drm_format_modifier)": [ + { + "vuid": "VUID-VkSubpassDescription2-attachment-04563", + "text": " If the attachment member of any element of pInputAttachments is not VK_ATTACHMENT_UNUSED, then the aspectMask member must not include VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT for any index i" + } ] }, "VkSubpassDescriptionDepthStencilResolve": { @@ -4644,42 +6938,90 @@ "vuid": "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-03186", "text": " If the VkFormat of pDepthStencilResolveAttachment has both depth and stencil components, VkPhysicalDeviceDepthStencilResolveProperties::independentResolve is VK_FALSE and VkPhysicalDeviceDepthStencilResolveProperties::independentResolveNone is VK_TRUE, then the values of depthResolveMode and stencilResolveMode must be identical or one of them must be VK_RESOLVE_MODE_NONE" }, + { + "vuid": "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-04588", + "text": " If the VkFormat of pDepthStencilResolveAttachment has a depth component, depthResolveMode must be a valid VkResolveModeFlagBits value" + }, + { + "vuid": "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-04589", + "text": " If the VkFormat of pDepthStencilResolveAttachment has a stencil component, stencilResolveMode must be a valid VkResolveModeFlagBits value" + }, { "vuid": "VUID-VkSubpassDescriptionDepthStencilResolve-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE" }, - { - "vuid": "VUID-VkSubpassDescriptionDepthStencilResolve-depthResolveMode-parameter", - "text": " depthResolveMode must be a valid VkResolveModeFlagBits value" - }, - { - "vuid": "VUID-VkSubpassDescriptionDepthStencilResolve-stencilResolveMode-parameter", - "text": " stencilResolveMode must be a valid VkResolveModeFlagBits value" - }, { "vuid": "VUID-VkSubpassDescriptionDepthStencilResolve-pDepthStencilResolveAttachment-parameter", "text": " If pDepthStencilResolveAttachment is not NULL, pDepthStencilResolveAttachment must be a valid pointer to a valid VkAttachmentReference2 structure" } ] }, + "VkFragmentShadingRateAttachmentInfoKHR": { + "(VK_VERSION_1_2,VK_KHR_create_renderpass2)+(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04524", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not 
VK_ATTACHMENT_UNUSED, its layout member must be equal to VK_IMAGE_LAYOUT_GENERAL or VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04525", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, shadingRateAttachmentTexelSize.width must be a power of two value" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04526", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, shadingRateAttachmentTexelSize.width must be less than or equal to maxFragmentShadingRateAttachmentTexelSize.width" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04527", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, shadingRateAttachmentTexelSize.width must be greater than or equal to minFragmentShadingRateAttachmentTexelSize.width" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04528", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, shadingRateAttachmentTexelSize.height must be a power of two value" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04529", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, shadingRateAttachmentTexelSize.height must be less than or equal to maxFragmentShadingRateAttachmentTexelSize.height" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04530", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, shadingRateAttachmentTexelSize.height must be greater than or equal to minFragmentShadingRateAttachmentTexelSize.height" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04531", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, the quotient of shadingRateAttachmentTexelSize.width and shadingRateAttachmentTexelSize.height must be less than or equal to maxFragmentShadingRateAttachmentTexelSizeAspectRatio" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04532", + "text": " If pFragmentShadingRateAttachment is not NULL and its attachment member is not VK_ATTACHMENT_UNUSED, the quotient of shadingRateAttachmentTexelSize.height and shadingRateAttachmentTexelSize.width must be less than or equal to maxFragmentShadingRateAttachmentTexelSizeAspectRatio" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR" + }, + { + "vuid": "VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-parameter", + "text": " If pFragmentShadingRateAttachment is not NULL, pFragmentShadingRateAttachment must be a valid pointer to a valid VkAttachmentReference2 structure" + } + ] + }, "VkAttachmentReference2": { "(VK_VERSION_1_2,VK_KHR_create_renderpass2)": [ { "vuid": "VUID-VkAttachmentReference2-layout-03077", "text": " If attachment is not VK_ATTACHMENT_UNUSED, layout must not be VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, or 
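
The texel-size rules above reduce to a small predicate. A sketch under the assumption that the VkPhysicalDeviceFragmentShadingRatePropertiesKHR limits have been queried elsewhere:

    /* Checks VUIDs 04525-04532: both extents are powers of two, lie within the
     * min/max attachment texel sizes, and the aspect ratio stays within
     * maxFragmentShadingRateAttachmentTexelSizeAspectRatio. */
    static int is_valid_texel_size(VkExtent2D s,
                                   const VkPhysicalDeviceFragmentShadingRatePropertiesKHR *p) {
        int pot = (s.width & (s.width - 1)) == 0 && (s.height & (s.height - 1)) == 0;
        uint32_t ratio = s.width > s.height ? s.width / s.height : s.height / s.width;
        return pot &&
               s.width  >= p->minFragmentShadingRateAttachmentTexelSize.width &&
               s.width  <= p->maxFragmentShadingRateAttachmentTexelSize.width &&
               s.height >= p->minFragmentShadingRateAttachmentTexelSize.height &&
               s.height <= p->maxFragmentShadingRateAttachmentTexelSize.height &&
               ratio <= p->maxFragmentShadingRateAttachmentTexelSizeAspectRatio;
    }
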
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR" }, - { - "vuid": "VUID-VkAttachmentReference2-attachment-03311", - "text": " If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask does not include VK_IMAGE_ASPECT_STENCIL_BIT or VK_IMAGE_ASPECT_DEPTH_BIT, layout must not be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL" - }, - { - "vuid": "VUID-VkAttachmentReference2-attachment-03312", - "text": " If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask does not include VK_IMAGE_ASPECT_COLOR_BIT, layout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL" - }, { "vuid": "VUID-VkAttachmentReference2-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2" }, + { + "vuid": "VUID-VkAttachmentReference2-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkAttachmentReferenceStencilLayout" + }, + { + "vuid": "VUID-VkAttachmentReference2-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, { "vuid": "VUID-VkAttachmentReference2-layout-parameter", "text": " layout must be a valid VkImageLayout value" @@ -4691,20 +7033,20 @@ "text": " If the separateDepthStencilLayouts feature is not enabled, and attachment is not VK_ATTACHMENT_UNUSED, layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL," }, { - "vuid": "VUID-VkAttachmentReference2-attachment-03314", - "text": " If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes VK_IMAGE_ASPECT_COLOR_BIT, layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL," + "vuid": "VUID-VkAttachmentReference2-attachment-04754", + "text": " If attachment is not VK_ATTACHMENT_UNUSED, and the format of the referenced attachment is a color format, layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL" }, { - "vuid": "VUID-VkAttachmentReference2-attachment-03315", - "text": " If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT, and layout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, the pNext chain must include a VkAttachmentReferenceStencilLayout structure" + "vuid": "VUID-VkAttachmentReference2-attachment-04755", + "text": " If attachment is not VK_ATTACHMENT_UNUSED, and the format of the referenced attachment is a depth/stencil format which includes both depth and stencil aspects, and layout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, the pNext chain must include a VkAttachmentReferenceStencilLayout structure" }, { - "vuid": "VUID-VkAttachmentReference2-attachment-03316", - "text": " If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes only VK_IMAGE_ASPECT_DEPTH_BIT then layout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL" + "vuid": "VUID-VkAttachmentReference2-attachment-04756", + "text": " If attachment is not VK_ATTACHMENT_UNUSED, and the format of the referenced 
attachment is a depth/stencil format which includes only the depth aspect, layout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL" }, { - "vuid": "VUID-VkAttachmentReference2-attachment-03317", - "text": " If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes only VK_IMAGE_ASPECT_STENCIL_BIT then layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL" + "vuid": "VUID-VkAttachmentReference2-attachment-04757", + "text": " If attachment is not VK_ATTACHMENT_UNUSED, and the format of the referenced attachment is a depth/stencil format which includes only the stencil aspect, layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL" } ] }, @@ -4786,22 +7128,22 @@ "vuid": "VUID-VkSubpassDependency2-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2" }, + { + "vuid": "VUID-VkSubpassDependency2-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkMemoryBarrier2KHR" + }, + { + "vuid": "VUID-VkSubpassDependency2-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, { "vuid": "VUID-VkSubpassDependency2-srcStageMask-parameter", "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits values" }, - { - "vuid": "VUID-VkSubpassDependency2-srcStageMask-requiredbitmask", - "text": " srcStageMask must not be 0" - }, { "vuid": "VUID-VkSubpassDependency2-dstStageMask-parameter", "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits values" }, - { - "vuid": "VUID-VkSubpassDependency2-dstStageMask-requiredbitmask", - "text": " dstStageMask must not be 0" - }, { "vuid": "VUID-VkSubpassDependency2-srcAccessMask-parameter", "text": " srcAccessMask must be a valid combination of VkAccessFlagBits values" @@ -4920,6 +7262,18 @@ "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00881", "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments must have been created with a samples value that matches the samples value specified by the corresponding VkAttachmentDescription in renderPass" }, + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-04533", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments that is used as an input, color, resolve, or depth/stencil attachment by renderPass must have been created with a VkImageCreateInfo::width greater than or equal to width" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-04534", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments that is used as an input, color, resolve, or depth/stencil attachment by renderPass must have been created with a VkImageCreateInfo::height greater than or equal to height" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-04535", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments that is used as an input, color, resolve, or depth/stencil attachment by renderPass must have been created with a VkImageViewCreateInfo::subresourceRange.layerCount greater than or equal to layers" + }, { "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00883", "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments must only specify a single mip level" @@ -4952,10 +7306,6 @@ "vuid": 
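
The three framebuffer-dimension VUs just added (04533-04535) amount to a per-attachment size check. A sketch, where viewWidth/viewHeight/viewLayers stand in for the dimensions the VkImageView was created with (assumed tracked by the caller):

    /* Returns nonzero when an input/color/resolve/depth-stencil attachment view
     * is at least as large as the framebuffer in every dimension. */
    static int attachment_large_enough(uint32_t viewWidth, uint32_t viewHeight,
                                       uint32_t viewLayers,
                                       const VkFramebufferCreateInfo *fb) {
        return viewWidth >= fb->width && viewHeight >= fb->height &&
               viewLayers >= fb->layers;
    }
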
"VUID-VkFramebufferCreateInfo-layers-00890", "text": " layers must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers" }, - { - "vuid": "VUID-VkFramebufferCreateInfo-flags-03188", - "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, and attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles" - }, { "vuid": "VUID-VkFramebufferCreateInfo-flags-04113", "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments must have been created with VkImageViewCreateInfo::viewType not equal to VK_IMAGE_VIEW_TYPE_3D" @@ -4998,11 +7348,7 @@ }, { "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02553", - "text": " If renderPass has a fragment density map attachment and non-subsample image feature is not enabled, each element of pAttachments must have been created with a flags value including VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT unless that element is the fragment density map attachment" - }, - { - "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02554", - "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension except for any element that is referenced by fragmentDensityMapAttachment" + "text": " If renderPass has a fragment density map attachment and non-subsample image feature is not enabled, each element of pAttachments must have been created with a flags value including VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT unless that element is the fragment density map attachment" }, { "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02555", @@ -5013,16 +7359,14 @@ "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, an element of pAttachments that is referenced by fragmentDensityMapAttachment must have a height at least as large as \\(\\left\\lceil{\\frac{height}{maxFragmentDensityTexelSize_{height}}}\\right\\rceil\\)" } ], - "!(VK_EXT_fragment_density_map)": [ + "(VK_VERSION_1_1,VK_KHR_multiview)": [ { - "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00882", - "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension" - } - ], - "!(VK_EXT_fragment_density_map)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + "vuid": "VUID-VkFramebufferCreateInfo-renderPass-04536", + "text": " If renderPass was specified with non-zero view masks, each element of pAttachments that is used as an input, color, resolve, or depth/stencil attachment by renderPass must have a layerCount greater than the index of the most significant bit set in any of those view masks" + }, { - "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02743", - "text": " If renderPass was specified with non-zero view masks, each element of pAttachments must have a layerCount greater than the index of the most significant bit set in any of those view masks" + "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02531", + "text": " If renderPass was specified with non-zero view masks, layers must be 1" } ], "(VK_EXT_fragment_density_map)+!(VK_VERSION_1_1,VK_KHR_multiview)": [ @@ -5032,10 +7376,6 @@ } ], "(VK_EXT_fragment_density_map)+(VK_VERSION_1_1,VK_KHR_multiview)": [ - { - "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02745", - "text": " If renderPass was specified with non-zero view masks, each element of pAttachments that is not referenced by 
fragmentDensityMapAttachment must have a layerCount greater than the index of the most significant bit set in any of those view masks" - }, { "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02746", "text": " If renderPass was specified with non-zero view masks, each element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1 or greater than the index of the most significant bit set in any of those view masks" @@ -5045,10 +7385,28 @@ "text": " If renderPass was not specified with non-zero view masks, each element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1" } ], - "(VK_VERSION_1_1,VK_KHR_multiview)": [ + "(VK_KHR_fragment_shading_rate)+(VK_VERSION_1_1,VK_KHR_multiview)": [ { - "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02531", - "text": " If renderPass was specified with non-zero view masks, layers must be 1" + "vuid": "VUID-VkFramebufferCreateInfo-flags-04537", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, and renderPass was specified with non-zero view masks, each element of pAttachments that is used as a fragment shading rate attachment by renderPass must have a layerCount that is either 1, or greater than the index of the most significant bit set in any of those view masks" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-04538", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, and renderPass was not specified with non-zero view masks, each element of pAttachments that is used as a fragment shading rate attachment by renderPass must have a layerCount that is either 1, or greater than layers" + } + ], + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-04539", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, an element of pAttachments that is used as a fragment shading rate attachment must have a width at least as large as {lceil}width / texelWidth{rceil}, where texelWidth is the largest value of shadingRateAttachmentTexelSize.width in a VkFragmentShadingRateAttachmentInfoKHR which references that attachment" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-04540", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, an element of pAttachments that is used as a fragment shading rate attachment must have a height at least as large as {lceil}height / texelHeight{rceil}, where texelHeight is the largest value of shadingRateAttachmentTexelSize.height in a VkFragmentShadingRateAttachmentInfoKHR which references that attachment" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-04548", + "text": " If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of pAttachments that is used as a fragment shading rate attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR" } ], "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ @@ -5064,11 +7422,19 @@ }, { "vuid": "VUID-VkFramebufferCreateInfo-flags-03190", - "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the pNext chain must include a VkFramebufferAttachmentsCreateInfo structure" + "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the pNext chain must include an instance of VkFramebufferAttachmentsCreateInfo" }, { "vuid": "VUID-VkFramebufferCreateInfo-flags-03191", - "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the attachmentImageInfoCount member of 
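
A sketch of the imageless-framebuffer pNext chain the VUs in this group describe (assuming Vulkan 1.2 or VK_KHR_imageless_framebuffer); the format variable, renderPass handle, and sizes are assumptions:

    VkFramebufferAttachmentImageInfo imageInfo = {
        .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
        .usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
        .width = 1280, .height = 720, .layerCount = 1,
        .viewFormatCount = 1, .pViewFormats = &colorFormat, /* assumed VkFormat */
    };
    VkFramebufferAttachmentsCreateInfo attachments = {
        .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
        .attachmentImageInfoCount = 1, /* zero or attachmentCount (VUID 03191) */
        .pAttachmentImageInfos = &imageInfo,
    };
    VkFramebufferCreateInfo fb = {
        .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
        .pNext = &attachments, /* required by VUID 03190 */
        .flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
        .renderPass = renderPass, /* assumed to exist */
        .attachmentCount = 1,
        .width = 1280, .height = 720, .layers = 1,
    };
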
        {
          "vuid": "VUID-VkFramebufferCreateInfo-flags-03191",
-         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the attachmentImageInfoCount member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be equal to either zero or attachmentCount"
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the attachmentImageInfoCount member of an instance of VkFramebufferAttachmentsCreateInfo in the pNext chain must be equal to either zero or attachmentCount"
+       },
+       {
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04541",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the width member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfo in the pNext chain that is used as an input, color, resolve or depth/stencil attachment in renderPass must be greater than or equal to width"
+       },
+       {
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04542",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the height member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfo in the pNext chain that is used as an input, color, resolve or depth/stencil attachment in renderPass must be greater than or equal to height"
        },
        {
          "vuid": "VUID-VkFramebufferCreateInfo-flags-03201",
@@ -5087,48 +7453,48 @@
          "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, at least one element of the pViewFormats member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be equal to the corresponding value of VkAttachmentDescription::format used to create renderPass"
        }
      ],
-     "(VK_VERSION_1_2,VK_KHR_imageless_framebuffer)+!(VK_EXT_fragment_density_map)": [
-       {
-         "vuid": "VUID-VkFramebufferCreateInfo-flags-03192",
-         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the width member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be greater than or equal to width"
-       },
-       {
-         "vuid": "VUID-VkFramebufferCreateInfo-flags-03193",
-         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the height member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be greater than or equal to height"
-       }
-     ],
      "(VK_VERSION_1_2,VK_KHR_imageless_framebuffer)+(VK_EXT_fragment_density_map)": [
-       {
-         "vuid": "VUID-VkFramebufferCreateInfo-flags-03194",
-         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the width member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be greater than or equal to width, except for any element that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass"
-       },
-       {
-         "vuid": "VUID-VkFramebufferCreateInfo-flags-03195",
-         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the height member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be greater than or equal to height, except for any element that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass"
-       },
        {
          "vuid": "VUID-VkFramebufferCreateInfo-flags-03196",
-         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the width member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass must be greater than or equal to \\(\\left\\lceil{\\frac{width}{maxFragmentDensityTexelSize_{width}}}\\right\\rceil\\)"
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the width member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfo in the pNext chain that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass must be greater than or equal to \\(\\left\\lceil{\\frac{width}{maxFragmentDensityTexelSize_{width}}}\\right\\rceil\\)"
        },
        {
          "vuid": "VUID-VkFramebufferCreateInfo-flags-03197",
          "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the height member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass must be greater than or equal to \\(\\left\\lceil{\\frac{height}{maxFragmentDensityTexelSize_{height}}}\\right\\rceil\\)"
        }
      ],
+     "(VK_VERSION_1_2,VK_KHR_imageless_framebuffer)+(VK_KHR_fragment_shading_rate)": [
+       {
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04543",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the width member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfo in the pNext chain that is used as a fragment shading rate attachment must be greater than or equal to {lceil}width / texelWidth{rceil}, where texelWidth is the largest value of shadingRateAttachmentTexelSize.width in a VkFragmentShadingRateAttachmentInfoKHR which references that attachment"
+       },
+       {
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04544",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the height member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfo in the pNext chain that is used as a fragment shading rate attachment must be greater than or equal to {lceil}height / texelHeight{rceil}, where texelHeight is the largest value of shadingRateAttachmentTexelSize.height in a VkFragmentShadingRateAttachmentInfoKHR which references that attachment"
+       },
+       {
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04545",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the layerCount member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfo in the pNext chain that is used as a fragment shading rate attachment must be either 1, or greater than or equal to layers"
+       },
+       {
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04587",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, and renderPass was specified with non-zero view masks, each element of pAttachments that is used as a fragment shading rate attachment by renderPass must have a layerCount that is either 1, or greater than the index of the most significant bit set in any of those view masks"
+       }
+     ],
      "(VK_VERSION_1_2,VK_KHR_imageless_framebuffer)+(VK_VERSION_1_1,VK_KHR_multiview)": [
        {
          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-03198",
-         "text": " If multiview is enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be greater than the maximum bit index set in the view mask in the subpasses in which it is used in renderPass"
+         "text": " If multiview is enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain used as an input, color, resolve, or depth/stencil attachment in renderPass must be greater than the maximum bit index set in the view mask in the subpasses in which it is used in renderPass"
        },
        {
-         "vuid": "VUID-VkFramebufferCreateInfo-renderPass-03199",
-         "text": " If multiview is not enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be greater than or equal to layers"
+         "vuid": "VUID-VkFramebufferCreateInfo-renderPass-04546",
+         "text": " If multiview is not enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain used as an input, color, resolve, or depth/stencil attachment in renderPass must be greater than or equal to layers"
        }
      ],
      "(VK_VERSION_1_2,VK_KHR_imageless_framebuffer)+!(VK_VERSION_1_1+VK_KHR_multiview)": [
        {
-         "vuid": "VUID-VkFramebufferCreateInfo-flags-03200",
-         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain must be greater than or equal to layers"
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04547",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain used as an input, color, resolve, or depth/stencil attachment in pRenderPass must be greater than or equal to layers"
        }
      ],
      "(VK_VERSION_1_2,VK_KHR_imageless_framebuffer)+(VK_KHR_depth_stencil_resolve)": [
@@ -5136,6 +7502,12 @@
          "vuid": "VUID-VkFramebufferCreateInfo-flags-03203",
          "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the usage member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain that refers to an attachment used as a depth/stencil resolve attachment by renderPass must include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"
        }
+     ],
+     "(VK_KHR_fragment_shading_rate)+(VK_VERSION_1_2,VK_KHR_imageless_framebuffer)": [
+       {
+         "vuid": "VUID-VkFramebufferCreateInfo-flags-04549",
+         "text": " If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, the usage member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfo structure included in the pNext chain that refers to an attachment used as a fragment shading rate attachment by renderPass must include VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"
+       }
      ]
    },
    "VkFramebufferAttachmentsCreateInfo": {
@@ -5230,7 +7602,7 @@
        },
        {
          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00900",
-         "text": " If any of the initialLayout members of the VkAttachmentDescription structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is not VK_IMAGE_LAYOUT_UNDEFINED, then each such initialLayout must be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin"
+         "text": " If the initialLayout member of any of the VkAttachmentDescription structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is not VK_IMAGE_LAYOUT_UNDEFINED, then each such initialLayout must be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin"
        },
        {
          "vuid": "VUID-vkCmdBeginRenderPass-srcStageMask-00901",
@@ -5240,22 +7612,6 @@
          "vuid": "VUID-vkCmdBeginRenderPass-framebuffer-02532",
          "text": " For any attachment in framebuffer that is used by renderPass and is bound to memory locations that are also bound to another attachment used by renderPass, and if at least one of those uses causes either attachment to be written to, both attachments must have had the VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT set"
        },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass-pAttachments-04102",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pInputAttachments of any element of pSubpasses of renderPass must have image view format features containing at least VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT or VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"
-       },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass-pAttachments-04103",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pColorAttachments of any element of pSubpasses of renderPass must have image view format features containing VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"
-       },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass-pAttachments-04104",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pResolveAttachments of any element of pSubpasses of renderPass must have image view format features containing VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"
-       },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass-pAttachments-04105",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pDepthStencilAttachment of any element of pSubpasses of renderPass must have image view format features containing VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"
-       },
        {
          "vuid": "VUID-vkCmdBeginRenderPass-commandBuffer-parameter",
          "text": " commandBuffer must be a valid VkCommandBuffer handle"
@@ -5336,7 +7692,7 @@
        },
        {
          "vuid": "VUID-vkCmdBeginRenderPass2-initialLayout-03100",
-         "text": " If any of the initialLayout members of the VkAttachmentDescription structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is not VK_IMAGE_LAYOUT_UNDEFINED, then each such initialLayout must be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin"
+         "text": " If the initialLayout member of any of the VkAttachmentDescription structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is not VK_IMAGE_LAYOUT_UNDEFINED, then each such initialLayout must be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin"
        },
        {
          "vuid": "VUID-vkCmdBeginRenderPass2-srcStageMask-03101",
@@ -5346,22 +7702,6 @@
          "vuid": "VUID-vkCmdBeginRenderPass2-framebuffer-02533",
          "text": " For any attachment in framebuffer that is used by renderPass and is bound to memory locations that are also bound to another attachment used by renderPass, and if at least one of those uses causes either attachment to be written to, both attachments must have had the VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT set"
        },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass2-pAttachments-04106",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pInputAttachments of any element of pSubpasses of renderPass must have image view format features contain at least VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT or VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"
-       },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass2-pAttachments-04107",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pColorAttachments of any element of pSubpasses of renderPass must have image view format features contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"
-       },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass2-pAttachments-04108",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pResolveAttachments of any element of pSubpasses of renderPass must have image view format features contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"
-       },
-       {
-         "vuid": "VUID-vkCmdBeginRenderPass2-pAttachments-04109",
-         "text": " Each element of the pAttachments of framebuffer that is referenced by any element of the pDepthStencilAttachment of any element of pSubpasses of renderPass must have image view format features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"
-       },
        {
          "vuid": "VUID-vkCmdBeginRenderPass2-commandBuffer-parameter",
          "text": " commandBuffer must be a valid VkCommandBuffer handle"
@@ -5432,10 +7772,6 @@
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
          "text": " framebuffer must be a valid VkFramebuffer handle"
        },
-       {
-         "vuid": "VUID-VkRenderPassBeginInfo-pClearValues-parameter",
-         "text": " If clearValueCount is not 0, pClearValues must be a valid pointer to an array of clearValueCount VkClearValue unions"
-       },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-commonparent",
          "text": " Both of framebuffer, and renderPass must have been created, allocated, or retrieved from the same VkDevice"
@@ -5504,35 +7840,35 @@
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-02780",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must have been created on the same VkDevice as framebuffer and renderPass"
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must have been created on the same VkDevice as framebuffer and renderPass"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03209",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::flags equal to the flags member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer"
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::flags equal to the flags member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachmentImageInfos used to create framebuffer"
        },
        {
-         "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03210",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::usage equal to the usage member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachments used to create framebuffer"
+         "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-04627",
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView with an inherited usage equal to the usage member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachmentImageInfos used to create framebuffer"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03211",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView with a width equal to the width member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachments used to create framebuffer"
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView with a width equal to the width member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachmentImageInfos used to create framebuffer"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03212",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView with a height equal to the height member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachments used to create framebuffer"
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView with a height equal to the height member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachmentImageInfos used to create framebuffer"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03213",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageViewCreateInfo::subresourceRange.layerCount equal to the layerCount member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachments used to create framebuffer"
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageViewCreateInfo::subresourceRange.layerCount equal to the layerCount member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachmentImageInfos used to create framebuffer"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03214",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageFormatListCreateInfo::viewFormatCount equal to the viewFormatCount member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachments used to create framebuffer"
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageFormatListCreateInfo::viewFormatCount equal to the viewFormatCount member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachmentImageInfos used to create framebuffer"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03215",
-         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a set of elements in VkImageFormatListCreateInfo::pViewFormats equal to the set of elements in the pViewFormats member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachments used to create framebuffer"
+         "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a set of elements in VkImageFormatListCreateInfo::pViewFormats equal to the set of elements in the pViewFormats member of the corresponding element of VkFramebufferAttachmentsCreateInfo::pAttachmentImageInfos used to create framebuffer"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03216",
@@ -5546,11 +7882,11 @@
      "(VK_QCOM_render_pass_transform)": [
        {
          "vuid": "VUID-VkRenderPassBeginInfo-pNext-02869",
-         "text": " If the pNext chain includes VkRenderPassTransformBeginInfoQCOM, renderArea::offset must equal (0,0)"
+         "text": " If the pNext chain includes VkRenderPassTransformBeginInfoQCOM, renderArea.offset must equal (0,0)"
        },
        {
          "vuid": "VUID-VkRenderPassBeginInfo-pNext-02870",
-         "text": " If the pNext chain includes VkRenderPassTransformBeginInfoQCOM, renderArea::extent transformed by VkRenderPassTransformBeginInfoQCOM::transform must equal the framebuffer dimensions"
+         "text": " If the pNext chain includes VkRenderPassTransformBeginInfoQCOM, renderArea.extent transformed by VkRenderPassTransformBeginInfoQCOM::transform must equal the framebuffer dimensions"
        }
      ]
    },
@@ -5900,6 +8236,14 @@
          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01091",
          "text": " If pCode declares any of the capabilities listed in the SPIR-V Environment appendix, one of the corresponding requirements must be satisfied"
        },
+       {
+         "vuid": "VUID-VkShaderModuleCreateInfo-pCode-04146",
+         "text": " pCode must not declare any SPIR-V extension that is not supported by the API, as described by the Extension section of the SPIR-V Environment appendix"
+       },
+       {
+         "vuid": "VUID-VkShaderModuleCreateInfo-pCode-04147",
+         "text": " If pCode declares any of the SPIR-V extensions listed in the SPIR-V Environment appendix, one of the corresponding requirements must be satisfied"
+       },
        {
          "vuid": "VUID-VkShaderModuleCreateInfo-sType-sType",
          "text": " sType must be VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO"
@@ -5994,6 +8338,30 @@
        }
      ]
    },
+   "vkCmdSetPatchControlPointsEXT": {
+     "(VK_EXT_extended_dynamic_state2)": [
+       {
+         "vuid": "VUID-vkCmdSetPatchControlPointsEXT-None-04873",
+         "text": " The extendedDynamicState2PatchControlPoints feature must be enabled"
+       },
+       {
+         "vuid": "VUID-vkCmdSetPatchControlPointsEXT-patchControlPoints-04874",
+         "text": " patchControlPoints must be greater than zero and less than or equal to VkPhysicalDeviceLimits::maxTessellationPatchSize"
+       },
+       {
+         "vuid": "VUID-vkCmdSetPatchControlPointsEXT-commandBuffer-parameter",
+         "text": " commandBuffer must be a valid VkCommandBuffer handle"
+       },
+       {
+         "vuid": "VUID-vkCmdSetPatchControlPointsEXT-commandBuffer-recording",
+         "text": " commandBuffer must be in the recording state"
+       },
+       {
+         "vuid": "VUID-vkCmdSetPatchControlPointsEXT-commandBuffer-cmdpool",
+         "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations"
+       }
+     ]
+   },
+ }, { "vuid": "VUID-VkPipelineShaderStageCreateInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO" @@ -6580,99 +8956,107 @@ }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00729", - "text": " If pStages includes a tessellation control shader stage, it must include a tessellation evaluation shader stage" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes a tessellation control shader stage, it must include a tessellation evaluation shader stage" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00730", - "text": " If pStages includes a tessellation evaluation shader stage, it must include a tessellation control shader stage" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes a tessellation evaluation shader stage, it must include a tessellation control shader stage" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00731", - "text": " If pStages includes a tessellation control shader stage and a tessellation evaluation shader stage, pTessellationState must be a valid pointer to a valid VkPipelineTessellationStateCreateInfo structure" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes a tessellation control shader stage and a tessellation evaluation shader stage, pTessellationState must be a valid pointer to a valid VkPipelineTessellationStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00732", - "text": " If pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies the type of subdivision in the pipeline" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies the type of subdivision in the pipeline" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00733", - "text": " If pStages includes tessellation shader stages, and the shader code of both stages contain an OpExecutionMode instruction that specifies the type of subdivision in the pipeline, they must both specify the same subdivision mode" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes tessellation shader stages, and the shader code of both stages contain an OpExecutionMode instruction that specifies the type of subdivision in the pipeline, they must both specify the same subdivision mode" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00734", - "text": " If pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies the output patch size in the pipeline" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies the output patch size in the pipeline" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00735", - "text": " If pStages includes tessellation shader stages, and the shader code of both contain an OpExecutionMode instruction that specifies the out patch size in the pipeline, they must both specify the same patch size" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages 
includes tessellation shader stages, and the shader code of both contain an OpExecutionMode instruction that specifies the out patch size in the pipeline, they must both specify the same patch size" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00736", - "text": " If pStages includes tessellation shader stages, the topology member of pInputAssembly must be VK_PRIMITIVE_TOPOLOGY_PATCH_LIST" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes tessellation shader stages, the topology member of pInputAssembly must be VK_PRIMITIVE_TOPOLOGY_PATCH_LIST" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-topology-00737", - "text": " If the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, pStages must include tessellation shader stages" + "text": " If the pipeline is being created with pre-rasterization shader state and the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, pStages must include tessellation shader stages" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00738", - "text": " If pStages includes a geometry shader stage, and does not include any tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is compatible with the primitive topology specified in pInputAssembly" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes a geometry shader stage, and does not include any tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is compatible with the primitive topology specified in pInputAssembly" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00739", - "text": " If pStages includes a geometry shader stage, and also includes tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is compatible with the primitive topology that is output by the tessellation stages" + "text": " If the pipeline is being created with pre-rasterization shader state and pStages includes a geometry shader stage, and also includes tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is compatible with the primitive topology that is output by the tessellation stages" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00740", - "text": " If pStages includes a fragment shader stage and a geometry shader stage, and the fragment shader code reads from an input variable that is decorated with PrimitiveID, then the geometry shader code must write to a matching output variable, decorated with PrimitiveID, in all execution paths" + "text": " If the pipeline is being created with pre-rasterization shader state and fragment shader state, it includes both a fragment shader and a geometry shader, and the fragment shader code reads from an input variable that is decorated with PrimitiveID, then the geometry shader code must write to a matching output variable, decorated with PrimitiveID, in all execution paths" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00741", - "text": " If pStages includes a fragment shader stage, its shader code must not read from any input attachment that is defined as VK_ATTACHMENT_UNUSED in subpass" + "text": " If the pipeline is being created with fragment shader state the fragment shader must not read from any input 
attachment that is defined as VK_ATTACHMENT_UNUSED in subpass" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00742", - "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" + "text": " If the pipeline is being created with pre-rasterization shader state and multiple pre-rasterization shader stages are included in pStages, the shader code for the entry points identified by those pStages and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023", - "text": " If rasterization is not disabled and the subpass uses color attachments, then for each color attachment in the subpass the blendEnable member of the corresponding element of the pAttachment member of pColorBlendState must be VK_FALSE if the attached image’s format features does not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-04889", + "text": " If the pipeline is being created with pre-rasterization shader state and fragment shader state, the fragment shader and last vertex processing shader stage and any relevant state must adhere to the pipeline linking rules described in the Shader Interfaces chapter" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-04890", + "text": " If the pipeline is being created with fragment shader state, and subpass uses a depth/stencil attachment in renderPass with a read-only layout for the depth aspect in the VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-04891", + "text": " If the pipeline is being created with fragment shader state, and subpass uses a depth/stencil attachment in renderPass with a read-only layout for the stencil aspect in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-blendEnable-04717", + "text": " If the pipeline is being created with fragment output interface state, then for each color attachment in the subpass, if the potential format features of the format of the corresponding attachment description do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746", - "text": " If rasterization is not disabled and the subpass uses color attachments, the attachmentCount member of pColorBlendState must be equal to the colorAttachmentCount used to create subpass" - }, - { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT, the pViewports member of pViewportState must be a valid pointer to an array of pViewportState->viewportCount valid VkViewport structures" - }, - { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR, the pScissors member of pViewportState must be a valid pointer to an 
array of pViewportState->scissorCount VkRect2D structures" + "text": " If the pipeline is being created with fragment output interface state, and the subpass uses color attachments, the attachmentCount member of pColorBlendState must be equal to the colorAttachmentCount used to create subpass" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749", - "text": " If the wide lines feature is not enabled, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_WIDTH, the lineWidth member of pRasterizationState must be 1.0" + "text": " If the pipeline is being created with pre-rasterization shader state, and the wide lines feature is not enabled, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_WIDTH, the lineWidth member of pRasterizationState must be 1.0" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750", - "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pViewportState must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure" + "text": " If the pipeline is being created with pre-rasterization shader state, and the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pViewportState must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751", - "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pMultisampleState must be a valid pointer to a valid VkPipelineMultisampleStateCreateInfo structure" + "text": " If the pipeline is being created with fragment shader state, pMultisampleState must be a valid pointer to a valid VkPipelineMultisampleStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752", - "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, and subpass uses a depth/stencil attachment, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + "text": " If the pipeline is being created with fragment shader state, and subpass uses a depth/stencil attachment, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753", - "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, and subpass uses color attachments, pColorBlendState must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure" + "text": " If the pipeline is being created with fragment output interface state, and subpass uses color attachments, pColorBlendState must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-04493", + "text": " If the pipeline is being created with fragment output interface state, pColorBlendState->attachmentCount must be greater than the index of all color attachments that are not VK_ATTACHMENT_UNUSED for the subpass index in renderPass" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00754", - "text": " If the depth bias clamping feature is not enabled, no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BIAS, and the depthBiasEnable member of pRasterizationState is VK_TRUE, the depthBiasClamp member of pRasterizationState must be 0.0" + "text": " If the 
pipeline is being created with pre-rasterization shader state, the depth bias clamping feature is not enabled, no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BIAS, and the depthBiasEnable member of pRasterizationState is VK_TRUE, the depthBiasClamp member of pRasterizationState must be 0.0" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-layout-00756", @@ -6680,11 +9064,11 @@ }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00757", - "text": " If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must be the same as the sample count for those subpass attachments" + "text": " If the pipeline is being created with fragment shader state, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must be the same as the sample count for those subpass attachments" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00758", - "text": " If subpass does not use any color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must follow the rules for a zero-attachment subpass" + "text": " If the pipeline is being created with fragment shader state and subpass does not use any color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must follow the rules for a zero-attachment subpass" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00759", @@ -6695,12 +9079,12 @@ "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02097", - "text": " If pStages includes a vertex shader stage, pVertexInputState must be a valid pointer to a valid VkPipelineVertexInputStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02098", + "text": " If the pipeline is being created with vertex input state, pInputAssemblyState must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02098", - "text": " If pStages includes a vertex shader stage, pInputAssemblyState must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-04893", + "text": " The pipeline must be created with a complete set of state" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-sType", @@ -6708,7 +9092,7 @@ }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkGraphicsPipelineShaderGroupsCreateInfoNV, VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, VkPipelineDiscardRectangleStateCreateInfoEXT, or VkPipelineRepresentativeFragmentTestStateCreateInfoNV" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkGraphicsPipelineShaderGroupsCreateInfoNV, VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, 
VkPipelineDiscardRectangleStateCreateInfoEXT, VkPipelineFragmentShadingRateEnumStateCreateInfoNV, VkPipelineFragmentShadingRateStateCreateInfoKHR, or VkPipelineRepresentativeFragmentTestStateCreateInfoNV" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-unique", @@ -6750,107 +9134,145 @@ "!(VK_NV_mesh_shader)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-00727", - "text": " The stage member of one element of pStages must be VK_SHADER_STAGE_VERTEX_BIT" + "text": " If the pipeline is being created with pre-rasterization shader state the stage member of one element of pStages must be VK_SHADER_STAGE_VERTEX_BIT" } ], "(VK_NV_mesh_shader)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02095", - "text": " The geometric shader stages provided in pStages must be either from the mesh shading pipeline (stage is VK_SHADER_STAGE_TASK_BIT_NV or VK_SHADER_STAGE_MESH_BIT_NV) or from the primitive shading pipeline (stage is VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, or VK_SHADER_STAGE_GEOMETRY_BIT)" + "text": " If the pipeline is being created with pre-rasterization shader state the geometric shader stages provided in pStages must be either from the mesh shading pipeline (stage is VK_SHADER_STAGE_TASK_BIT_NV or VK_SHADER_STAGE_MESH_BIT_NV) or from the primitive shading pipeline (stage is VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, or VK_SHADER_STAGE_GEOMETRY_BIT)" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-02096", - "text": " The stage member of one element of pStages must be either VK_SHADER_STAGE_VERTEX_BIT or VK_SHADER_STAGE_MESH_BIT_NV" + "text": " If the pipeline is being created with pre-rasterization shader state the stage member of one element of pStages must be either VK_SHADER_STAGE_VERTEX_BIT or VK_SHADER_STAGE_MESH_BIT_NV" } ], - "!(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + "!(VK_EXT_extended_dynamic_state)": [ { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00743", - "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT, the pViewports member of pViewportState must be a valid pointer to an array of pViewportState->viewportCount valid VkViewport structures" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00744", - "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748", + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR, the pScissors member of pViewportState must be a valid pointer to an array of pViewportState->scissorCount VkRect2D structures" } ], - 
"(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + "(VK_EXT_extended_dynamic_state)": [ { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01756", - "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL in the VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04130", + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT or VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, the pViewports member of pViewportState must be a valid pointer to an array of pViewportState->viewportCount valid VkViewport structures" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01757", - "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04131", + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR or VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, the pScissors member of pViewportState must be a valid pointer to an array of pViewportState->scissorCount VkRect2D structures" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03378", + "text": " If the extendedDynamicState feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_CULL_MODE_EXT, VK_DYNAMIC_STATE_FRONT_FACE_EXT, VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, or VK_DYNAMIC_STATE_STENCIL_OP_EXT" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03379", + "text": " If the pipeline is being created with pre-rasterization shader state, and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT is included in the pDynamicStates array then viewportCount must be zero" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03380", + "text": " If the pipeline is being created with pre-rasterization shader state, and VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT is included in the pDynamicStates array then scissorCount must be zero" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04132", + "text": " If the pipeline is being created with pre-rasterization shader state, and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT is included in the pDynamicStates array then VK_DYNAMIC_STATE_VIEWPORT must not be present" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04133", + "text": " If the pipeline is being created with pre-rasterization shader state, and 
VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT is included in the pDynamicStates array then VK_DYNAMIC_STATE_SCISSOR must not be present" + } + ], + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pViewportState-04892", + "text": " If the pipeline is being created with pre-rasterization shader state, and the graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled, pViewportState must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04868", + "text": " If the extendedDynamicState2 feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, or VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04869", + "text": " If the extendedDynamicState2LogicOp feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_LOGIC_OP_EXT" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04870", + "text": " If the extendedDynamicState2PatchControlPoints feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT" } ], "!(VK_EXT_depth_range_unrestricted)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00755", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive" + "text": " If the pipeline is being created with fragment shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive" } ], "(VK_EXT_depth_range_unrestricted)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-02510", - "text": " If the VK_EXT_depth_range_unrestricted extension is not enabled and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive" + "text": " If the pipeline is being created with fragment shader state, and the VK_EXT_depth_range_unrestricted extension is not enabled and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive" } ], "(VK_EXT_sample_locations)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01521", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.width must evenly divide 
VkMultisamplePropertiesEXT::sampleLocationGridSize.width as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples" + "text": " If the pipeline is being created with fragment shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.width must evenly divide VkMultisamplePropertiesEXT::sampleLocationGridSize.width as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01522", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.height must evenly divide VkMultisamplePropertiesEXT::sampleLocationGridSize.height as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples" + "text": " If the pipeline is being created with fragment shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.height must evenly divide VkMultisamplePropertiesEXT::sampleLocationGridSize.height as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationsPerPixel must equal rasterizationSamples" + "text": " If the pipeline is being created with fragment shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationsPerPixel must equal rasterizationSamples" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-sampleLocationsEnable-01524", - "text": " If the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, the fragment shader code must not statically use the extended instruction InterpolateAtSample" + "text": " If the pipeline is being created with fragment shader state, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, the fragment shader code must not statically use the extended instruction InterpolateAtSample" } ], "(VK_AMD_mixed_attachment_samples)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01505", - "text": " If the VK_AMD_mixed_attachment_samples 
extension is enabled, and if subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must equal the maximum of the sample counts of those subpass attachments" + "text": " If the pipeline is being created with fragment shader state, and the VK_AMD_mixed_attachment_samples extension is enabled, and if subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must equal the maximum of the sample counts of those subpass attachments" } ], "(VK_NV_framebuffer_mixed_samples)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01411", - "text": " If the VK_NV_framebuffer_mixed_samples extension is enabled, and if subpass has a depth/stencil attachment and depth test, stencil test, or depth bounds test are enabled, then the rasterizationSamples member of pMultisampleState must be the same as the sample count of the depth/stencil attachment" + "text": " If the pipeline is being created with fragment shader state, and the VK_NV_framebuffer_mixed_samples extension is enabled, and if subpass has a depth/stencil attachment and depth test, stencil test, or depth bounds test are enabled, then the rasterizationSamples member of pMultisampleState must be the same as the sample count of the depth/stencil attachment" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01412", - "text": " If the VK_NV_framebuffer_mixed_samples extension is enabled, and if subpass has any color attachments, then the rasterizationSamples member of pMultisampleState must be greater than or equal to the sample count for those subpass attachments" + "text": " If the pipeline is being created with fragment shader state, and the VK_NV_framebuffer_mixed_samples extension is enabled, and if subpass has any color attachments, then the rasterizationSamples member of pMultisampleState must be greater than or equal to the sample count for those subpass attachments" } ], "(VK_NV_coverage_reduction_mode)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-coverageReductionMode-02722", - "text": " If the VK_NV_coverage_reduction_mode extension is enabled, the coverage reduction mode specified by VkPipelineCoverageReductionStateCreateInfoNV::coverageReductionMode, the rasterizationSamples member of pMultisampleState and the sample counts for the color and depth/stencil attachments (if the subpass has them) must be a valid combination returned by vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" + "text": " If the pipeline is being created with fragment shader state, and the VK_NV_coverage_reduction_mode extension is enabled, the coverage reduction mode specified by VkPipelineCoverageReductionStateCreateInfoNV::coverageReductionMode, the rasterizationSamples member of pMultisampleState and the sample counts for the color and depth/stencil attachments (if the subpass has them) must be a valid combination returned by vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" } ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00760", - "text": " If the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewTessellationShader is not enabled, then pStages must not include tessellation shaders" + "text": " If the pipeline is being created with pre-rasterization shader state, and the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewTessellationShader is not enabled, then 
pStages must not include tessellation shaders" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00761", - "text": " If the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewGeometryShader is not enabled, then pStages must not include a geometry shader" + "text": " If the pipeline is being created with pre-rasterization shader state, and the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewGeometryShader is not enabled, then pStages must not include a geometry shader" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00762", - "text": " If the renderPass has multiview enabled and subpass has more than one bit set in the view mask, shaders in the pipeline must not write to the Layer built-in output" + "text": " If the pipeline is being created with pre-rasterization shader state, and the renderPass has multiview enabled and subpass has more than one bit set in the view mask, shaders in the pipeline must not write to the Layer built-in output" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00763", - "text": " If the renderPass has multiview enabled, then all shaders must not include variables decorated with the Layer built-in decoration in their interfaces" + "text": " If the pipeline is being created with pre-rasterization shader state, and the renderPass has multiview enabled, then all shaders must not include variables decorated with the Layer built-in decoration in their interfaces" } ], "(VK_VERSION_1_1,VK_KHR_device_group)": [ @@ -6862,69 +9284,79 @@ "(VK_VERSION_1_1,VK_KHR_maintenance2,VK_KHR_create_renderpass2)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-01565", - "text": " If pStages includes a fragment shader stage and an input attachment was referenced by an aspectMask at renderPass creation time, its shader code must only read from the aspects that were specified for that input attachment" + "text": " If the pipeline is being created with fragment shader state and an input attachment was referenced by an aspectMask at renderPass creation time, the fragment shader must only read from the aspects that were specified for that input attachment" } ], "(VK_NV_clip_space_w_scaling)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, and the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure, included in the pNext chain of pViewportState, is VK_TRUE, the pViewportWScalings member of the VkPipelineViewportWScalingStateCreateInfoNV must be a pointer to an array of VkPipelineViewportWScalingStateCreateInfoNV::viewportCount valid VkViewportWScalingNV structures" + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, and the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure, included in the pNext chain of pViewportState, is VK_TRUE, the pViewportWScalings member of the VkPipelineViewportWScalingStateCreateInfoNV must be a pointer to an array of VkPipelineViewportWScalingStateCreateInfoNV::viewportCount valid VkViewportWScalingNV structures" } ], "(VK_NV_scissor_exclusive)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04056", - "text": " If no element of the pDynamicStates member of pDynamicState 
is VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, and if pViewportState->pNext chain includes a VkPipelineViewportExclusiveScissorStateCreateInfoNV structure, and if its exclusiveScissorCount member is not 0, then its pExclusiveScissors member must be a valid pointer to an array of exclusiveScissorCount VkRect2D structures" + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, and if pViewportState->pNext chain includes a VkPipelineViewportExclusiveScissorStateCreateInfoNV structure, and if its exclusiveScissorCount member is not 0, then its pExclusiveScissors member must be a valid pointer to an array of exclusiveScissorCount VkRect2D structures" } ], "(VK_NV_shading_rate_image)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04057", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, and if pViewportState->pNext chain includes a VkPipelineViewportShadingRateImageStateCreateInfoNV structure, then its pShadingRatePalettes member must be a valid pointer to an array of viewportCount valid VkShadingRatePaletteNV structures" + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, and if pViewportState->pNext chain includes a VkPipelineViewportShadingRateImageStateCreateInfoNV structure, then its pShadingRatePalettes member must be a valid pointer to an array of viewportCount valid VkShadingRatePaletteNV structures" } ], "(VK_EXT_discard_rectangles)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04058", - "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, and if pNext chain includes a VkPipelineDiscardRectangleStateCreateInfoEXT structure, and if its discardRectangleCount member is not 0, then its pDiscardRectangles member must be a valid pointer to an array of discardRectangleCount VkRect2D structures" + "text": " If the pipeline is being created with pre-rasterization shader state, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, and if pNext chain includes a VkPipelineDiscardRectangleStateCreateInfoEXT structure, and if its discardRectangleCount member is not 0, then its pDiscardRectangles member must be a valid pointer to an array of discardRectangleCount VkRect2D structures" + } + ], + "!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02097", + "text": " If the pipeline is being created with vertex input state, pVertexInputState must be a valid pointer to a valid VkPipelineVertexInputStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pVertexInputState-04910", + "text": " If the pipeline is being created with vertex input state, and VK_DYNAMIC_STATE_VERTEX_INPUT_EXT is not set, pVertexInputState must be a valid pointer to a valid VkPipelineVertexInputStateCreateInfo structure" } ], "(VK_EXT_transform_feedback)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02317", - "text": " The Xfb execution mode can be specified by only one shader stage in pStages" + "text": " If the pipeline is being created with pre-rasterization shader state, the Xfb execution mode can be specified by no more than one shader stage in pStages" }, { 
"vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02318", - "text": " If any shader stage in pStages specifies Xfb execution mode it must be the last vertex processing stage" + "text": " If the pipeline is being created with pre-rasterization shader state, and any shader stage in pStages specifies Xfb execution mode it must be the last vertex processing stage" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02319", - "text": " If a VkPipelineRasterizationStateStreamCreateInfoEXT::rasterizationStream value other than zero is specified, all variables in the output interface of the entry point being compiled decorated with Position, PointSize, ClipDistance, or CullDistance must all be decorated with identical Stream values that match the rasterizationStream" + "text": " If the pipeline is being created with pre-rasterization shader state, and a VkPipelineRasterizationStateStreamCreateInfoEXT::rasterizationStream value other than zero is specified, all variables in the output interface of the entry point being compiled decorated with Position, PointSize, ClipDistance, or CullDistance must be decorated with identical Stream values that match the rasterizationStream" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02320", - "text": " If VkPipelineRasterizationStateStreamCreateInfoEXT::rasterizationStream is zero, or not specified, all variables in the output interface of the entry point being compiled decorated with Position, PointSize, ClipDistance, or CullDistance must all be decorated with a Stream value of zero, or must not specify the Stream decoration" + "text": " If the pipeline is being created with pre-rasterization shader state, and VkPipelineRasterizationStateStreamCreateInfoEXT::rasterizationStream is zero, or not specified, all variables in the output interface of the entry point being compiled decorated with Position, PointSize, ClipDistance, or CullDistance must be decorated with a Stream value of zero, or must not specify the Stream decoration" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-geometryStreams-02321", - "text": " If the last vertex processing stage is a geometry shader, and that geometry shader uses the GeometryStreams capability, then VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams feature must be enabled" + "text": " If the pipeline is being created with pre-rasterization shader state, and the last vertex processing stage is a geometry shader, and that geometry shader uses the GeometryStreams capability, then VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams feature must be enabled" } ], "(VK_EXT_transform_feedback)+(VK_NV_mesh_shader)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-02322", - "text": " If there are any mesh shader stages in the pipeline there must not be any shader stage in the pipeline with a Xfb execution mode" + "text": " If the pipeline is being created with pre-rasterization shader state, and there are any mesh shader stages in the pipeline there must not be any shader stage in the pipeline with a Xfb execution mode" } ], "(VK_EXT_line_rasterization)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766", - "text": " If the lineRasterizationMode member of a VkPipelineRasterizationLineStateCreateInfoEXT structure included in the pNext chain of pRasterizationState is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT or VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT and if rasterization is enabled, then the alphaToCoverageEnable, alphaToOneEnable, and 
sampleShadingEnable members of pMultisampleState must all be VK_FALSE" + "text": " If the pipeline is being created with pre-rasterization shader state and at least one of fragment output interface state or fragment shader state, the lineRasterizationMode member of a VkPipelineRasterizationLineStateCreateInfoEXT structure included in the pNext chain of pRasterizationState is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT or VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, then the alphaToCoverageEnable, alphaToOneEnable, and sampleShadingEnable members of pMultisampleState must all be VK_FALSE" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767", - "text": " If the stippledLineEnable member of VkPipelineRasterizationLineStateCreateInfoEXT is VK_TRUE and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, then the lineStippleFactor member of VkPipelineRasterizationLineStateCreateInfoEXT must be in the range [1,256]" + "text": " If the pipeline is being created with pre-rasterization shader state, the stippledLineEnable member of VkPipelineRasterizationLineStateCreateInfoEXT is VK_TRUE, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, then the lineStippleFactor member of VkPipelineRasterizationLineStateCreateInfoEXT must be in the range [1,256]" } ], "(VK_KHR_pipeline_library)": [ @@ -6933,7 +9365,7 @@ "text": " flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" } ], - "(VK_KHR_ray_tracing)": [ + "(VK_KHR_ray_tracing_pipeline)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03372", "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" @@ -6957,32 +9389,26 @@ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03377", "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" - } - ], - "(VK_EXT_extended_dynamic_state)": [ - { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03378", - "text": " If the extendedDynamicState feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_CULL_MODE_EXT, VK_DYNAMIC_STATE_FRONT_FACE_EXT, VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, or VK_DYNAMIC_STATE_STENCIL_OP_EXT" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03379", - "text": " If VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT is included in the pDynamicStates array then viewportCount must be zero" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-03577", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03380", - "text": " If VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT is included in the pDynamicStates array then scissorCount must be zero" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03578", + "text": " All elements of the pDynamicStates member of pDynamicState must not be VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR" } ], "(VK_NV_device_generated_commands)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-02877", - "text": " If flags includes 
VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then the VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" + "text": " If flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then the VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" } ], "(VK_NV_device_generated_commands)+(VK_EXT_transform_feedback)": [ { "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-02966", - "text": " If flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then all stages must not specify Xfb execution mode" + "text": " If the pipeline is being created with pre-rasterization shader state and flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then all stages must not specify Xfb execution mode" } ], "(VK_EXT_pipeline_creation_cache_control)": [ @@ -6990,6 +9416,124 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-pipelineCreationCacheControl-02878", "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" } + ], + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04494", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.width must be greater than or equal to 1" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04495", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.height must be greater than or equal to 1" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04496", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.width must be a power-of-two value" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04497", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.height must be a power-of-two value" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04498", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.width must be less than or equal to 4" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04499", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.height must be less than or equal to 4" + }, + { + "vuid": 
"VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04500", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, and the pipelineFragmentShadingRate feature is not enabled, VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.width and VkPipelineFragmentShadingRateStateCreateInfoKHR::fragmentSize.height must both be equal to 1" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04501", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, and the primitiveFragmentShadingRate feature is not enabled, VkPipelineFragmentShadingRateStateCreateInfoKHR::combinerOps[0] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04502", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, and the attachmentFragmentShadingRate feature is not enabled, VkPipelineFragmentShadingRateStateCreateInfoKHR::combinerOps[1] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-primitiveFragmentShadingRateWithMultipleViewports-04504", + "text": " If the pipeline is being created with pre-rasterization shader state and the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, and entry points specified in pStages write to the ViewportIndex built-in, they must not also write to the PrimitiveShadingRateKHR built-in" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-fragmentShadingRateNonTrivialCombinerOps-04506", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, the fragmentShadingRateNonTrivialCombinerOps limit is not supported, and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, elements of VkPipelineFragmentShadingRateStateCreateInfoKHR::combinerOps must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR or VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR" + } + ], + "(VK_KHR_fragment_shading_rate)+(VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-primitiveFragmentShadingRateWithMultipleViewports-04503", + "text": " If the pipeline is being created with pre-rasterization shader state and the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT is not included in pDynamicState->pDynamicStates, and VkPipelineViewportStateCreateInfo::viewportCount is greater than 1, entry points specified in pStages must not write to the PrimitiveShadingRateKHR built-in" + } + ], + "(VK_KHR_fragment_shading_rate)+(VK_NV_viewport_array2)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-primitiveFragmentShadingRateWithMultipleViewports-04505", + "text": " If the pipeline is being created with pre-rasterization shader state and the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, and entry points specified in pStages write to the ViewportMaskNV built-in, they must not also write to the PrimitiveShadingRateKHR built-in" + } + ], + "(VK_NV_fragment_shading_rate_enums)": [ + { + "vuid": 
"VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04569", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, and the fragmentShadingRateEnums feature is not enabled, VkPipelineFragmentShadingRateEnumStateCreateInfoNV::shadingRateType must be equal to VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV." + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04570", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, and the pipelineFragmentShadingRate feature is not enabled, VkPipelineFragmentShadingRateEnumStateCreateInfoNV::shadingRate must be equal to VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV." + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04571", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, and the primitiveFragmentShadingRate feature is not enabled, VkPipelineFragmentShadingRateEnumStateCreateInfoNV::combinerOps[0] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-04572", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, and the attachmentFragmentShadingRate feature is not enabled, VkPipelineFragmentShadingRateEnumStateCreateInfoNV::combinerOps[1] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-fragmentShadingRateNonTrivialCombinerOps-04573", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, and the fragmentShadingRateNonTrivialCombinerOps limit is not supported and VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR is not included in pDynamicState->pDynamicStates, elements of VkPipelineFragmentShadingRateEnumStateCreateInfoNV::combinerOps must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR or VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-04574", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, and the supersampleFragmentShadingRates feature is not enabled, VkPipelineFragmentShadingRateEnumStateCreateInfoNV::shadingRate must not be equal to VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV, VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV, VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV, or VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV." + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-04575", + "text": " If the pipeline is being created with pre-rasterization shader state or fragment shader state, and the noInvocationFragmentShadingRates feature is not enabled, VkPipelineFragmentShadingRateEnumStateCreateInfoNV::shadingRate must not be equal to VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV." 
+ } + ], + "(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04807", + "text": " If the pipeline is being created with pre-rasterization shader state and the vertexInputDynamicState feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_VERTEX_INPUT_EXT" + } + ], + "(VK_EXT_color_write_enable)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04800", + "text": " If the colorWriteEnable feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT" + } + ], + "(VK_QCOM_render_pass_shader_resolve)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizationSamples-04899", + "text": " If the pipeline is being created with fragment shader state, and the VK_QCOM_render_pass_shader_resolve extension is enabled, and if subpass has any input attachments, and if the subpass description contains VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, then the sample count of the input attachments must equal rasterizationSamples" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-sampleShadingEnable-04900", + "text": " If the pipeline is being created with fragment shader state, and the VK_QCOM_render_pass_shader_resolve extension is enabled, and if the subpass description contains VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, then sampleShadingEnable must be false" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-04901", + "text": " If flags includes VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, then the subpass must be the last subpass in a subpass dependency chain" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-04902", + "text": " If flags includes VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, and if pResolveAttachments is not NULL, then each resolve attachment must be VK_ATTACHMENT_UNUSED" + } ] }, "VkPipelineDynamicStateCreateInfo": { @@ -7036,11 +9580,11 @@ }, { "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-pPipelines-02886", - "text": " Each element of the pPipelines member of libraries must have been created with identical state to the pipeline currently created except the state that can be overriden by VkGraphicsShaderGroupCreateInfoNV" + "text": " Each element of pPipelines must have been created with identical state to the pipeline currently created except the state that can be overridden by VkGraphicsShaderGroupCreateInfoNV" }, { "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-deviceGeneratedCommands-02887", - "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, { "vuid": "VUID-VkGraphicsPipelineShaderGroupsCreateInfoNV-sType-sType", @@ -7112,6 +9656,736 @@ } ] }, + "vkCreateRayTracingPipelinesNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-03415", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-03416", + "text": " If the flags member of any element of pCreateInfos 
contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parameter", + "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-pCreateInfos-parameter", + "text": " pCreateInfos must be a valid pointer to an array of createInfoCount valid VkRayTracingPipelineCreateInfoNV structures" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-pPipelines-parameter", + "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength", + "text": " createInfoCount must be greater than 0" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent", + "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-03816", + "text": " flags must not contain the VK_PIPELINE_CREATE_DISPATCH_BASE flag" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-02903", + "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized" + } + ] + }, + "vkCreateRayTracingPipelinesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-flags-03415", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-flags-03416", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-03677", + "text": " If deferredOperation is not VK_NULL_HANDLE, it must be a valid VkDeferredOperationKHR object" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-03678", + "text": " Any previous deferred operation that was associated with deferredOperation must be complete" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-rayTracingPipeline-03586", + "text": " The rayTracingPipeline feature must be enabled" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-parameter", + "text": " If deferredOperation is not 
VK_NULL_HANDLE, deferredOperation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parameter", + "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pCreateInfos-parameter", + "text": " pCreateInfos must be a valid pointer to an array of createInfoCount valid VkRayTracingPipelineCreateInfoKHR structures" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pPipelines-parameter", + "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-createInfoCount-arraylength", + "text": " createInfoCount must be greater than 0" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-parent", + "text": " If deferredOperation is a valid handle, it must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parent", + "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-flags-03816", + "text": " flags must not contain the VK_PIPELINE_CREATE_DISPATCH_BASE flag" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-02903", + "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_EXT_pipeline_creation_cache_control+VK_KHR_deferred_host_operations)": [ + { + "vuid": "VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-03587", + "text": " If deferredOperation is not VK_NULL_HANDLE, the flags member of elements of pCreateInfos must not include VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" + } + ] + }, + "VkRayTracingPipelineCreateInfoNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03421", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03422", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03423", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03424", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" + }, + { + 
"vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-03426", + "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-03427", + "text": " layout must be consistent with all shaders specified in pStages" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-03428", + "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stage-03425", + "text": " The stage member of at least one element of pStages must be VK_SHADER_STAGE_RAYGEN_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-03457", + "text": " maxRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxRecursionDepth" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineCreationFeedbackCreateInfoEXT" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter", + "text": " pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter", + "text": " pGroups must be a valid pointer to an array of groupCount valid VkRayTracingShaderGroupCreateInfoNV structures" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stageCount-arraylength", + "text": " stageCount must be greater than 0" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-groupCount-arraylength", + "text": " groupCount must be greater than 0" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-commonparent", + "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02904", + "text": " flags must not include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pipelineCreationCacheControl-02905", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02957", + "text": " flags must not include both 
VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV and VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT at the same time" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03456", + "text": " flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03458", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03459", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03460", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03461", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03462", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03463", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03588", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR" + } + ] + }, + "VkRayTracingPipelineCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03421", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03422", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03423", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03424", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pStages-03426", + "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-03427", + "text": " layout must be consistent with all shaders specified in pStages" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-03428", + "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-stage-03425", + 
"text": " If flags does not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, the stage member of at least one element of pStages, including those implicitly added by pLibraryInfo, must be VK_SHADER_STAGE_RAYGEN_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-maxPipelineRayRecursionDepth-03589", + "text": " maxPipelineRayRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxRayRecursionDepth" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03595", + "text": " If the VK_KHR_pipeline_library extension is not enabled, pLibraryInfo and pLibraryInterface must be NULL." + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03470", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, for any element of pGroups with a type of VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR or VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, the anyHitShader of that element must not be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03471", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, for any element of pGroups with a type of VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR or VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, the closestHitShader of that element must not be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-rayTraversalPrimitiveCulling-03596", + "text": " If the rayTraversalPrimitiveCulling feature is not enabled, flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-rayTraversalPrimitiveCulling-03597", + "text": " If the rayTraversalPrimitiveCulling feature is not enabled, flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03598", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, rayTracingPipelineShaderGroupHandleCaptureReplay must be enabled" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-rayTracingPipelineShaderGroupHandleCaptureReplay-03599", + "text": " If VkPhysicalDeviceRayTracingPipelineFeaturesKHR::rayTracingPipelineShaderGroupHandleCaptureReplay is VK_TRUE and the pShaderGroupCaptureReplayHandle member of any element of pGroups is not NULL, flags must include VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03600", + "text": " If pLibraryInfo is not NULL and its libraryCount is 0, stageCount must not be 0" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03601", + "text": " If pLibraryInfo is not NULL and its libraryCount is 0, groupCount must not be 0" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pDynamicStates-03602", + "text": " Any element of the pDynamicStates member of pDynamicState must be VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineCreationFeedbackCreateInfoEXT" + }, + { + "vuid": 
"VUID-VkRayTracingPipelineCreateInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pStages-parameter", + "text": " If stageCount is not 0, pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pGroups-parameter", + "text": " If groupCount is not 0, pGroups must be a valid pointer to an array of groupCount valid VkRayTracingShaderGroupCreateInfoKHR structures" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-parameter", + "text": " If pLibraryInfo is not NULL, pLibraryInfo must be a valid pointer to a valid VkPipelineLibraryCreateInfoKHR structure" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInterface-parameter", + "text": " If pLibraryInterface is not NULL, pLibraryInterface must be a valid pointer to a valid VkRayTracingPipelineInterfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pDynamicState-parameter", + "text": " If pDynamicState is not NULL, pDynamicState must be a valid pointer to a valid VkPipelineDynamicStateCreateInfo structure" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-commonparent", + "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_NV_device_generated_commands)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-02904", + "text": " flags must not include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_EXT_pipeline_creation_cache_control)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pipelineCreationCacheControl-02905", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03465", + "text": " If flags includes VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, pLibraryInterface must not be NULL" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03590", + "text": " If pLibraryInfo is not NULL and its libraryCount member is greater than 0, its pLibraryInterface member must not be NULL" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03591", + "text": " Each element of pLibraryInfo->pLibraries must have been created with the value of maxPipelineRayRecursionDepth equal to that in this pipeline" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03592", + "text": " If pLibraryInfo is not NULL, each element of its pLibraries member must have been created with a layout that is compatible with the layout in this pipeline" + }, + { + "vuid": 
"VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03593", + "text": " If pLibraryInfo is not NULL, each element of its pLibraries member must have been created with values of the maxPipelineRayPayloadSize and maxPipelineRayHitAttributeSize members of pLibraryInterface equal to those in this pipeline" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03594", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, each element of pLibraryInfo->pLibraries must have been created with the VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR bit set" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-04718", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, each element of pLibraryInfo->pLibraries must have been created with the VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR bit set" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-04719", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, each element of pLibraryInfo->pLibraries must have been created with the VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR bit set" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-04720", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, each element of pLibraryInfo->pLibraries must have been created with the VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR bit set" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-04721", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, each element of pLibraryInfo->pLibraries must have been created with the VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR bit set" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-04722", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, each element of pLibraryInfo->pLibraries must have been created with the VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR bit set" + }, + { + "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-04723", + "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, each element of pLibraryInfo->pLibraries must have been created with the VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR bit set" + } + ] + }, + "VkRayTracingShaderGroupCreateInfoNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then generalShader must be a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_NV, VK_SHADER_STAGE_MISS_BIT_NV, or VK_SHADER_STAGE_CALLABLE_BIT_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV then intersectionShader must be a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_NV" + }, + { + "vuid": 
"VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV then intersectionShader must be VK_SHADER_UNUSED_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417", + "text": " closestHitShader must be either VK_SHADER_UNUSED_NV or a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418", + "text": " anyHitShader must be either VK_SHADER_UNUSED_NV or a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter", + "text": " type must be a valid VkRayTracingShaderGroupTypeKHR value" + } + ] + }, + "VkRayTracingShaderGroupCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03474", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR then generalShader must be a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_KHR, VK_SHADER_STAGE_MISS_BIT_KHR, or VK_SHADER_STAGE_CALLABLE_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03475", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03476", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR then intersectionShader must be a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03477", + "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR then intersectionShader must be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-closestHitShader-03478", + "text": " closestHitShader must be either VK_SHADER_UNUSED_KHR or a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-anyHitShader-03479", + "text": " anyHitShader must be either VK_SHADER_UNUSED_KHR or a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingPipelineShaderGroupHandleCaptureReplayMixed-03603", + "text": " If VkPhysicalDeviceRayTracingPipelineFeaturesKHR::rayTracingPipelineShaderGroupHandleCaptureReplayMixed is VK_FALSE then pShaderGroupCaptureReplayHandle must not be provided if it has not been provided on a previous call to ray tracing pipeline creation" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingPipelineShaderGroupHandleCaptureReplayMixed-03604", + "text": " If 
VkPhysicalDeviceRayTracingPipelineFeaturesKHR::rayTracingPipelineShaderGroupHandleCaptureReplayMixed is VK_FALSE then the caller must guarantee that no ray tracing pipeline creation commands with pShaderGroupCaptureReplayHandle provided execute simultaneously with ray tracing pipeline creation commands without pShaderGroupCaptureReplayHandle provided" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-parameter", + "text": " type must be a valid VkRayTracingShaderGroupTypeKHR value" + } + ] + }, + "VkRayTracingPipelineInterfaceCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-maxPipelineRayHitAttributeSize-03605", + "text": " maxPipelineRayHitAttributeSize must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxRayHitAttributeSize" + }, + { + "vuid": "VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetRayTracingShaderGroupHandlesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-04619", + "text": " pipeline must be a ray tracing pipeline" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-04050", + "text": " firstGroup must be less than the number of shader groups in pipeline" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-02419", + "text": " The sum of firstGroup and groupCount must be less than or equal to the number of shader groups in pipeline" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-02420", + "text": " dataSize must be at least VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize {times} groupCount" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parent", + "text": " pipeline must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_pipeline_library)": [ + { + "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-03482", + "text": " pipeline must have not been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + } + ] + }, + "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-04620", + "text": " pipeline must be a ray tracing pipeline" + }, + { + "vuid": 
"VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-04051", + "text": " firstGroup must be less than the number of shader groups in pipeline" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-03483", + "text": " The sum of firstGroup and groupCount must be less than or equal to the number of shader groups in pipeline" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-03484", + "text": " dataSize must be at least VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleCaptureReplaySize {times} groupCount" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-rayTracingPipelineShaderGroupHandleCaptureReplay-03606", + "text": " VkPhysicalDeviceRayTracingPipelineFeaturesKHR::rayTracingPipelineShaderGroupHandleCaptureReplay must be enabled to call this function" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-03607", + "text": " pipeline must have been created with a flags that included VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parent", + "text": " pipeline must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCompileDeferredNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCompileDeferredNV-pipeline-04621", + "text": " pipeline must be a ray tracing pipeline" + }, + { + "vuid": "VUID-vkCompileDeferredNV-pipeline-02237", + "text": " pipeline must have been created with VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV" + }, + { + "vuid": "VUID-vkCompileDeferredNV-shader-02238", + "text": " shader must not have been called as a deferred compile before" + }, + { + "vuid": "VUID-vkCompileDeferredNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCompileDeferredNV-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkCompileDeferredNV-pipeline-parent", + "text": " pipeline must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetRayTracingShaderGroupStackSizeKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-vkGetRayTracingShaderGroupStackSizeKHR-pipeline-04622", + "text": " pipeline must be a ray tracing pipeline" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupStackSizeKHR-group-03608", + "text": " The value of group must be less than the number of shader groups in pipeline" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupStackSizeKHR-groupShader-03609", + "text": " The shader identified by groupShader in group must not be VK_SHADER_UNUSED_KHR" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupStackSizeKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + 
}, + { + "vuid": "VUID-vkGetRayTracingShaderGroupStackSizeKHR-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupStackSizeKHR-groupShader-parameter", + "text": " groupShader must be a valid VkShaderGroupShaderKHR value" + }, + { + "vuid": "VUID-vkGetRayTracingShaderGroupStackSizeKHR-pipeline-parent", + "text": " pipeline must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdSetRayTracingPipelineStackSizeKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-vkCmdSetRayTracingPipelineStackSizeKHR-pipelineStackSize-03610", + "text": " pipelineStackSize must be large enough for any dynamic execution through the shaders in the ray tracing pipeline used by a subsequent trace call" + }, + { + "vuid": "VUID-vkCmdSetRayTracingPipelineStackSizeKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetRayTracingPipelineStackSizeKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdSetRayTracingPipelineStackSizeKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdSetRayTracingPipelineStackSizeKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, "vkDestroyPipeline": { "core": [ { @@ -7292,6 +10566,10 @@ "vuid": "VUID-VkSpecializationInfo-pMapEntries-00774", "text": " The size member of each element of pMapEntries must be less than or equal to dataSize minus offset" }, + { + "vuid": "VUID-VkSpecializationInfo-constantID-04911", + "text": " The constantID value of each element of pMapEntries must be unique within pMapEntries" + }, { "vuid": "VUID-VkSpecializationInfo-pMapEntries-parameter", "text": " If mapEntryCount is not 0, pMapEntries must be a valid pointer to an array of mapEntryCount valid VkSpecializationMapEntry structures" @@ -7389,20 +10667,38 @@ "text": " This command must not be recorded when transform feedback is active" } ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ { "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-02391", "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, the VkCommandPool that commandBuffer was allocated from must support compute operations" }, { "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-02392", - "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, the pipeline must be a ray tracing pipeline" + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, pipeline must be a ray tracing pipeline" } ], "(VK_KHR_pipeline_library)": [ { "vuid": "VUID-vkCmdBindPipeline-pipeline-03382", - "text": " The pipeline must not have been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR set" + "text": " pipeline must not have been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR set" + } + ], + "(VK_NV_inherited_viewport_scissor)": [ + { + "vuid": "VUID-vkCmdBindPipeline-commandBuffer-04808", + "text": " If commandBuffer is a secondary command buffer with VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled and pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS, then the pipeline must have been created with VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT or VK_DYNAMIC_STATE_VIEWPORT, and 
VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT or VK_DYNAMIC_STATE_SCISSOR enabled." + } + ], + "(VK_NV_inherited_viewport_scissor,VK_EXT_discard_rectangles)": [ + { + "vuid": "VUID-vkCmdBindPipeline-commandBuffer-04809", + "text": " If commandBuffer is a secondary command buffer with VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled and pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS and pipeline was created with VkPipelineDiscardRectangleStateCreateInfoEXT structure and its discardRectangleCount member is not 0, then the pipeline must have been created with VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT enabled." + } + ], + "(VK_EXT_provoking_vertex)": [ + { + "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-04881", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS and the provokingVertexModePerPipeline limit is VK_FALSE, then pipeline’s VkPipelineRasterizationProvokingVertexStateCreateInfoEXT::provokingVertexMode must be the same as that of any other pipelines previously bound to this bind point within the current renderpass instance, including any pipeline already bound when beginning the renderpass instance" } ] }, @@ -7422,7 +10718,7 @@ }, { "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-deviceGeneratedCommands-02896", - "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, { "vuid": "VUID-vkCmdBindPipelineShaderGroupNV-commandBuffer-parameter", @@ -7518,7 +10814,7 @@ }, { "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274", - "text": " pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR set in the flags field of VkGraphicsPipelineCreateInfo or VkComputePipelineCreateInfo" + "text": " pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR" }, { "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter", @@ -7542,7 +10838,7 @@ "(VK_KHR_pipeline_executable_properties)": [ { "vuid": "VUID-VkPipelineExecutableInfoKHR-executableIndex-03275", - "text": " executableIndex must be less than the number of executables associated with pipeline as returned in the pExecutableCount parameter of vkGetPipelineExecutablePropertiesKHR" + "text": " executableIndex must be less than the number of pipeline executables associated with pipeline as returned in the pExecutableCount parameter of vkGetPipelineExecutablePropertiesKHR" }, { "vuid": "VUID-VkPipelineExecutableInfoKHR-sType-sType", @@ -7582,7 +10878,7 @@ }, { "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278", - "text": " pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR set in the flags field of VkGraphicsPipelineCreateInfo or VkComputePipelineCreateInfo" + "text": " pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR" }, { "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter", @@ -7658,582 +10954,6 @@ } ] }, - "vkCreateRayTracingPipelinesNV": { - "core": [ - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-03415", - "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, 
basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-03416", - "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" - } - ], - "(VK_EXT_pipeline_creation_cache_control)": [ - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-02903", - "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parameter", - "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-pCreateInfos-parameter", - "text": " pCreateInfos must be a valid pointer to an array of createInfoCount valid VkRayTracingPipelineCreateInfoNV structures" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-pPipelines-parameter", - "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength", - "text": " createInfoCount must be greater than 0" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent", - "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" - } - ] - }, - "vkCreateRayTracingPipelinesKHR": { - "core": [ - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-flags-03415", - "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-flags-03416", - "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-rayTracing-03455", - "text": " The rayTracing feature must be enabled" - } - ], - "(VK_EXT_pipeline_creation_cache_control)": [ - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-02903", - "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, host access to pipelineCache must be externally synchronized" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parameter", - "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" - }, - { - "vuid": 
"VUID-vkCreateRayTracingPipelinesKHR-pCreateInfos-parameter", - "text": " pCreateInfos must be a valid pointer to an array of createInfoCount valid VkRayTracingPipelineCreateInfoKHR structures" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pPipelines-parameter", - "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-createInfoCount-arraylength", - "text": " createInfoCount must be greater than 0" - }, - { - "vuid": "VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parent", - "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" - } - ] - }, - "VkRayTracingPipelineCreateInfoNV": { - "core": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03421", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03422", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03423", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03424", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stage-03425", - "text": " The stage member of at least one element of pStages must be VK_SHADER_STAGE_RAYGEN_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-03426", - "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-03427", - "text": " layout must be consistent with all shaders specified in pStages" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-03428", - "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-03457", - "text": " maxRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxRecursionDepth" - } - ], - "(VK_NV_device_generated_commands)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02904", - "text": " flags must not include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" - } - ], - "(VK_EXT_pipeline_creation_cache_control)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pipelineCreationCacheControl-02905", - "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" - 
}, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02957", - "text": " flags must not include both VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV and VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT at the same time" - } - ], - "(VK_KHR_pipeline_library)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03456", - "text": " flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" - } - ], - "(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03458", - "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03459", - "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03460", - "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03461", - "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03462", - "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-03463", - "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineCreationFeedbackCreateInfoEXT" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-sType-unique", - "text": " The sType value of each struct in the pNext chain must be unique" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-parameter", - "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter", - "text": " pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter", - "text": " pGroups must be a valid pointer to an array of groupCount valid VkRayTracingShaderGroupCreateInfoNV structures" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter", - "text": " layout must be a valid VkPipelineLayout handle" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stageCount-arraylength", - "text": " stageCount must be greater than 0" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-groupCount-arraylength", - "text": " groupCount must be greater than 0" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-commonparent", - "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" - } - ] - }, - "VkRayTracingPipelineCreateInfoKHR": { - "core": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03421", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline" - }, - { - "vuid": 
"VUID-VkRayTracingPipelineCreateInfoKHR-flags-03422", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03423", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03424", - "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-stage-03425", - "text": " The stage member of at least one element of pStages must be VK_SHADER_STAGE_RAYGEN_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pStages-03426", - "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-03427", - "text": " layout must be consistent with all shaders specified in pStages" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-03428", - "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-maxRecursionDepth-03464", - "text": " maxRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxRecursionDepth" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03470", - "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, for any element of pGroups with a type of VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR or VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, the anyHitShader of that element must not be VK_SHADER_UNUSED_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03471", - "text": " If flags includes VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, for any element of pGroups with a type of VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR or VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, the closestHitShader of that element must not be VK_SHADER_UNUSED_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-rayTracingPrimitiveCulling-03472", - "text": " If the rayTracingPrimitiveCulling feature is not enabled, flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-rayTracingPrimitiveCulling-03473", - "text": " If the rayTracingPrimitiveCulling feature is not enabled, flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraries-02958", - "text": " If libraries.libraryCount is zero, then stageCount must not be zero" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraries-02959", - "text": " If libraries.libraryCount is zero, then groupCount must not be zero" - } - ], - "(VK_NV_device_generated_commands)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-02904", - "text": " flags must not include 
VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" - } - ], - "(VK_EXT_pipeline_creation_cache_control)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pipelineCreationCacheControl-02905", - "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT" - } - ], - "(VK_KHR_pipeline_library)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-03465", - "text": " If flags includes VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, pLibraryInterface must not be NULL" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraryCount-03466", - "text": " If the libraryCount member of libraries is greater than 0, pLibraryInterface must not be NULL" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03467", - "text": " Each element of the pLibraries member of libraries must have been created with the value of maxRecursionDepth equal to that in this pipeline" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03468", - "text": " Each element of the pLibraries member of libraries must have been created with a layout that is compatible with the layout in this pipeline" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03469", - "text": " Each element of the pLibraries member of libraries must have been created with values of the maxPayloadSize, maxAttributeSize, and maxCallableSize members of pLibraryInterface equal to those in this pipeline" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR or VkPipelineCreationFeedbackCreateInfoEXT" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-sType-unique", - "text": " The sType value of each struct in the pNext chain must be unique" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-flags-parameter", - "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pStages-parameter", - "text": " If stageCount is not 0, pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pGroups-parameter", - "text": " If groupCount is not 0, pGroups must be a valid pointer to an array of groupCount valid VkRayTracingShaderGroupCreateInfoKHR structures" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-libraries-parameter", - "text": " libraries must be a valid VkPipelineLibraryCreateInfoKHR structure" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInterface-parameter", - "text": " If pLibraryInterface is not NULL, pLibraryInterface must be a valid pointer to a valid VkRayTracingPipelineInterfaceCreateInfoKHR structure" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-layout-parameter", - "text": " layout must be a valid VkPipelineLayout handle" - }, - { - "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-commonparent", - "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters 
must have been created, allocated, or retrieved from the same VkDevice" - } - ] - }, - "VkRayTracingShaderGroupCreateInfoNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then generalShader must be a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_NV, VK_SHADER_STAGE_MISS_BIT_NV, or VK_SHADER_STAGE_CALLABLE_BIT_NV" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_NV" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV then intersectionShader must be a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_NV" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV then intersectionShader must be VK_SHADER_UNUSED_NV" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417", - "text": " closestHitShader must be either VK_SHADER_UNUSED_NV or a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418", - "text": " anyHitShader must be either VK_SHADER_UNUSED_NV or a valid index into VkRayTracingPipelineCreateInfoNV::pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_NV" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter", - "text": " type must be a valid VkRayTracingShaderGroupTypeKHR value" - } - ] - }, - "VkRayTracingShaderGroupCreateInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03474", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR then generalShader must be a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_KHR, VK_SHADER_STAGE_MISS_BIT_KHR, or VK_SHADER_STAGE_CALLABLE_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03475", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_KHR" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03476", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR then intersectionShader must be a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03477", - "text": " If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR then intersectionShader must be VK_SHADER_UNUSED_KHR" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-closestHitShader-03478", 
- "text": " closestHitShader must be either VK_SHADER_UNUSED_KHR or a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-anyHitShader-03479", - "text": " anyHitShader must be either VK_SHADER_UNUSED_KHR or a valid index into VkRayTracingPipelineCreateInfoKHR::pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_KHR" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingShaderGroupHandleCaptureReplayMixed-03480", - "text": " If VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingShaderGroupHandleCaptureReplayMixed is VK_FALSE then pShaderGroupCaptureReplayHandle must not be provided if it has not been provided on a previous call to ray tracing pipeline creation" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingShaderGroupHandleCaptureReplayMixed-03481", - "text": " If VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingShaderGroupHandleCaptureReplayMixed is VK_FALSE then the caller must guarantee that no ray tracing pipeline creation commands with pShaderGroupCaptureReplayHandle provided execute simultaneously with ray tracing pipeline creation commands without pShaderGroupCaptureReplayHandle provided" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkRayTracingShaderGroupCreateInfoKHR-type-parameter", - "text": " type must be a valid VkRayTracingShaderGroupTypeKHR value" - } - ] - }, - "VkRayTracingPipelineInterfaceCreateInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR" - }, - { - "vuid": "VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-pNext-pNext", - "text": " pNext must be NULL" - } - ] - }, - "vkGetRayTracingShaderGroupHandlesKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-04050", - "text": " firstGroup must be less than the number of shader groups in pipeline" - }, - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-02419", - "text": " The sum of firstGroup and groupCount must be less than or equal to the number of shader groups in pipeline" - }, - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-02420", - "text": " dataSize must be at least VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize {times} groupCount" - }, - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parameter", - "text": " pipeline must be a valid VkPipeline handle" - }, - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pData-parameter", - "text": " pData must be a valid pointer to an array of dataSize bytes" - }, - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-arraylength", - "text": " dataSize must be greater than 0" - }, - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parent", - "text": " pipeline must have been created, allocated, or retrieved from device" - } - ], - 
"(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_pipeline_library)": [ - { - "vuid": "VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-03482", - "text": " pipeline must have not been created with VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" - } - ] - }, - "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-04051", - "text": " firstGroup must be less than the number of shader groups in pipeline" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-03483", - "text": " The sum of firstGroup and groupCount must be less than or equal to the number of shader groups in pipeline" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-03484", - "text": " dataSize must be at least VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleCaptureReplaySize {times} groupCount" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-rayTracingShaderGroupHandleCaptureReplay-03485", - "text": " VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingShaderGroupHandleCaptureReplay must be enabled to call this function" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parameter", - "text": " pipeline must be a valid VkPipeline handle" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pData-parameter", - "text": " pData must be a valid pointer to an array of dataSize bytes" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-arraylength", - "text": " dataSize must be greater than 0" - }, - { - "vuid": "VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parent", - "text": " pipeline must have been created, allocated, or retrieved from device" - } - ] - }, - "vkCompileDeferredNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-vkCompileDeferredNV-pipeline-02237", - "text": " pipeline must have been created with VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV" - }, - { - "vuid": "VUID-vkCompileDeferredNV-shader-02238", - "text": " shader must not have been called as a deferred compile before" - }, - { - "vuid": "VUID-vkCompileDeferredNV-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCompileDeferredNV-pipeline-parameter", - "text": " pipeline must be a valid VkPipeline handle" - }, - { - "vuid": "VUID-vkCompileDeferredNV-pipeline-parent", - "text": " pipeline must have been created, allocated, or retrieved from device" - } - ] - }, "VkPipelineCreationFeedbackCreateInfoEXT": { "(VK_EXT_pipeline_creation_feedback)": [ { @@ -8261,7 +10981,7 @@ "text": " pipelineStageCreationFeedbackCount must be greater than 0" } ], - "(VK_EXT_pipeline_creation_feedback)+(VK_KHR_ray_tracing)": [ + "(VK_EXT_pipeline_creation_feedback)+(VK_KHR_ray_tracing_pipeline)": [ { "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670", "text": " When chained to VkRayTracingPipelineCreateInfoKHR, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal VkRayTracingPipelineCreateInfoKHR::stageCount" @@ -8346,11 +11066,11 @@ "core": [ { "vuid": "VUID-vkAllocateMemory-pAllocateInfo-01713", - "text": " pAllocateInfo->allocationSize must be less 
than or equal to VkPhysicalDeviceMemoryProperties::memoryHeaps[memindex].size where memindex = VkPhysicalDeviceMemoryProperties::memoryTypes[pAllocateInfo->memoryTypeIndex].heapIndex as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from" + "text": " pAllocateInfo->allocationSize must be less than or equal to VkPhysicalDeviceMemoryProperties::memoryHeaps[memindex].size where memindex = VkPhysicalDeviceMemoryProperties::memoryTypes[pAllocateInfo->memoryTypeIndex].heapIndex as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from." }, { "vuid": "VUID-vkAllocateMemory-pAllocateInfo-01714", - "text": " pAllocateInfo->memoryTypeIndex must be less than VkPhysicalDeviceMemoryProperties::memoryTypeCount as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from" + "text": " pAllocateInfo->memoryTypeIndex must be less than VkPhysicalDeviceMemoryProperties::memoryTypeCount as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from." }, { "vuid": "VUID-vkAllocateMemory-maxMemoryAllocationCount-04101", @@ -8376,7 +11096,7 @@ "(VK_AMD_device_coherent_memory)": [ { "vuid": "VUID-vkAllocateMemory-deviceCoherentMemory-02790", - "text": " If the deviceCoherentMemory feature is not enabled, pAllocateInfo->memoryTypeIndex must not identify a memory type supporting VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD" + "text": " If the deviceCoherentMemory feature is not enabled, pAllocateInfo->memoryTypeIndex must not identify a memory type supporting VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD." } ] }, @@ -8408,7 +11128,7 @@ "(VK_KHR_external_memory_fd)": [ { "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01742", - "text": " If the parameters define an import operation, the external handle specified was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the memory object being imported was created" + "text": " If the parameters define an import operation, the external handle specified was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the payload being imported was created." }, { "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-00648", @@ -8418,11 +11138,11 @@ "(VK_KHR_external_memory+VK_KHR_device_group)": [ { "vuid": "VUID-VkMemoryAllocateInfo-None-00643", - "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the device mask specified by VkMemoryAllocateFlagsInfo must match that specified when the memory object being imported was allocated" + "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the device mask specified by VkMemoryAllocateFlagsInfo must match that specified when the payload being imported was allocated." 
}, { "vuid": "VUID-VkMemoryAllocateInfo-None-00644", - "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the list of physical devices that comprise the logical device passed to vkAllocateMemory must match the list of physical devices that comprise the logical device on which the memory was originally allocated" + "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the list of physical devices that comprise the logical device passed to vkAllocateMemory must match the list of physical devices that comprise the logical device on which the payload was originally allocated." } ], "(VK_KHR_external_memory_win32)": [ @@ -8432,15 +11152,11 @@ }, { "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01743", - "text": " If the parameters define an import operation, the external handle was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR or VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the memory object being imported was created" - }, - { - "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00646", - "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, allocationSize must match the size reported in the memory requirements of the image or buffer member of the VkDedicatedAllocationMemoryAllocateInfoNV structure included in the pNext chain" + "text": " If the parameters define an import operation, the external handle was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR or VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the payload being imported was created." }, { "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00647", - "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, allocationSize must match the size specified when creating the Direct3D 12 heap from which the external handle was extracted" + "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, allocationSize must match the size specified when creating the Direct3D 12 heap from which the payload was extracted." } ], "(VK_VERSION_1_1)": [ @@ -8537,6 +11253,16 @@ "text": " If the pNext chain includes a VkImportMemoryHostPointerInfoEXT structure, VkMemoryOpaqueCaptureAddressAllocateInfo::opaqueCaptureAddress must be zero" } ], + "(VK_FUCHSIA_external_memory)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-None-04749", + "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, the value of memoryTypeIndex must be an index identifying a memory type from the memoryTypeBits field of the VkMemoryZirconHandlePropertiesFUCHSIA structure populated by a call to vkGetMemoryZirconHandlePropertiesFUCHSIA." 
+ }, + { + "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-04750", + "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, the value of allocationSize must be greater than 0 and must be less than or equal to the size of the VMO as determined by zx_vmo_get_size(handle) where handle is the VMO handle to the imported external memory." + } + ], "core": [ { "vuid": "VUID-VkMemoryAllocateInfo-sType-sType", @@ -8544,7 +11270,7 @@ }, { "vuid": "VUID-VkMemoryAllocateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationMemoryAllocateInfoNV, VkExportMemoryAllocateInfo, VkExportMemoryAllocateInfoNV, VkExportMemoryWin32HandleInfoKHR, VkExportMemoryWin32HandleInfoNV, VkImportAndroidHardwareBufferInfoANDROID, VkImportMemoryFdInfoKHR, VkImportMemoryHostPointerInfoEXT, VkImportMemoryWin32HandleInfoKHR, VkImportMemoryWin32HandleInfoNV, VkMemoryAllocateFlagsInfo, VkMemoryDedicatedAllocateInfo, VkMemoryOpaqueCaptureAddressAllocateInfo, or VkMemoryPriorityAllocateInfoEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationMemoryAllocateInfoNV, VkExportMemoryAllocateInfo, VkExportMemoryAllocateInfoNV, VkExportMemoryWin32HandleInfoKHR, VkExportMemoryWin32HandleInfoNV, VkImportAndroidHardwareBufferInfoANDROID, VkImportMemoryFdInfoKHR, VkImportMemoryHostPointerInfoEXT, VkImportMemoryWin32HandleInfoKHR, VkImportMemoryWin32HandleInfoNV, VkImportMemoryZirconHandleInfoFUCHSIA, VkMemoryAllocateFlagsInfo, VkMemoryDedicatedAllocateInfo, VkMemoryOpaqueCaptureAddressAllocateInfo, or VkMemoryPriorityAllocateInfoEXT" }, { "vuid": "VUID-VkMemoryAllocateInfo-sType-unique", @@ -8628,6 +11354,16 @@ "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01797", "text": " If image is not VK_NULL_HANDLE, image must not have been created with VK_IMAGE_CREATE_DISJOINT_BIT set in VkImageCreateInfo::flags" } + ], + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_FUCHSIA_external_memory)": [ + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-04751", + "text": " If image is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, the memory being imported must also be a dedicated image allocation and image must be identical to the image associated with the imported memory" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-04752", + "text": " If buffer is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, the memory being imported must also be a dedicated buffer allocation and buffer must be identical to the buffer associated with the imported memory" + } ] }, "VkDedicatedAllocationMemoryAllocateInfoNV": { @@ -8708,6 +11444,18 @@ } ] }, + "VkExportMemoryAllocateInfoNV": { + "(VK_NV_external_memory)": [ + { + "vuid": "VUID-VkExportMemoryAllocateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV" + }, + { + "vuid": "VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + } + ] + }, "VkExportMemoryWin32HandleInfoKHR": { "(VK_KHR_external_memory_win32)": [ { @@ 
-8856,6 +11604,70 @@ } ] }, + "VkExportMemoryWin32HandleInfoNV": { + "(VK_NV_external_memory_win32)": [ + { + "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV" + }, + { + "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-pAttributes-parameter", + "text": " If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value" + } + ] + }, + "VkImportMemoryWin32HandleInfoNV": { + "(VK_NV_external_memory_win32)": [ + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-01327", + "text": " handleType must not have more than one bit set" + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handle-01328", + "text": " handle must be a valid handle to memory, obtained as specified by handleType" + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV" + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter", + "text": " handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + } + ] + }, + "vkGetMemoryWin32HandleNV": { + "(VK_NV_external_memory_win32)": [ + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-01326", + "text": " handleType must be a flag specified in VkExportMemoryAllocateInfoNV::handleTypes when allocating memory" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-parameter", + "text": " handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask", + "text": " handleType must not be 0" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-pHandle-parameter", + "text": " pHandle must be a valid pointer to a HANDLE value" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parent", + "text": " memory must have been created, allocated, or retrieved from device" + } + ] + }, "VkImportMemoryFdInfoKHR": { "(VK_KHR_external_memory_fd)": [ { @@ -8868,7 +11680,7 @@ }, { "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00669", - "text": " If handleType is not 0, it must be defined as a POSIX file descriptor handle" + "text": " If handleType is not 0, it must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT" }, { "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00670", @@ -8916,7 +11728,7 @@ }, { "vuid": "VUID-VkMemoryGetFdInfoKHR-handleType-00672", - "text": " handleType must be defined as a POSIX file descriptor handle" + "text": " handleType must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT" }, { "vuid": "VUID-VkMemoryGetFdInfoKHR-sType-sType", @@ -9112,7 +11924,7 @@ "(VK_ANDROID_external_memory_android_hardware_buffer)": [ { "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884", - "text": " buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* flags in its AHardwareBuffer_Desc::usage" + "text": " buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* flags in its AHardwareBuffer_Desc::usage" }, { "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter", @@ -9152,79 +11964,103 @@ } ] }, - 
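The fd-related VUIDs above pin handleType to VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT on both the get and the import side, and the import must reuse the exporting allocation's allocationSize and memoryTypeIndex. A hedged C sketch of that round trip, assuming exported was allocated with a VkExportMemoryAllocateInfo listing the opaque-FD handle type and that the VK_KHR_external_memory_fd entry points are loaded; the helper name is hypothetical:

    #include <unistd.h>
    #include <vulkan/vulkan.h>

    static VkResult reimport_opaque_fd(VkDevice device, VkDeviceMemory exported,
                                       VkDeviceSize allocationSize, /* must match the export */
                                       uint32_t memoryTypeIndex,    /* must match the export */
                                       VkDeviceMemory *pImported) {
        VkMemoryGetFdInfoKHR get_info = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
            .memory = exported,
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
        };
        int fd = -1;
        VkResult res = vkGetMemoryFdKHR(device, &get_info, &fd);
        if (res != VK_SUCCESS)
            return res;

        VkImportMemoryFdInfoKHR import_info = {
            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
            .fd = fd, /* the driver consumes the descriptor only on success */
        };
        VkMemoryAllocateInfo alloc_info = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            .pNext = &import_info,
            .allocationSize = allocationSize,
            .memoryTypeIndex = memoryTypeIndex,
        };
        res = vkAllocateMemory(device, &alloc_info, NULL, pImported);
        if (res != VK_SUCCESS)
            close(fd); /* ownership stayed with us on failure */
        return res;
    }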
"VkExportMemoryAllocateInfoNV": { - "(VK_NV_external_memory)": [ + "VkImportMemoryZirconHandleInfoFUCHSIA": { + "(VK_FUCHSIA_external_memory)": [ { - "vuid": "VUID-VkExportMemoryAllocateInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV" + "vuid": "VUID-VkImportMemoryZirconHandleInfoFUCHSIA-handleType-04771", + "text": " handleType must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA." }, { - "vuid": "VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter", - "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + "vuid": "VUID-VkImportMemoryZirconHandleInfoFUCHSIA-handle-04772", + "text": " handle must be a valid VMO handle." + }, + { + "vuid": "VUID-VkImportMemoryZirconHandleInfoFUCHSIA-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA" + }, + { + "vuid": "VUID-VkImportMemoryZirconHandleInfoFUCHSIA-handleType-parameter", + "text": " If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" } ] }, - "VkExportMemoryWin32HandleInfoNV": { - "(VK_NV_external_memory_win32)": [ + "vkGetMemoryZirconHandlePropertiesFUCHSIA": { + "(VK_FUCHSIA_external_memory)": [ { - "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV" + "vuid": "VUID-vkGetMemoryZirconHandlePropertiesFUCHSIA-handleType-04773", + "text": " handleType must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA." }, { - "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-pAttributes-parameter", - "text": " If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value" - } - ] - }, - "VkImportMemoryWin32HandleInfoNV": { - "(VK_NV_external_memory_win32)": [ - { - "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-01327", - "text": " handleType must not have more than one bit set" + "vuid": "VUID-vkGetMemoryZirconHandlePropertiesFUCHSIA-zirconHandle-04774", + "text": " zirconHandle must reference a valid VMO." 
}, { - "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handle-01328", - "text": " handle must be a valid handle to memory, obtained as specified by handleType" - }, - { - "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV" - }, - { - "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter", - "text": " handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" - } - ] - }, - "vkGetMemoryWin32HandleNV": { - "(VK_NV_external_memory_win32)": [ - { - "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-01326", - "text": " handleType must be a flag specified in VkExportMemoryAllocateInfoNV::handleTypes when allocating memory" - }, - { - "vuid": "VUID-vkGetMemoryWin32HandleNV-device-parameter", + "vuid": "VUID-vkGetMemoryZirconHandlePropertiesFUCHSIA-device-parameter", "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parameter", + "vuid": "VUID-vkGetMemoryZirconHandlePropertiesFUCHSIA-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + }, + { + "vuid": "VUID-vkGetMemoryZirconHandlePropertiesFUCHSIA-pMemoryZirconHandleProperties-parameter", + "text": " pMemoryZirconHandleProperties must be a valid pointer to a VkMemoryZirconHandlePropertiesFUCHSIA structure" + } + ] + }, + "VkMemoryZirconHandlePropertiesFUCHSIA": { + "(VK_FUCHSIA_external_memory)": [ + { + "vuid": "VUID-VkMemoryZirconHandlePropertiesFUCHSIA-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA" + }, + { + "vuid": "VUID-VkMemoryZirconHandlePropertiesFUCHSIA-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetMemoryZirconHandleFUCHSIA": { + "(VK_FUCHSIA_external_memory)": [ + { + "vuid": "VUID-vkGetMemoryZirconHandleFUCHSIA-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryZirconHandleFUCHSIA-pGetZirconHandleInfo-parameter", + "text": " pGetZirconHandleInfo must be a valid pointer to a valid VkMemoryGetZirconHandleInfoFUCHSIA structure" + }, + { + "vuid": "VUID-vkGetMemoryZirconHandleFUCHSIA-pZirconHandle-parameter", + "text": " pZirconHandle must be a valid pointer to a zx_handle_t value" + } + ] + }, + "VkMemoryGetZirconHandleInfoFUCHSIA": { + "(VK_FUCHSIA_external_memory)": [ + { + "vuid": "VUID-VkMemoryGetZirconHandleInfoFUCHSIA-handleType-04775", + "text": " handleType must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA." + }, + { + "vuid": "VUID-VkMemoryGetZirconHandleInfoFUCHSIA-handleType-04776", + "text": " handleType must have been included in the handleTypes field of the VkExportMemoryAllocateInfo structure when the external memory was allocated." 
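The Zircon-handle VUIDs above combine with the VMO import rules added to VkMemoryAllocateInfo (04749/04750) into one flow: query the handle's permitted memory types, bound the allocation by the VMO size, then import. A sketch assuming a Fuchsia target with VK_FUCHSIA_external_memory enabled and its entry points loaded; the helper name is hypothetical, and __builtin_ctz is a GCC/Clang builtin used here to pick the lowest permitted memory type index:

    #define VK_USE_PLATFORM_FUCHSIA
    #include <vulkan/vulkan.h>
    #include <zircon/syscalls.h>

    static VkResult import_vmo(VkDevice device, zx_handle_t vmo,
                               VkDeviceMemory *pMemory) {
        VkMemoryZirconHandlePropertiesFUCHSIA props = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA,
        };
        VkResult res = vkGetMemoryZirconHandlePropertiesFUCHSIA(
            device, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, vmo,
            &props);
        if (res != VK_SUCCESS)
            return res;

        uint64_t vmo_size = 0;
        if (props.memoryTypeBits == 0 ||
            zx_vmo_get_size(vmo, &vmo_size) != ZX_OK || vmo_size == 0)
            return VK_ERROR_INVALID_EXTERNAL_HANDLE;

        VkImportMemoryZirconHandleInfoFUCHSIA import_info = {
            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA,
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA,
            .handle = vmo,
        };
        VkMemoryAllocateInfo alloc_info = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            .pNext = &import_info,
            .allocationSize = vmo_size, /* VUID 04750: 0 < size <= zx_vmo_get_size(handle) */
            .memoryTypeIndex = (uint32_t)__builtin_ctz(props.memoryTypeBits), /* VUID 04749 */
        };
        return vkAllocateMemory(device, &alloc_info, NULL, pMemory);
    }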
+ }, + { + "vuid": "VUID-VkMemoryGetZirconHandleInfoFUCHSIA-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA" + }, + { + "vuid": "VUID-VkMemoryGetZirconHandleInfoFUCHSIA-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkMemoryGetZirconHandleInfoFUCHSIA-memory-parameter", "text": " memory must be a valid VkDeviceMemory handle" }, { - "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-parameter", - "text": " handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" - }, - { - "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask", - "text": " handleType must not be 0" - }, - { - "vuid": "VUID-vkGetMemoryWin32HandleNV-pHandle-parameter", - "text": " pHandle must be a valid pointer to a HANDLE value" - }, - { - "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parent", - "text": " memory must have been created, allocated, or retrieved from device" + "vuid": "VUID-VkMemoryGetZirconHandleInfoFUCHSIA-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" } ] }, @@ -9376,14 +12212,14 @@ "vuid": "VUID-VkMappedMemoryRange-size-00686", "text": " If size is equal to VK_WHOLE_SIZE, offset must be within the currently mapped range of memory" }, - { - "vuid": "VUID-VkMappedMemoryRange-size-01389", - "text": " If size is equal to VK_WHOLE_SIZE, the end of the current mapping of memory must be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize bytes from the beginning of the memory object" - }, { "vuid": "VUID-VkMappedMemoryRange-offset-00687", "text": " offset must be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize" }, + { + "vuid": "VUID-VkMappedMemoryRange-size-01389", + "text": " If size is equal to VK_WHOLE_SIZE, the end of the current mapping of memory must either be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize bytes from the beginning of the memory object, or be equal to the end of the memory object" + }, { "vuid": "VUID-VkMappedMemoryRange-size-01390", "text": " If size is not equal to VK_WHOLE_SIZE, size must either be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize, or offset plus size must equal the size of memory" @@ -9574,7 +12410,7 @@ }, { "vuid": "VUID-VkBufferCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBufferDeviceAddressCreateInfoEXT, VkBufferOpaqueCaptureAddressCreateInfo, VkDedicatedAllocationBufferCreateInfoNV, or VkExternalMemoryBufferCreateInfo" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBufferDeviceAddressCreateInfoEXT, VkBufferOpaqueCaptureAddressCreateInfo, VkDedicatedAllocationBufferCreateInfoNV, VkExternalMemoryBufferCreateInfo, VkVideoProfileKHR, or VkVideoProfilesKHR" }, { "vuid": "VUID-VkBufferCreateInfo-sType-unique", @@ -9648,6 +12484,18 @@ "vuid": "VUID-VkBufferCreateInfo-flags-03338", "text": " If flags includes VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, the bufferDeviceAddressCaptureReplay or VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::bufferDeviceAddressCaptureReplay feature must be enabled" } + ], + "(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-VkBufferCreateInfo-usage-04813", + "text": " If usage includes VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, then the pNext chain must include a valid 
VkVideoProfilesKHR structure which includes at least one VkVideoProfileKHR with a decode codec-operation." + } + ], + "(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkBufferCreateInfo-usage-04814", + "text": " If usage includes VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR, then the pNext chain must include a valid VkVideoProfilesKHR structure which includes at least one VkVideoProfileKHR with an encode codec-operation." + } ] }, "VkDedicatedAllocationBufferCreateInfoNV": { @@ -9770,11 +12618,11 @@ }, { "vuid": "VUID-VkBufferViewCreateInfo-buffer-00933", - "text": " If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, format must be supported for uniform texel buffers, as specified by the VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties" + "text": " If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, format must be supported for uniform texel buffers, as specified by the VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties" }, { "vuid": "VUID-VkBufferViewCreateInfo-buffer-00934", - "text": " If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, format must be supported for storage texel buffers, as specified by the VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties" + "text": " If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, format must be supported for storage texel buffers, as specified by the VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties" }, { "vuid": "VUID-VkBufferViewCreateInfo-buffer-00935", @@ -9882,7 +12730,7 @@ "core": [ { "vuid": "VUID-VkImageCreateInfo-imageCreateMaxMipLevels-02251", - "text": " Each of the following values (as described in Image Creation Limits) must not be undefined imageCreateMaxMipLevels, imageCreateMaxArrayLayers, imageCreateMaxExtent, and imageCreateSampleCounts" + "text": " Each of the following values (as described in Image Creation Limits) must not be undefined : imageCreateMaxMipLevels, imageCreateMaxArrayLayers, imageCreateMaxExtent, and imageCreateSampleCounts" }, { "vuid": "VUID-VkImageCreateInfo-sharingMode-00941", @@ -9992,6 +12840,10 @@ "vuid": "VUID-VkImageCreateInfo-flags-01924", "text": " If the sparse aliased residency feature is not enabled, flags must not contain VK_IMAGE_CREATE_SPARSE_ALIASED_BIT" }, + { + "vuid": "VUID-VkImageCreateInfo-tiling-04121", + "text": " If tiling is VK_IMAGE_TILING_LINEAR, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, { "vuid": "VUID-VkImageCreateInfo-imageType-00970", "text": " If imageType is VK_IMAGE_TYPE_1D, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" @@ -10038,7 +12890,7 @@ }, { "vuid": "VUID-VkImageCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationImageCreateInfoNV, VkExternalFormatANDROID, VkExternalMemoryImageCreateInfo, VkExternalMemoryImageCreateInfoNV, VkImageDrmFormatModifierExplicitCreateInfoEXT, VkImageDrmFormatModifierListCreateInfoEXT, VkImageFormatListCreateInfo, VkImageStencilUsageCreateInfo, or
VkImageSwapchainCreateInfoKHR" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationImageCreateInfoNV, VkExternalFormatANDROID, VkExternalMemoryImageCreateInfo, VkExternalMemoryImageCreateInfoNV, VkImageDrmFormatModifierExplicitCreateInfoEXT, VkImageDrmFormatModifierListCreateInfoEXT, VkImageFormatListCreateInfo, VkImageStencilUsageCreateInfo, VkImageSwapchainCreateInfoKHR, VkVideoProfileKHR, or VkVideoProfilesKHR" }, { "vuid": "VUID-VkImageCreateInfo-sType-unique", @@ -10243,6 +13095,14 @@ { "vuid": "VUID-VkImageCreateInfo-format-01577", "text": " If format is not a multi-planar format, and flags does not include VK_IMAGE_CREATE_ALIAS_BIT, flags must not contain VK_IMAGE_CREATE_DISJOINT_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-format-04712", + "text": " If format has a _422 or _420 suffix, width must be a multiple of 2" + }, + { + "vuid": "VUID-VkImageCreateInfo-format-04713", + "text": " If format has a _420 suffix, height must be a multiple of 2" } ], "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_EXT_ycbcr_image_arrays)": [ @@ -10277,7 +13137,7 @@ "text": " If flags contains VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT format must be a depth or depth/stencil format" } ], - "(VK_EXT_separate_stencil_usage)": [ + "(VK_VERSION_1_2,VK_EXT_separate_stencil_usage)": [ { "vuid": "VUID-VkImageCreateInfo-format-02795", "text": " If format is a depth-stencil format, usage includes VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, and the pNext chain includes a VkImageStencilUsageCreateInfo structure, then its VkImageStencilUsageCreateInfo::stencilUsage member must also include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" @@ -10325,23 +13185,57 @@ "text": " If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV and imageType is VK_IMAGE_TYPE_3D, extent.width, extent.height, and extent.depth must be greater than 1" } ], - "(VK_NV_shading_rate_image)": [ + "(VK_KHR_fragment_shading_rate,VK_NV_shading_rate_image)": [ { "vuid": "VUID-VkImageCreateInfo-imageType-02082", - "text": " If usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, imageType must be VK_IMAGE_TYPE_2D" + "text": " If usage includes VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, imageType must be VK_IMAGE_TYPE_2D" }, { "vuid": "VUID-VkImageCreateInfo-samples-02083", - "text": " If usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, samples must be VK_SAMPLE_COUNT_1_BIT" - }, + "text": " If usage includes VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, samples must be VK_SAMPLE_COUNT_1_BIT" + } + ], + "(VK_NV_shading_rate_image)": [ { "vuid": "VUID-VkImageCreateInfo-tiling-02084", "text": " If usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, tiling must be VK_IMAGE_TILING_OPTIMAL" } + ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkImageCreateInfo-imageView2DOn3DImage-04459", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::imageView2DOn3DImage is VK_FALSE, flags must not contain VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT." + }, + { + "vuid": "VUID-VkImageCreateInfo-multisampleArrayImage-04460", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::multisampleArrayImage is VK_FALSE, and samples is not VK_SAMPLE_COUNT_1_BIT, then arrayLayers must be 1." 
+ } + ], + "(VK_VERSION_1_2,VK_KHR_image_format_list)": [ + { + "vuid": "VUID-VkImageCreateInfo-pNext-04737", + "text": " If a VkImageFormatListCreateInfo structure was included in the pNext chain and VkImageFormatListCreateInfo::viewFormatCount is not zero, then all of the formats in VkImageFormatListCreateInfo::pViewFormats must be compatible with the format as described in the compatibility table" + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-04738", + "text": " If flags does not contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT and the pNext chain includes a VkImageFormatListCreateInfo structure, then VkImageFormatListCreateInfo::viewFormatCount must be 0 or 1" + } + ], + "(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-VkImageCreateInfo-usage-04815", + "text": " If usage includes VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, then the pNext chain must include a valid VkVideoProfilesKHR structure which includes at least one VkVideoProfileKHR with a decode codec-operation." + } + ], + "(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkImageCreateInfo-usage-04816", + "text": " If usage includes VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, then the pNext chain must include a valid VkVideoProfilesKHR structure which includes at least one VkVideoProfileKHR with an encode codec-operation." + } ] }, "VkImageStencilUsageCreateInfo": { - "(VK_EXT_separate_stencil_usage)": [ + "(VK_VERSION_1_2,VK_EXT_separate_stencil_usage)": [ { "vuid": "VUID-VkImageStencilUsageCreateInfo-stencilUsage-02539", "text": " If stencilUsage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, it must not include bits other than VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" @@ -10381,10 +13275,6 @@ { "vuid": "VUID-VkExternalMemoryImageCreateInfo-handleTypes-parameter", "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBits values" - }, - { - "vuid": "VUID-VkExternalMemoryImageCreateInfo-handleTypes-requiredbitmask", - "text": " handleTypes must not be 0" } ] }, @@ -10522,6 +13412,22 @@ "vuid": "VUID-vkGetImageSubresourceLayout-arrayLayer-01717", "text": " The arrayLayer member of pSubresource must be less than the arrayLayers specified in VkImageCreateInfo when image was created" }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-format-04461", + "text": " If format is a color format, the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-format-04462", + "text": " If format has a depth component, the aspectMask member of pSubresource must contain VK_IMAGE_ASPECT_DEPTH_BIT" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-format-04463", + "text": " If format has a stencil component, the aspectMask member of pSubresource must contain VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-format-04464", + "text": " If format does not contain a stencil or depth component, the aspectMask member of pSubresource must not contain VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT" + }, { "vuid": "VUID-vkGetImageSubresourceLayout-device-parameter", "text": " device must be a valid VkDevice handle" @@ -10622,6 +13528,10 @@ "vuid": "VUID-vkDestroyImage-image-01002", "text": " If no VkAllocationCallbacks were provided when image was created, pAllocator must be NULL" }, + { + "vuid":
"VUID-vkDestroyImage-image-04882", + "text": " image must not have been acquired from vkGetSwapchainImagesKHR" + }, { "vuid": "VUID-vkDestroyImage-device-parameter", "text": " device must be a valid VkDevice handle" @@ -10670,6 +13580,10 @@ "vuid": "VUID-VkImageViewCreateInfo-viewType-01004", "text": " If the image cubemap arrays feature is not enabled, viewType must not be VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-04441", + "text": " image must have been created with a usage value containing at least one of the usages defined in the valid image usage list for image views" + }, { "vuid": "VUID-VkImageViewCreateInfo-None-02273", "text": " The format features of the resultant image view must contain at least one bit" @@ -10732,7 +13646,7 @@ }, { "vuid": "VUID-VkImageViewCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageViewASTCDecodeModeEXT, VkImageViewUsageCreateInfo, or VkSamplerYcbcrConversionInfo" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageViewASTCDecodeModeEXT, VkImageViewUsageCreateInfo, VkSamplerYcbcrConversionInfo, VkVideoProfileKHR, or VkVideoProfilesKHR" }, { "vuid": "VUID-VkImageViewCreateInfo-sType-unique", @@ -10785,30 +13699,6 @@ "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.layerCount must be non-zero and subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Miplevel Sizing" } ], - "!(VK_EXT_fragment_density_map)+!(VK_NV_shading_rate_image)": [ - { - "vuid": "VUID-VkImageViewCreateInfo-image-01007", - "text": " image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" - } - ], - "!(VK_EXT_fragment_density_map)+(VK_NV_shading_rate_image)": [ - { - "vuid": "VUID-VkImageViewCreateInfo-image-02085", - "text": " image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV" - } - ], - "(VK_EXT_fragment_density_map)+!(VK_NV_shading_rate_image)": [ - { - "vuid": "VUID-VkImageViewCreateInfo-image-02569", - "text": " image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT" - } - ], - "(VK_EXT_fragment_density_map)+(VK_NV_shading_rate_image)": [ - { - "vuid": "VUID-VkImageViewCreateInfo-image-02570", - "text": " image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 
VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, or VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT" - } - ], "(VK_EXT_fragment_density_map)": [ { "vuid": "VUID-VkImageViewCreateInfo-image-02571", @@ -10816,11 +13706,11 @@ }, { "vuid": "VUID-VkImageViewCreateInfo-flags-02572", - "text": " If dynamic fragment density map feature is not enabled, flags must not contain VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT" + "text": " If dynamic fragment density map feature is not enabled, flags must not contain VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT" }, { - "vuid": "VUID-VkImageViewCreateInfo-image-02573", - "text": " If dynamic fragment density map feature is not enabled and image was created with usage containing VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, flags must not contain any of VK_IMAGE_CREATE_PROTECTED_BIT, VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT" + "vuid": "VUID-VkImageViewCreateInfo-flags-04116", + "text": " If flags does not contain VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT and image was created with usage containing VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, its flags must not contain any of VK_IMAGE_CREATE_PROTECTED_BIT, VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT" } ], "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ @@ -10865,20 +13755,16 @@ { "vuid": "VUID-VkImageViewCreateInfo-image-01584", "text": " If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, the levelCount and layerCount members of subresourceRange must both be 1" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-04739", + "text": " If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag and format is a non-compressed format, viewType must not be VK_IMAGE_VIEW_TYPE_3D" } ], "(VK_VERSION_1_2,VK_KHR_image_format_list)": [ { "vuid": "VUID-VkImageViewCreateInfo-pNext-01585", "text": " If a VkImageFormatListCreateInfo structure was included in the pNext chain of the VkImageCreateInfo structure used when creating image and VkImageFormatListCreateInfo::viewFormatCount is not zero then format must be one of the formats in VkImageFormatListCreateInfo::pViewFormats" - }, - { - "vuid": "VUID-VkImageViewCreateInfo-pNext-04082", - "text": " If a VkImageFormatListCreateInfo structure was included in the pNext chain of the VkImageCreateInfo structure used when creating image and VkImageFormatListCreateInfo::viewFormatCount is not zero then all of the formats in VkImageFormatListCreateInfo::pViewFormats must be compatible with the format as described in the compatibility table" - }, - { - "vuid": "VUID-VkImageViewCreateInfo-flags-04083", - "text": " If flags dose not contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT and the pNext chain include a VkImageFormatListCreateInfo structure then VkImageFormatListCreateInfo::viewFormatCount must be 0 or 1" } ], "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ @@ -10890,6 +13776,18 @@ "vuid": "VUID-VkImageViewCreateInfo-image-01762", "text": " If image was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, or if the format of the image is a multi-planar format and if subresourceRange.aspectMask is VK_IMAGE_ASPECT_COLOR_BIT, format must be identical to the format used to create image" }, + { + "vuid": "VUID-VkImageViewCreateInfo-format-04724", + "text": " If format is one of 
those listed in Formats requiring sampler Y′CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views, then the pNext chain must include a VkSamplerYcbcrConversionInfo structure with a conversion value other than VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-format-04714", + "text": " If format has a _422 or _420 suffix then image must have been created with a width that is a multiple of 2" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-format-04715", + "text": " If format has a _420 suffix then image must have been created with a height that is a multiple of 2" + }, { "vuid": "VUID-VkImageViewCreateInfo-pNext-01970", "text": " If the pNext chain includes a VkSamplerYcbcrConversionInfo structure with a conversion value other than VK_NULL_HANDLE, all members of components must have the identity swizzle" @@ -10915,23 +13813,49 @@ "text": " If image has an external format, all members of components must be the identity swizzle" } ], - "(VK_NV_shading_rate_image)": [ + "(VK_KHR_fragment_shading_rate,VK_NV_shading_rate_image)": [ { "vuid": "VUID-VkImageViewCreateInfo-image-02086", - "text": " If image was created with usage containing VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, viewType must be VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY" - }, - { - "vuid": "VUID-VkImageViewCreateInfo-image-02087", - "text": " If image was created with usage containing VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, format must be VK_FORMAT_R8_UINT" + "text": " If image was created with usage containing VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, viewType must be VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY" } ], - "(VK_VERSION_1_1,VK_KHR_maintenance2)+!(VK_EXT_separate_stencil_usage)": [ + "(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-02087", + "text": " If the shadingRateImage feature is enabled, and if image was created with usage containing VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, format must be VK_FORMAT_R8_UINT" + } + ], + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-usage-04550", + "text": " If the attachmentFragmentShadingRate feature is enabled, and the usage for the image view includes VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, then the image view’s format features must contain VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-usage-04551", + "text": " If the attachmentFragmentShadingRate feature is enabled, the usage for the image view includes VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, and layeredShadingRateAttachments is VK_FALSE, subresourceRange.layerCount must be 1" + } + ], + "(VK_EXT_fragment_density_map)+(VK_EXT_fragment_density_map2)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-flags-03567", + "text": " If deferred fragment density map feature is not enabled, flags must not contain VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-flags-03568", + "text": " If flags contains VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT, flags must not contain VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-03569", + "text": " If image was created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT and usage containing VK_IMAGE_USAGE_SAMPLED_BIT, subresourceRange.layerCount must be less than or equal to VkPhysicalDeviceFragmentDensityMap2PropertiesEXT::maxSubsampledArrayLayers" + } +
], + "(VK_VERSION_1_1,VK_KHR_maintenance2)+!(VK_VERSION_1_2+VK_EXT_separate_stencil_usage)": [ { "vuid": "VUID-VkImageViewCreateInfo-pNext-02661", "text": " If the pNext chain includes a VkImageViewUsageCreateInfo structure, its usage member must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image" } ], - "(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_EXT_separate_stencil_usage)": [ + "(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_VERSION_1_2,VK_EXT_separate_stencil_usage)": [ { "vuid": "VUID-VkImageViewCreateInfo-pNext-02662", "text": " If the pNext chain includes a VkImageViewUsageCreateInfo structure, and image was not created with a VkImageStencilUsageCreateInfo structure included in the pNext chain of VkImageCreateInfo, its usage member must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image" @@ -10944,6 +13868,28 @@ "vuid": "VUID-VkImageViewCreateInfo-pNext-02664", "text": " If the pNext chain includes a VkImageViewUsageCreateInfo structure, image was created with a VkImageStencilUsageCreateInfo structure included in the pNext chain of VkImageCreateInfo, and subresourceRange.aspectMask includes bits other than VK_IMAGE_ASPECT_STENCIL_BIT, the usage member of the VkImageViewUsageCreateInfo structure must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image" } + ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-imageViewFormatSwizzle-04465", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::imageViewFormatSwizzle is VK_FALSE, all elements of components must be VK_COMPONENT_SWIZZLE_IDENTITY." + }, + { + "vuid": "VUID-VkImageViewCreateInfo-imageViewFormatReinterpretation-04466", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::imageViewFormatReinterpretation is VK_FALSE, the VkFormat in format must not contain a different number of components, or a different number of bits in each component, than the format of the VkImage in image." 
+ } + ], + "(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-04817", + "text": " If image was created with usage containing VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, or VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, then the viewType must be VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY and all members of components must have the identity swizzle" + } + ], + "(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-04818", + "text": " If image was created with usage containing VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, or VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, then the viewType must be VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY and all members of components must have the identity swizzle" + } ] }, "VkImageViewUsageCreateInfo": { @@ -11162,13 +14108,625 @@ } ] }, - "vkGetBufferMemoryRequirements": { - "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + "vkCreateAccelerationStructureNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ { - "vuid": "VUID-vkGetBufferMemoryRequirements-buffer-04003", - "text": " If buffer was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then buffer must be bound to memory" + "vuid": "VUID-vkCreateAccelerationStructureNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureNV-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkAccelerationStructureCreateInfoNV structure" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureNV-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureNV-pAccelerationStructure-parameter", + "text": " pAccelerationStructure must be a valid pointer to a VkAccelerationStructureNV handle" + } + ] + }, + "VkAccelerationStructureCreateInfoNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421", + "text": " If compactedSize is not 0 then both info.geometryCount and info.instanceCount must be 0" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoNV-info-parameter", + "text": " info must be a valid VkAccelerationStructureInfoNV structure" + } + ] + }, + "VkAccelerationStructureInfoNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureInfoNV-geometryCount-02422", + "text": " geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-instanceCount-02423", + "text": " instanceCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxInstanceCount" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424", + "text": " The total number of triangles in all geometries must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxTriangleCount" + }, + { + "vuid": 
"VUID-VkAccelerationStructureInfoNV-type-02425", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV then geometryCount must be 0" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-type-02426", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then instanceCount must be 0" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-type-02786", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then the geometryType member of each geometry in pGeometries must be the same" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-flags-02592", + "text": " If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV bit set" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-scratch-02781", + "text": " scratch must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-instanceData-02782", + "text": " If instanceData is not VK_NULL_HANDLE, instanceData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-type-parameter", + "text": " type must be a valid VkAccelerationStructureTypeNV value" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-flags-parameter", + "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsNV values" + }, + { + "vuid": "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", + "text": " If geometryCount is not 0, pGeometries must be a valid pointer to an array of geometryCount valid VkGeometryNV structures" } ], + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureInfoNV-type-04623", + "text": " type must not be VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + } + ] + }, + "vkCreateAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCreateAccelerationStructureKHR-accelerationStructure-03611", + "text": " The accelerationStructure feature must be enabled" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureKHR-deviceAddress-03488", + "text": " If VkAccelerationStructureCreateInfoKHR::deviceAddress is not zero, the accelerationStructureCaptureReplay feature must be enabled" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureKHR-device-03489", + "text": " If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkAccelerationStructureCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateAccelerationStructureKHR-pAccelerationStructure-parameter", + "text": " pAccelerationStructure must 
be a valid pointer to a VkAccelerationStructureKHR handle" + } + ] + }, + "VkAccelerationStructureCreateInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-deviceAddress-03612", + "text": " If deviceAddress is not zero, createFlags must include VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-createFlags-03613", + "text": " If createFlags includes VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, VkPhysicalDeviceAccelerationStructureFeaturesKHR::accelerationStructureCaptureReplay must be VK_TRUE" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-buffer-03614", + "text": " buffer must have been created with a usage value containing VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-buffer-03615", + "text": " buffer must not have been created with VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-offset-03616", + "text": " The sum of offset and size must be less than the size of buffer" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-offset-03734", + "text": " offset must be a multiple of 256 bytes" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-createFlags-parameter", + "text": " createFlags must be a valid combination of VkAccelerationStructureCreateFlagBitsKHR values" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-parameter", + "text": " type must be a valid VkAccelerationStructureTypeKHR value" + } + ] + }, + "vkGetAccelerationStructureBuildSizesKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-rayTracingPipeline-03617", + "text": " The rayTracingPipeline or rayQuery feature must be enabled" + }, + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-device-03618", + "text": " If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled" + }, + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-pBuildInfo-03619", + "text": " If pBuildInfo->geometryCount is not 0, pMaxPrimitiveCounts must be a valid pointer to an array of pBuildInfo->geometryCount uint32_t values" + }, + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-pBuildInfo-03785", + "text": " If pBuildInfo->pGeometries or pBuildInfo->ppGeometries has a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, each pMaxPrimitiveCounts[i] must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxInstanceCount" + }, + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-buildType-parameter", + "text": " buildType must be a valid VkAccelerationStructureBuildTypeKHR value" + }, + { + "vuid": 
"VUID-vkGetAccelerationStructureBuildSizesKHR-pBuildInfo-parameter", + "text": " pBuildInfo must be a valid pointer to a valid VkAccelerationStructureBuildGeometryInfoKHR structure" + }, + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-pMaxPrimitiveCounts-parameter", + "text": " If pMaxPrimitiveCounts is not NULL, pMaxPrimitiveCounts must be a valid pointer to an array of pBuildInfo->geometryCount uint32_t values" + }, + { + "vuid": "VUID-vkGetAccelerationStructureBuildSizesKHR-pSizeInfo-parameter", + "text": " pSizeInfo must be a valid pointer to a VkAccelerationStructureBuildSizesInfoKHR structure" + } + ] + }, + "VkAccelerationStructureBuildSizesInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureBuildSizesInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildSizesInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkGeometryNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkGeometryNV-geometryType-03503", + "text": " geometryType must be VK_GEOMETRY_TYPE_TRIANGLES_NV or VK_GEOMETRY_TYPE_AABBS_NV" + }, + { + "vuid": "VUID-VkGeometryNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_NV" + }, + { + "vuid": "VUID-VkGeometryNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkGeometryNV-geometryType-parameter", + "text": " geometryType must be a valid VkGeometryTypeKHR value" + }, + { + "vuid": "VUID-VkGeometryNV-geometry-parameter", + "text": " geometry must be a valid VkGeometryDataNV structure" + }, + { + "vuid": "VUID-VkGeometryNV-flags-parameter", + "text": " flags must be a valid combination of VkGeometryFlagBitsKHR values" + } + ] + }, + "VkGeometryDataNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkGeometryDataNV-triangles-parameter", + "text": " triangles must be a valid VkGeometryTrianglesNV structure" + }, + { + "vuid": "VUID-VkGeometryDataNV-aabbs-parameter", + "text": " aabbs must be a valid VkGeometryAABBNV structure" + } + ] + }, + "VkGeometryTrianglesNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02428", + "text": " vertexOffset must be less than the size of vertexData" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02429", + "text": " vertexOffset must be a multiple of the component size of vertexFormat" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-02430", + "text": " vertexFormat must be one of VK_FORMAT_R32G32B32_SFLOAT, VK_FORMAT_R32G32_SFLOAT, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16_SNORM, or VK_FORMAT_R16G16B16_SNORM" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexStride-03818", + "text": " vertexStride must be less than or equal to 232-1" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02431", + "text": " indexOffset must be less than the size of indexData" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02432", + "text": " indexOffset must be a multiple of the element size of indexType" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexType-02433", + "text": " indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexData-02434", + 
"text": " indexData must be VK_NULL_HANDLE if indexType is VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexData-02435", + "text": " indexData must be a valid VkBuffer handle if indexType is not VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexCount-02436", + "text": " indexCount must be 0 if indexType is VK_INDEX_TYPE_NONE_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02437", + "text": " transformOffset must be less than the size of transformData" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02438", + "text": " transformOffset must be a multiple of 16" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexData-parameter", + "text": " If vertexData is not VK_NULL_HANDLE, vertexData must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-parameter", + "text": " vertexFormat must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexData-parameter", + "text": " If indexData is not VK_NULL_HANDLE, indexData must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-transformData-parameter", + "text": " If transformData is not VK_NULL_HANDLE, transformData must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkGeometryTrianglesNV-commonparent", + "text": " Each of indexData, transformData, and vertexData that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkGeometryAABBNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkGeometryAABBNV-offset-02439", + "text": " offset must be less than the size of aabbData" + }, + { + "vuid": "VUID-VkGeometryAABBNV-offset-02440", + "text": " offset must be a multiple of 8" + }, + { + "vuid": "VUID-VkGeometryAABBNV-stride-02441", + "text": " stride must be a multiple of 8" + }, + { + "vuid": "VUID-VkGeometryAABBNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV" + }, + { + "vuid": "VUID-VkGeometryAABBNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkGeometryAABBNV-aabbData-parameter", + "text": " If aabbData is not VK_NULL_HANDLE, aabbData must be a valid VkBuffer handle" + } + ] + }, + "vkDestroyAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02442", + "text": " All submitted commands that refer to accelerationStructure must have completed execution" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02443", + "text": " If VkAllocationCallbacks were provided when accelerationStructure was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02444", + "text": " If no VkAllocationCallbacks were provided when accelerationStructure was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": 
"VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-parameter", + "text": " If accelerationStructure is not VK_NULL_HANDLE, accelerationStructure must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-parent", + "text": " If accelerationStructure is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkDestroyAccelerationStructureNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-03752", + "text": " All submitted commands that refer to accelerationStructure must have completed execution" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-03753", + "text": " If VkAllocationCallbacks were provided when accelerationStructure was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-03754", + "text": " If no VkAllocationCallbacks were provided when accelerationStructure was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parameter", + "text": " If accelerationStructure is not VK_NULL_HANDLE, accelerationStructure must be a valid VkAccelerationStructureNV handle" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureNV-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parent", + "text": " If accelerationStructure is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetAccelerationStructureMemoryRequirementsNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureMemoryRequirementsInfoNV structure" + }, + { + "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2KHR structure" + } + ] + }, + "VkAccelerationStructureMemoryRequirementsInfoNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV" + }, + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-type-parameter", + "text": " type must be a valid VkAccelerationStructureMemoryRequirementsTypeNV value" + }, + { + "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter", + "text": 
" accelerationStructure must be a valid VkAccelerationStructureNV handle" + } + ] + }, + "vkBindAccelerationStructureMemoryNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkBindAccelerationStructureMemoryNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkBindAccelerationStructureMemoryNV-pBindInfos-parameter", + "text": " pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindAccelerationStructureMemoryInfoNV structures" + }, + { + "vuid": "VUID-vkBindAccelerationStructureMemoryNV-bindInfoCount-arraylength", + "text": " bindInfoCount must be greater than 0" + } + ] + }, + "VkBindAccelerationStructureMemoryInfoNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-03620", + "text": " accelerationStructure must not already be backed by a memory object" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-03621", + "text": " memoryOffset must be less than the size of memory" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-03622", + "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-03623", + "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-size-03624", + "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV must be less than or equal to the size of memory minus memoryOffset" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-parameter", + "text": " accelerationStructure must be a valid VkAccelerationStructureNV handle" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-pDeviceIndices-parameter", + "text": " If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to an array of deviceIndexCount uint32_t values" + }, + { + "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent", + "text": " Both of accelerationStructure, and memory must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkGetAccelerationStructureHandleNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240", + "text": " 
dataSize must be large enough to contain the result of the query, as described above" + }, + { + "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787", + "text": " accelerationStructure must be bound completely and contiguously to a single VkDeviceMemory object via vkBindAccelerationStructureMemoryNV" + }, + { + "vuid": "VUID-vkGetAccelerationStructureHandleNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter", + "text": " accelerationStructure must be a valid VkAccelerationStructureNV handle" + }, + { + "vuid": "VUID-vkGetAccelerationStructureHandleNV-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkGetAccelerationStructureHandleNV-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent", + "text": " accelerationStructure must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetAccelerationStructureDeviceAddressKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-device-03504", + "text": " If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled" + }, + { + "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureDeviceAddressInfoKHR structure" + } + ] + }, + "VkAccelerationStructureDeviceAddressInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-accelerationStructure-parameter", + "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" + } + ] + }, + "vkGetBufferMemoryRequirements": { "core": [ { "vuid": "VUID-vkGetBufferMemoryRequirements-device-parameter", @@ -11237,12 +14795,6 @@ ] }, "VkBufferMemoryRequirementsInfo2": { - "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ - { - "vuid": "VUID-VkBufferMemoryRequirementsInfo2-buffer-04005", - "text": " If buffer was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then buffer must be bound to memory" - } - ], "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ { "vuid": "VUID-VkBufferMemoryRequirementsInfo2-sType-sType", @@ -11422,7 +14974,7 @@ "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ { "vuid": "VUID-vkBindBufferMemory-buffer-01444", - "text": " If buffer requires a dedicated allocation(as reported by vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been created with VkMemoryDedicatedAllocateInfo::buffer equal to buffer" + "text": " If buffer requires a dedicated allocation (as reported by 
vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been allocated with VkMemoryDedicatedAllocateInfo::buffer equal to buffer" }, { "vuid": "VUID-vkBindBufferMemory-memory-01508", @@ -11432,17 +14984,17 @@ "(VK_VERSION_1_1)": [ { "vuid": "VUID-vkBindBufferMemory-None-01898", - "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit set, the buffer must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit set, the buffer must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" }, { "vuid": "VUID-vkBindBufferMemory-None-01899", - "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit not set, the buffer must not be bound to a memory object created with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit not set, the buffer must not be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" } ], "(VK_NV_dedicated_allocation)": [ { "vuid": "VUID-vkBindBufferMemory-buffer-01038", - "text": " If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to a buffer handle created with identical creation parameters to buffer and memoryOffset must be zero" + "text": " If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been allocated with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to a buffer handle created with identical creation parameters to buffer and memoryOffset must be zero" } ], "(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ @@ -11460,17 +15012,17 @@ "(VK_VERSION_1_1,VK_KHR_external_memory)+!(VK_ANDROID_external_memory_android_hardware_buffer)": [ { "vuid": "VUID-vkBindBufferMemory-memory-02727", - "text": " If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" + "text": " If memory was allocated by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" } ], "(VK_VERSION_1_1,VK_KHR_external_memory)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ { "vuid": "VUID-vkBindBufferMemory-memory-02985", - "text": " If memory was created by a memory import operation, that is not VkImportAndroidHardwareBufferInfoANDROID with a non-NULL buffer value, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" + "text": " If memory was allocated by a memory import operation, that is not VkImportAndroidHardwareBufferInfoANDROID with a non-NULL buffer value, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" }, { "vuid": "VUID-vkBindBufferMemory-memory-02986", - "text": " If memory was created with the VkImportAndroidHardwareBufferInfoANDROID memory import operation with a non-NULL buffer value, 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" + "text": " If memory was allocated with the VkImportAndroidHardwareBufferInfoANDROID memory import operation with a non-NULL buffer value, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" } ], "(VK_VERSION_1_2,VK_KHR_buffer_device_address)": [ @@ -11499,27 +15051,27 @@ "VkBindBufferMemoryInfo": { "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [ { - "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01593", + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01029", "text": " buffer must not already be backed by a memory object" }, { - "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01594", + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01030", "text": " buffer must not have been created with any sparse memory binding flags" }, { - "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01595", + "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01031", "text": " memoryOffset must be less than the size of memory" }, { - "vuid": "VUID-VkBindBufferMemoryInfo-memory-01599", + "vuid": "VUID-VkBindBufferMemoryInfo-memory-01035", "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer" }, { - "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01600", + "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01036", "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer" }, { - "vuid": "VUID-VkBindBufferMemoryInfo-size-01601", + "vuid": "VUID-VkBindBufferMemoryInfo-size-01037", "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer must be less than or equal to the size of memory minus memoryOffset" }, { @@ -11549,59 +15101,69 @@ ], "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ { - "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01602", - "text": " If buffer requires a dedicated allocation(as reported by vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been created with VkMemoryDedicatedAllocateInfo::buffer equal to buffer and memoryOffset must be zero" + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01444", + "text": " If buffer requires a dedicated allocation (as reported by vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been allocated with VkMemoryDedicatedAllocateInfo::buffer equal to buffer" }, { - "vuid": "VUID-VkBindBufferMemoryInfo-memory-01900", - "text": " If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer must equal VkMemoryDedicatedAllocateInfo::buffer and memoryOffset must be zero" + "vuid": "VUID-VkBindBufferMemoryInfo-memory-01508", + "text": " If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer 
must equal VkMemoryDedicatedAllocateInfo::buffer, and memoryOffset must be zero" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-None-01898", + "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit set, the buffer must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-None-01899", + "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit not set, the buffer must not be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" } ], "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)": [ { - "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01603", - "text": " If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to buffer and memoryOffset must be zero" + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01038", + "text": " If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been allocated with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to a buffer handle created with identical creation parameters to buffer and memoryOffset must be zero" } ], "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ { - "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01604", + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01039", "text": " If buffer was not created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image" } ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-memory-02726", + "text": " If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+!(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-memory-02727", + "text": " If memory was allocated by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-memory-02985", + "text": " If memory was allocated by a memory import operation, that is not VkImportAndroidHardwareBufferInfoANDROID with a non-NULL buffer value, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-memory-02986", + "text": " If memory was allocated with the VkImportAndroidHardwareBufferInfoANDROID memory import operation with a non-NULL buffer value, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" + } + ], + 
"(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_2,VK_KHR_buffer_device_address)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-bufferDeviceAddress-03339", + "text": " If the VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress feature is enabled and buffer was created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + } + ], "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [ { "vuid": "VUID-VkBindBufferMemoryInfo-pNext-01605", "text": " If the pNext chain includes a VkBindBufferMemoryDeviceGroupInfo structure, all instances of memory specified by VkBindBufferMemoryDeviceGroupInfo::pDeviceIndices must have been allocated" } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [ - { - "vuid": "VUID-VkBindBufferMemoryInfo-handleTypes-02791", - "text": " If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+!(VK_ANDROID_external_memory_android_hardware_buffer)": [ - { - "vuid": "VUID-VkBindBufferMemoryInfo-memory-02792", - "text": " If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ - { - "vuid": "VUID-VkBindBufferMemoryInfo-memory-02987", - "text": " If memory was created by a memory import operation, that is not VkImportAndroidHardwareBufferInfoANDROID with a non-NULL buffer value, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" - }, - { - "vuid": "VUID-VkBindBufferMemoryInfo-memory-02988", - "text": " If memory was created with the VkImportAndroidHardwareBufferInfoANDROID memory import operation with a non-NULL buffer value, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_KHR_buffer_device_address)": [ - { - "vuid": "VUID-VkBindBufferMemoryInfo-bufferDeviceAddress-02838", - "text": " If the VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::bufferDeviceAddress feature is enabled and buffer was created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR bit set" - } ] }, "VkBindBufferMemoryDeviceGroupInfo": { @@ -11625,12 +15187,6 @@ ] }, "vkBindImageMemory": { - "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ - { - "vuid": "VUID-vkBindImageMemory-image-01608", - "text": " image must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT set" - } - ], "core": [ { "vuid": "VUID-vkBindImageMemory-image-01044", @@ -11646,11 +15202,11 @@ }, { "vuid": "VUID-vkBindImageMemory-memory-01047", - "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" + "text": " memory must have 
been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" }, { "vuid": "VUID-vkBindImageMemory-memoryOffset-01048", - "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" + "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" }, { "vuid": "VUID-vkBindImageMemory-size-01049", @@ -11680,7 +15236,7 @@ "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ { "vuid": "VUID-vkBindImageMemory-image-01445", - "text": " If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image" + "text": " If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image" } ], "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+!(VK_NV_dedicated_allocation_image_aliasing)": [ @@ -11742,13 +15298,19 @@ "vuid": "VUID-vkBindImageMemory-memory-02990", "text": " If memory was created with the VkImportAndroidHardwareBufferInfoANDROID memory import operation with a non-NULL buffer value, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkBindImageMemory-image-01608", + "text": " image must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT set" + } ] }, "vkBindImageMemory2": { "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { "vuid": "VUID-vkBindImageMemory2-pBindInfos-02858", - "text": " If any VkBindImageMemoryInfo::image was created with VK_IMAGE_CREATE_DISJOINT_BIT then all planes of VkBindImageMemoryInfo::image must be bound individually in separate pBindInfos" + "text": " If any VkBindImageMemoryInfo::image was created with VK_IMAGE_CREATE_DISJOINT_BIT then all planes of VkBindImageMemoryInfo::image must be bound individually in separate pBindInfos" }, { "vuid": "VUID-vkBindImageMemory2-pBindInfos-04006", @@ -11773,15 +15335,15 @@ "VkBindImageMemoryInfo": { "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [ { - "vuid": "VUID-VkBindImageMemoryInfo-image-01609", + "vuid": "VUID-VkBindImageMemoryInfo-image-01044", "text": " image must not already be backed by a memory object" }, { - "vuid": "VUID-VkBindImageMemoryInfo-image-01610", + "vuid": "VUID-VkBindImageMemoryInfo-image-01045", "text": " image must not have been created with any sparse memory binding flags" }, { - "vuid": "VUID-VkBindImageMemoryInfo-memoryOffset-01611", + "vuid": "VUID-VkBindImageMemoryInfo-memoryOffset-01046", "text": " memoryOffset must be less than the size of memory" }, { @@ -11805,6 +15367,72 @@ "text": " Both of image, and memory that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" } ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": 
"VUID-VkBindImageMemoryInfo-image-01445", + "text": " If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+!(VK_NV_dedicated_allocation_image_aliasing)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-01509", + "text": " If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_NV_dedicated_allocation_image_aliasing)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-02628", + "text": " If the dedicated allocation image aliasing feature is not enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-02629", + "text": " If the dedicated allocation image aliasing feature is enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then memoryOffset must be zero, and image must be either equal to VkMemoryDedicatedAllocateInfo::image or an image that was created using the same parameters in VkImageCreateInfo, with the exception that extent and arrayLayers may differ subject to the following restrictions: every dimension in the extent parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created; and the arrayLayers parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-None-01901", + "text": " If image was created with the VK_IMAGE_CREATE_PROTECTED_BIT bit set, the image must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-None-01902", + "text": " If image was created with the VK_IMAGE_CREATE_PROTECTED_BIT bit not set, the image must not be bound to a memory object created with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01050", + "text": " If image was created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::image equal to an image handle created with identical creation parameters to image and memoryOffset must be zero" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01051", + "text": " If image was not created with 
VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-02728", + "text": " If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+!(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-02729", + "text": " If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-02989", + "text": " If memory was created by a memory import operation, that is not VkImportAndroidHardwareBufferInfoANDROID with a non-NULL buffer value, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-02990", + "text": " If memory was created with the VkImportAndroidHardwareBufferInfoANDROID memory import operation with a non-NULL buffer value, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" + } + ], "(VK_VERSION_1_1,VK_KHR_bind_memory2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { "vuid": "VUID-VkBindImageMemoryInfo-memory-01612", @@ -11849,40 +15477,6 @@ "text": " If the pNext chain includes a VkBindImagePlaneMemoryInfo structure, the difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with the same image and where VkBindImagePlaneMemoryInfo::planeAspect corresponds to the VkImagePlaneMemoryRequirementsInfo::planeAspect in the VkImageMemoryRequirementsInfo2 structure’s pNext chain" } ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ - { - "vuid": "VUID-VkBindImageMemoryInfo-image-01622", - "text": " If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image and memoryOffset must be zero" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+!(VK_NV_dedicated_allocation_image_aliasing)": [ - { - "vuid": "VUID-VkBindImageMemoryInfo-memory-01903", - "text": " If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_NV_dedicated_allocation_image_aliasing)": [ - { - "vuid": 
"VUID-VkBindImageMemoryInfo-memory-02630", - "text": " If the dedicated allocation image aliasing feature is not enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero" - }, - { - "vuid": "VUID-VkBindImageMemoryInfo-memory-02631", - "text": " If the dedicated allocation image aliasing feature is enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then memoryOffset must be zero, and image must be either equal to VkMemoryDedicatedAllocateInfo::image or an image that was created using the same parameters in VkImageCreateInfo, with the exception that extent and arrayLayers may differ subject to the following restrictions: every dimension in the extent parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created; and the arrayLayers parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)": [ - { - "vuid": "VUID-VkBindImageMemoryInfo-image-01623", - "text": " If image was created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::image equal to image and memoryOffset must be zero" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ - { - "vuid": "VUID-VkBindImageMemoryInfo-image-01624", - "text": " If image was not created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image" - } - ], "(VK_VERSION_1_1,VK_KHR_bind_memory2)+!(VK_VERSION_1_1+VK_KHR_swapchain)+!(VK_KHR_device_group+VK_KHR_swapchain)": [ { "vuid": "VUID-VkBindImageMemoryInfo-memory-01625", @@ -11920,28 +15514,6 @@ "vuid": "VUID-VkBindImageMemoryInfo-pNext-01632", "text": " If the pNext chain does not include a VkBindImageMemorySwapchainInfoKHR structure, memory must be a valid VkDeviceMemory handle" } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [ - { - "vuid": "VUID-VkBindImageMemoryInfo-handleTypes-02793", - "text": " If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+!(VK_ANDROID_external_memory_android_hardware_buffer)": [ - { - "vuid": "VUID-VkBindImageMemoryInfo-memory-02794", - "text": " If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" - } - ], - "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ - { - "vuid": "VUID-VkBindImageMemoryInfo-memory-02991", - "text": " If memory was created by a memory import operation, that is not 
VkImportAndroidHardwareBufferInfoANDROID with a non-NULL buffer value, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" - }, - { - "vuid": "VUID-VkBindImageMemoryInfo-memory-02992", - "text": " If memory was created with the VkImportAndroidHardwareBufferInfoANDROID memory import operation with a non-NULL buffer value, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created" - } ] }, "VkBindImageMemoryDeviceGroupInfo": { @@ -11980,7 +15552,7 @@ }, { "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-extent-01641", - "text": " The extent.height member of any element of pSplitInstanceBindRegions must either be a multiple of the sparse image block height of all non-metadata aspects of the image, or else extent.height + offset.y must equal the width of the image subresource" + "text": " The extent.height member of any element of pSplitInstanceBindRegions must either be a multiple of the sparse image block height of all non-metadata aspects of the image, or else extent.height + offset.y must equal the height of the image subresource" }, { "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType", @@ -12034,631 +15606,11 @@ } ] }, - "vkCreateAccelerationStructureNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-vkCreateAccelerationStructureNV-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureNV-pCreateInfo-parameter", - "text": " pCreateInfo must be a valid pointer to a valid VkAccelerationStructureCreateInfoNV structure" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureNV-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureNV-pAccelerationStructure-parameter", - "text": " pAccelerationStructure must be a valid pointer to a VkAccelerationStructureNV handle" - } - ] - }, - "VkAccelerationStructureCreateInfoNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421", - "text": " If compactedSize is not 0 then both info.geometryCount and info.instanceCount must be 0" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoNV-info-parameter", - "text": " info must be a valid VkAccelerationStructureInfoNV structure" - } - ] - }, - "VkAccelerationStructureInfoNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureInfoNV-geometryCount-02422", - "text": " geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-instanceCount-02423", - "text": " instanceCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxInstanceCount" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424", - "text": " The total number of triangles in all geometries must be less than or equal to 
VkPhysicalDeviceRayTracingPropertiesNV::maxTriangleCount" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-type-02425", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV then geometryCount must be 0" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-type-02426", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then instanceCount must be 0" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-type-02786", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then the geometryType member of each geometry in pGeometries must be the same" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-flags-02592", - "text": " If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV bit set" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-scratch-02781", - "text": " scratch must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-instanceData-02782", - "text": " If instanceData is not VK_NULL_HANDLE, instanceData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-type-parameter", - "text": " type must be a valid VkAccelerationStructureTypeNV value" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-flags-parameter", - "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsNV values" - }, - { - "vuid": "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", - "text": " If geometryCount is not 0, pGeometries must be a valid pointer to an array of geometryCount valid VkGeometryNV structures" - } - ] - }, - "vkCreateAccelerationStructureKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkCreateAccelerationStructureKHR-rayTracing-03487", - "text": " The rayTracing or rayQuery feature must be enabled" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureKHR-deviceAddress-03488", - "text": " If VkAccelerationStructureCreateInfoKHR::deviceAddress is not zero, the rayTracingAccelerationStructureCaptureReplay feature must be enabled" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureKHR-device-03489", - "text": " If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureKHR-pCreateInfo-parameter", - "text": " pCreateInfo must be a valid pointer to a valid VkAccelerationStructureCreateInfoKHR structure" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureKHR-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" - }, - { - "vuid": "VUID-vkCreateAccelerationStructureKHR-pAccelerationStructure-parameter", - "text": " pAccelerationStructure must be a valid pointer to a VkAccelerationStructureKHR handle" - } - ] - }, - "VkAccelerationStructureCreateInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - 
"vuid": "VUID-VkAccelerationStructureCreateInfoKHR-compactedSize-03490", - "text": " If compactedSize is not 0 then maxGeometryCount must be 0" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-compactedSize-02993", - "text": " If compactedSize is 0 then maxGeometryCount must not be 0" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03491", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR then maxGeometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxGeometryCount" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03492", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR then the maxPrimitiveCount member of each element of the pGeometryInfos array must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxInstanceCount" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-maxPrimitiveCount-03493", - "text": " The total number of triangles in all geometries must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxPrimitiveCount" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-maxPrimitiveCount-03494", - "text": " The total number of AABBs in all geometries must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxPrimitiveCount" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03495", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR and compactedSize is 0, maxGeometryCount must be 1" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03496", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR and compactedSize is 0, the geometryType member of elements of pGeometryInfos must be VK_GEOMETRY_TYPE_INSTANCES_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03497", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR and compactedSize is 0, the geometryType member of elements of pGeometryInfos must not be VK_GEOMETRY_TYPE_INSTANCES_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-03498", - "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR then the geometryType member of each geometry in pGeometryInfos must be the same" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-flags-03499", - "text": " If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR bit set" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-deviceAddress-03500", - "text": " If deviceAddress is not 0, VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingAccelerationStructureCaptureReplay must be VK_TRUE" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-type-parameter", - "text": " type must be a valid VkAccelerationStructureTypeKHR value" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-flags-parameter", - "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsKHR values" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateInfoKHR-pGeometryInfos-parameter", - "text": " If maxGeometryCount is not 0, pGeometryInfos must be a valid pointer to an 
array of maxGeometryCount valid VkAccelerationStructureCreateGeometryTypeInfoKHR structures" - } - ] - }, - "VkAccelerationStructureCreateGeometryTypeInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-geometryType-03501", - "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, vertexFormat must support the VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR in VkFormatProperties::bufferFeatures as returned by vkGetPhysicalDeviceFormatProperties2" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-geometryType-03502", - "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_GEOMETRY_TYPE_INFO_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-geometryType-parameter", - "text": " geometryType must be a valid VkGeometryTypeKHR value" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-indexType-parameter", - "text": " indexType must be a valid VkIndexType value" - }, - { - "vuid": "VUID-VkAccelerationStructureCreateGeometryTypeInfoKHR-vertexFormat-parameter", - "text": " If vertexFormat is not 0, vertexFormat must be a valid VkFormat value" - } - ] - }, - "VkGeometryNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkGeometryNV-geometryType-03503", - "text": " geometryType must be VK_GEOMETRY_TYPE_TRIANGLES_NV or VK_GEOMETRY_TYPE_AABBS_NV" - }, - { - "vuid": "VUID-VkGeometryNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_NV" - }, - { - "vuid": "VUID-VkGeometryNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkGeometryNV-geometryType-parameter", - "text": " geometryType must be a valid VkGeometryTypeKHR value" - }, - { - "vuid": "VUID-VkGeometryNV-geometry-parameter", - "text": " geometry must be a valid VkGeometryDataNV structure" - }, - { - "vuid": "VUID-VkGeometryNV-flags-parameter", - "text": " flags must be a valid combination of VkGeometryFlagBitsKHR values" - } - ] - }, - "VkGeometryDataNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkGeometryDataNV-triangles-parameter", - "text": " triangles must be a valid VkGeometryTrianglesNV structure" - }, - { - "vuid": "VUID-VkGeometryDataNV-aabbs-parameter", - "text": " aabbs must be a valid VkGeometryAABBNV structure" - } - ] - }, - "VkGeometryTrianglesNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02428", - "text": " vertexOffset must be less than the size of vertexData" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02429", - "text": " vertexOffset must be a multiple of the component size of vertexFormat" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-02430", - "text": " vertexFormat must be one of VK_FORMAT_R32G32B32_SFLOAT, VK_FORMAT_R32G32_SFLOAT, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16_SNORM, or VK_FORMAT_R16G16B16_SNORM" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02431", - "text": " indexOffset must 
be less than the size of indexData" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02432", - "text": " indexOffset must be a multiple of the element size of indexType" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexType-02433", - "text": " indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_NV" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexData-02434", - "text": " indexData must be VK_NULL_HANDLE if indexType is VK_INDEX_TYPE_NONE_NV" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexData-02435", - "text": " indexData must be a valid VkBuffer handle if indexType is not VK_INDEX_TYPE_NONE_NV" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexCount-02436", - "text": " indexCount must be 0 if indexType is VK_INDEX_TYPE_NONE_NV" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02437", - "text": " transformOffset must be less than the size of transformData" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02438", - "text": " transformOffset must be a multiple of 16" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-vertexData-parameter", - "text": " If vertexData is not VK_NULL_HANDLE, vertexData must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-parameter", - "text": " vertexFormat must be a valid VkFormat value" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexData-parameter", - "text": " If indexData is not VK_NULL_HANDLE, indexData must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-indexType-parameter", - "text": " indexType must be a valid VkIndexType value" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-transformData-parameter", - "text": " If transformData is not VK_NULL_HANDLE, transformData must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-VkGeometryTrianglesNV-commonparent", - "text": " Each of indexData, transformData, and vertexData that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" - } - ] - }, - "VkGeometryAABBNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkGeometryAABBNV-offset-02439", - "text": " offset must be less than the size of aabbData" - }, - { - "vuid": "VUID-VkGeometryAABBNV-offset-02440", - "text": " offset must be a multiple of 8" - }, - { - "vuid": "VUID-VkGeometryAABBNV-stride-02441", - "text": " stride must be a multiple of 8" - }, - { - "vuid": "VUID-VkGeometryAABBNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV" - }, - { - "vuid": "VUID-VkGeometryAABBNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkGeometryAABBNV-aabbData-parameter", - "text": " If aabbData is not VK_NULL_HANDLE, aabbData must be a valid VkBuffer handle" - } - ] - }, - "vkDestroyAccelerationStructureKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02442", - "text": " All submitted commands that refer to accelerationStructure must have completed execution" - }, - { - "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02443", - "text": " If VkAllocationCallbacks were provided when accelerationStructure was created, a compatible set of callbacks must 
be provided here" - }, - { - "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-02444", - "text": " If no VkAllocationCallbacks were provided when accelerationStructure was created, pAllocator must be NULL" - }, - { - "vuid": "VUID-vkDestroyAccelerationStructureKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-parameter", - "text": " If accelerationStructure is not VK_NULL_HANDLE, accelerationStructure must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-vkDestroyAccelerationStructureKHR-pAllocator-parameter", - "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" - }, - { - "vuid": "VUID-vkDestroyAccelerationStructureKHR-accelerationStructure-parent", - "text": " If accelerationStructure is a valid handle, it must have been created, allocated, or retrieved from device" - } - ] - }, - "vkGetAccelerationStructureMemoryRequirementsNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureMemoryRequirementsInfoNV structure" - }, - { - "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pMemoryRequirements-parameter", - "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2KHR structure" - } - ] - }, - "VkAccelerationStructureMemoryRequirementsInfoNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV" - }, - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-type-parameter", - "text": " type must be a valid VkAccelerationStructureMemoryRequirementsTypeNV value" - }, - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureNV handle" - } - ] - }, - "vkGetAccelerationStructureMemoryRequirementsKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureMemoryRequirementsInfoKHR structure" - }, - { - "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsKHR-pMemoryRequirements-parameter", - "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure" - } - ] - }, - "VkAccelerationStructureMemoryRequirementsInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-pNext-pNext", - 
"text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-type-parameter", - "text": " type must be a valid VkAccelerationStructureMemoryRequirementsTypeKHR value" - }, - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-buildType-parameter", - "text": " buildType must be a valid VkAccelerationStructureBuildTypeKHR value" - }, - { - "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoKHR-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" - } - ] - }, - "vkBindAccelerationStructureMemoryKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkBindAccelerationStructureMemoryKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkBindAccelerationStructureMemoryKHR-pBindInfos-parameter", - "text": " pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindAccelerationStructureMemoryInfoKHR structures" - }, - { - "vuid": "VUID-vkBindAccelerationStructureMemoryKHR-bindInfoCount-arraylength", - "text": " bindInfoCount must be greater than 0" - } - ] - }, - "VkBindAccelerationStructureMemoryInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-accelerationStructure-02450", - "text": " accelerationStructure must not already be backed by a memory object" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memoryOffset-02451", - "text": " memoryOffset must be less than the size of memory" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memory-02593", - "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memoryOffset-02594", - "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-size-02595", - "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR must be less than or equal to the size of memory minus memoryOffset" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_KHR" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-memory-parameter", - "text": " memory must be a valid VkDeviceMemory handle" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-pDeviceIndices-parameter", - "text": " If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to 
an array of deviceIndexCount uint32_t values" - }, - { - "vuid": "VUID-VkBindAccelerationStructureMemoryInfoKHR-commonparent", - "text": " Both of accelerationStructure, and memory must have been created, allocated, or retrieved from the same VkDevice" - } - ] - }, - "vkGetAccelerationStructureHandleNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240", - "text": " dataSize must be large enough to contain the result of the query, as described above" - }, - { - "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787", - "text": " accelerationStructure must be bound completely and contiguously to a single VkDeviceMemory object via vkBindAccelerationStructureMemoryKHR" - }, - { - "vuid": "VUID-vkGetAccelerationStructureHandleNV-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-vkGetAccelerationStructureHandleNV-pData-parameter", - "text": " pData must be a valid pointer to an array of dataSize bytes" - }, - { - "vuid": "VUID-vkGetAccelerationStructureHandleNV-dataSize-arraylength", - "text": " dataSize must be greater than 0" - }, - { - "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent", - "text": " accelerationStructure must have been created, allocated, or retrieved from device" - } - ] - }, - "vkGetAccelerationStructureDeviceAddressKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-device-03504", - "text": " If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled" - }, - { - "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkGetAccelerationStructureDeviceAddressKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureDeviceAddressInfoKHR structure" - } - ] - }, - "VkAccelerationStructureDeviceAddressInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureDeviceAddressInfoKHR-accelerationStructure-parameter", - "text": " accelerationStructure must be a valid VkAccelerationStructureKHR handle" - } - ] - }, "vkCreateSampler": { "core": [ { "vuid": "VUID-vkCreateSampler-maxSamplerAllocationCount-04110", - "text": " There must be less than VkPhysicalDeviceLimits::maxSamplerAllocationCount VkSampler objects currently created on the device." + "text": " There must be less than VkPhysicalDeviceLimits::maxSamplerAllocationCount VkSampler objects currently created on the device." 
}, { "vuid": "VUID-vkCreateSampler-device-parameter", @@ -12724,6 +15676,10 @@ "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01078", "text": " If any of addressModeU, addressModeV or addressModeW are VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, borderColor must be a valid VkBorderColor value" }, + { + "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01079", + "text": " If samplerMirrorClampToEdge is not enabled, and if the VK_KHR_sampler_mirror_clamp_to_edge extension is not enabled, addressModeU, addressModeV and addressModeW must not be VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE" + }, { "vuid": "VUID-VkSamplerCreateInfo-compareEnable-01080", "text": " If compareEnable is VK_TRUE, compareOp must be a valid VkCompareOp value" @@ -12769,6 +15725,12 @@ "text": " addressModeW must be a valid VkSamplerAddressMode value" } ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkSamplerCreateInfo-samplerMipLodBias-04467", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::samplerMipLodBias is VK_FALSE, mipLodBias must be zero." + } + ], "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { "vuid": "VUID-VkSamplerCreateInfo-minFilter-01645", @@ -12785,12 +15747,6 @@ "text": " The sampler reduction mode must be set to VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE if sampler {YCbCr} conversion is enabled" } ], - "(VK_VERSION_1_2,VK_KHR_sampler_mirror_clamp_to_edge)": [ - { - "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01079", - "text": " If ifdef::VK_VERSION_1_2[samplerMirrorClampToEdge is not enabled, and if] the VK_KHR_sampler_mirror_clamp_to_edge extension is not enabled, addressModeU, addressModeV and addressModeW must not be VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE" - } - ], "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ { "vuid": "VUID-VkSamplerCreateInfo-magFilter-01081", @@ -12842,11 +15798,15 @@ "(VK_EXT_custom_border_color)": [ { "vuid": "VUID-VkSamplerCreateInfo-borderColor-04011", - "text": " If borderColor is set to one of VK_BORDER_COLOR_FLOAT_CUSTOM_EXT or VK_BORDER_COLOR_INT_CUSTOM_EXT, then a VkSamplerCustomBorderColorCreateInfoEXT must be present in the pNext chain" + "text": " If borderColor is one of VK_BORDER_COLOR_FLOAT_CUSTOM_EXT or VK_BORDER_COLOR_INT_CUSTOM_EXT, then a VkSamplerCustomBorderColorCreateInfoEXT must be present in the pNext chain" }, { "vuid": "VUID-VkSamplerCreateInfo-customBorderColors-04085", - "text": " If the customBorderColors feature is not enabled, borderColor must not be set to VK_BORDER_COLOR_FLOAT_CUSTOM_EXT or VK_BORDER_COLOR_INT_CUSTOM_EXT" + "text": " If the customBorderColors feature is not enabled, borderColor must not be VK_BORDER_COLOR_FLOAT_CUSTOM_EXT or VK_BORDER_COLOR_INT_CUSTOM_EXT" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-borderColor-04442", + "text": " If borderColor is one of VK_BORDER_COLOR_FLOAT_CUSTOM_EXT or VK_BORDER_COLOR_INT_CUSTOM_EXT, and VkSamplerCustomBorderColorCreateInfoEXT::format is not VK_FORMAT_UNDEFINED, VkSamplerCustomBorderColorCreateInfoEXT::customBorderColor must be within the range of values representable in format." 
}, { "vuid": "VUID-VkSamplerCreateInfo-None-04012", @@ -12914,7 +15874,7 @@ "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { "vuid": "VUID-vkCreateSamplerYcbcrConversion-None-01648", - "text": " The sampler {YCbCr} conversion feature must be enabled" + "text": " The sampler {YCbCr} conversion feature must be enabled" }, { "vuid": "VUID-vkCreateSamplerYcbcrConversion-device-parameter", @@ -12958,11 +15918,11 @@ }, { "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01651", - "text": " If the potential format features of the sampler {YCbCr} conversion do not support VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_COSITED_EVEN" + "text": " If the potential format features of the sampler {YCbCr} conversion do not support VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_COSITED_EVEN if the corresponding channels are downsampled" }, { "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01652", - "text": " If the potential format features of the sampler {YCbCr} conversion do not support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_MIDPOINT" + "text": " If the potential format features of the sampler {YCbCr} conversion do not support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_MIDPOINT if the corresponding channels are downsampled" }, { "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-02581", @@ -13118,7 +16078,7 @@ }, { "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDescriptorSetLayoutBindingFlagsCreateInfo" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDescriptorSetLayoutBindingFlagsCreateInfo or VkMutableDescriptorTypeCreateInfoVALVE" }, { "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-sType-unique", @@ -13149,6 +16109,16 @@ "text": " If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then all elements of pBindings must not have a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT" } ], + "(VK_KHR_push_descriptor)+(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-04590", + "text": " If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, flags must not contain VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-04591", + "text": " If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, pBindings must not have a descriptorType of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" + } + ], "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)": [ { "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000", @@ -13158,6 +16128,80 @@ "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001", "text": " If any binding has the VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT bit set, then all bindings must not have descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC" } + ], + "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)+(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-04592", + "text": " If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, flags 
must not contain VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE" + } + ], + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-04593", + "text": " If any binding has a descriptorType of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, then a VkMutableDescriptorTypeCreateInfoVALVE must be present in the pNext chain" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-04594", + "text": " If a binding has a descriptorType value of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, then pImmutableSamplers must be NULL" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-mutableDescriptorType-04595", + "text": " If VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE::mutableDescriptorType is not enabled, pBindings must not contain a descriptorType of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-04596", + "text": " If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE::mutableDescriptorType must be enabled" + } + ] + }, + "VkMutableDescriptorTypeCreateInfoVALVE": { + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkMutableDescriptorTypeCreateInfoVALVE-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE" + }, + { + "vuid": "VUID-VkMutableDescriptorTypeCreateInfoVALVE-pMutableDescriptorTypeLists-parameter", + "text": " If mutableDescriptorTypeListCount is not 0, pMutableDescriptorTypeLists must be a valid pointer to an array of mutableDescriptorTypeListCount valid VkMutableDescriptorTypeListVALVE structures" + } + ] + }, + "VkMutableDescriptorTypeListVALVE": { + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-descriptorTypeCount-04597", + "text": " descriptorTypeCount must not be 0 if the corresponding binding is of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" + }, + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-pDescriptorTypes-04598", + "text": " pDescriptorTypes must be a valid pointer to an array of descriptorTypeCount valid, unique VkDescriptorType values if the given binding is of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE type" + }, + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-descriptorTypeCount-04599", + "text": " descriptorTypeCount must be 0 if the corresponding binding is not of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" + }, + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-pDescriptorTypes-04600", + "text": " pDescriptorTypes must not contain VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" + }, + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-pDescriptorTypes-04601", + "text": " pDescriptorTypes must not contain VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC" + }, + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-pDescriptorTypes-04602", + "text": " pDescriptorTypes must not contain VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC" + }, + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-pDescriptorTypes-parameter", + "text": " If descriptorTypeCount is not 0, pDescriptorTypes must be a valid pointer to an array of descriptorTypeCount valid VkDescriptorType values" + } + ], + "(VK_VALVE_mutable_descriptor_type)+(VK_EXT_inline_uniform_block)": [ + { + "vuid": "VUID-VkMutableDescriptorTypeListVALVE-pDescriptorTypes-04603", + "text": " pDescriptorTypes must not contain VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT" + } ] }, "VkDescriptorSetLayoutBinding": { @@ -13180,6 +16224,10 @@ } ], "(VK_EXT_inline_uniform_block)": [ + { + "vuid": 
"VUID-VkDescriptorSetLayoutBinding-descriptorType-04604", + "text": " If the inlineUniformBlock feature is not enabled, descriptorType must not be VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT" + }, { "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209", "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT then descriptorCount must be a multiple of 4" @@ -13194,6 +16242,12 @@ "vuid": "VUID-VkDescriptorSetLayoutBinding-pImmutableSamplers-04009", "text": " The sampler objects indicated by pImmutableSamplers must not have a borderColor with one of the values VK_BORDER_COLOR_FLOAT_CUSTOM_EXT or VK_BORDER_COLOR_INT_CUSTOM_EXT" } + ], + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-04605", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, then pImmutableSamplers must be NULL." + } ] }, "VkDescriptorSetLayoutBindingFlagsCreateInfo": { @@ -13256,7 +16310,7 @@ }, { "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-parameter", - "text": " If bindingCount is not 0, and pBindingFlags is not NULL, pBindingFlags must be a valid pointer to an array of bindingCount valid combinations of VkDescriptorBindingFlagBits values" + "text": " If bindingCount is not 0, pBindingFlags must be a valid pointer to an array of bindingCount valid combinations of VkDescriptorBindingFlagBits values" } ], "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)+(VK_KHR_push_descriptor)": [ @@ -13270,6 +16324,12 @@ "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingInlineUniformBlockUpdateAfterBind-02211", "text": " If VkPhysicalDeviceInlineUniformBlockFeaturesEXT::descriptorBindingInlineUniformBlockUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT" } + ], + "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-descriptorBindingAccelerationStructureUpdateAfterBind-03570", + "text": " If VkPhysicalDeviceAccelerationStructureFeaturesKHR::descriptorBindingAccelerationStructureUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR or VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT" + } ] }, "vkGetDescriptorSetLayoutSupport": { @@ -13597,10 +16657,40 @@ "text": " pSetLayouts must not contain more than one descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR set" } ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03571", + "text": " The total number of bindings in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxPerStageDescriptorAccelerationStructures" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03572", + "text": " The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to 
VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxPerStageDescriptorUpdateAfterBindAccelerationStructures" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03573", + "text": " The total number of bindings in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxDescriptorSetAccelerationStructures" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03574", + "text": " The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxDescriptorSetUpdateAfterBindAccelerationStructures" + } + ], + "(VK_NV_ray_tracing)": [ { "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02381", - "text": " The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxDescriptorSetAccelerationStructures" + "text": " The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxDescriptorSetAccelerationStructures" + } + ], + "(VK_EXT_fragment_density_map2)": [ + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pImmutableSamplers-03566", + "text": " The total number of pImmutableSamplers created with flags containing VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT or VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceFragmentDensityMap2PropertiesEXT::maxDescriptorSetSubsampledSamplers" + } + ], + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-04606", + "text": " Any element of pSetLayouts must not have been created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE bit set" } ] }, @@ -13700,7 +16790,7 @@ }, { "vuid": "VUID-VkDescriptorPoolCreateInfo-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDescriptorPoolInlineUniformBlockCreateInfoEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDescriptorPoolInlineUniformBlockCreateInfoEXT or VkMutableDescriptorTypeCreateInfoVALVE" }, { "vuid": "VUID-VkDescriptorPoolCreateInfo-sType-unique", @@ -13718,6 +16808,26 @@ "vuid": "VUID-VkDescriptorPoolCreateInfo-poolSizeCount-arraylength", "text": " poolSizeCount must be greater than 0" } + ], + "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)+(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-flags-04607", + "text": " If flags has the VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE bit set, then the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT bit must not be set" + } + ], + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-mutableDescriptorType-04608", + "text": " If 
VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE::mutableDescriptorType is not enabled, pPoolSizes must not contain a descriptorType of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" + }, + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-flags-04609", + "text": " If flags has the VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE bit set, VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE::mutableDescriptorType must be enabled" + }, + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-pPoolSizes-04787", + "text": " If pPoolSizes contains a descriptorType of VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, any other VK_DESCRIPTOR_TYPE_MUTABLE_VALVE element in pPoolSizes must not have sets of supported descriptor types which partially overlap" + } ] }, "VkDescriptorPoolInlineUniformBlockCreateInfoEXT": { @@ -13794,7 +16904,7 @@ }, { "vuid": "VUID-vkAllocateDescriptorSets-pAllocateInfo::descriptorSetCount-arraylength", - "text": " The value referenced by pAllocateInfo->descriptorSetCount must be greater than 0" + "text": " pAllocateInfo->descriptorSetCount must be greater than 0" } ] }, @@ -13821,6 +16931,12 @@ "text": " If any element of pSetLayouts was created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT bit set, descriptorPool must have been created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set" } ], + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-04610", + "text": " If any element of pSetLayouts was created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE bit set, descriptorPool must have been created with the VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE flag set" + } + ], "core": [ { "vuid": "VUID-VkDescriptorSetAllocateInfo-sType-sType", @@ -13882,10 +16998,6 @@ "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00310", "text": " pDescriptorSets must be a valid pointer to an array of descriptorSetCount VkDescriptorSet handles, each element of which must either be a valid handle or VK_NULL_HANDLE" }, - { - "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00311", - "text": " Each valid handle in pDescriptorSets must have been allocated from descriptorPool" - }, { "vuid": "VUID-vkFreeDescriptorSets-descriptorPool-00312", "text": " descriptorPool must have been created with the VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT flag" @@ -14022,10 +17134,6 @@ "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02997", "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT and the nullDescriptor feature is not enabled, the imageView member of each element of pImageInfo must not be VK_NULL_HANDLE" }, - { - "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01402", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, for each descriptor that will be accessed via load or store operations the imageLayout member for corresponding elements of pImageInfo must be VK_IMAGE_LAYOUT_GENERAL" - }, { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00327", "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER or VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, the offset member of each element of pBufferInfo must be a multiple of VkPhysicalDeviceLimits::minUniformBufferOffsetAlignment" @@ -14071,8 +17179,20 @@ "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the imageView member of each element of pImageInfo must have been created 
with VK_IMAGE_USAGE_SAMPLED_BIT set" }, { - "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01403", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the imageLayout member of each element of pImageInfo must be a member of the list given in Sampled Image or Combined Image Sampler, corresponding to its type" + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-04149", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE the imageLayout member of each element of pImageInfo must be a member of the list given in Sampled Image" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-04150", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER the imageLayout member of each element of pImageInfo must be a member of the list given in Combined Image Sampler" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-04151", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT the imageLayout member of each element of pImageInfo must be a member of the list given in Input Attachment" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-04152", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE the imageLayout member of each element of pImageInfo must be a member of the list given in Storage Image" }, { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00338", @@ -14092,7 +17212,7 @@ }, { "vuid": "VUID-VkWriteDescriptorSet-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkWriteDescriptorSetAccelerationStructureKHR or VkWriteDescriptorSetInlineUniformBlockEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkWriteDescriptorSetAccelerationStructureKHR, VkWriteDescriptorSetAccelerationStructureNV, or VkWriteDescriptorSetInlineUniformBlockEXT" }, { "vuid": "VUID-VkWriteDescriptorSet-sType-unique", @@ -14125,12 +17245,18 @@ "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, the pNext chain must include a VkWriteDescriptorSetInlineUniformBlockEXT structure whose dataSize member equals descriptorCount" } ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "(VK_KHR_acceleration_structure)": [ { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02382", "text": " If descriptorType is VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, the pNext chain must include a VkWriteDescriptorSetAccelerationStructureKHR structure whose accelerationStructureCount member equals descriptorCount" } ], + "(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-03817", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, the pNext chain must include a VkWriteDescriptorSetAccelerationStructureNV structure whose accelerationStructureCount member equals descriptorCount" + } + ], "(VK_VULKAN_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01946", @@ -14145,10 +17271,10 @@ "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and dstSet was allocated with a layout that included immutable samplers for dstBinding, then the imageView member of each element of pImageInfo which corresponds to an immutable sampler that enables sampler {YCbCr} conversion must have been created with a VkSamplerYcbcrConversionInfo structure in its pNext chain with an identically 
defined VkSamplerYcbcrConversionInfo to the corresponding immutable sampler" } ], - "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)": [ + "(VK_VALVE_mutable_descriptor_type)": [ { - "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-03048", - "text": " All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those with a descriptorCount of zero, must have identical VkDescriptorBindingFlagBits" + "vuid": "VUID-VkWriteDescriptorSet-dstSet-04611", + "text": " If the VkDescriptorSetLayoutBinding for dstSet at dstBinding is VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, the new active descriptor type descriptorType must exist in the corresponding pMutableDescriptorTypeLists list for dstBinding" } ] }, @@ -14208,6 +17334,12 @@ "vuid": "VUID-VkDescriptorImageInfo-sampler-01564", "text": " If sampler is used and the VkFormat of the image is a multi-planar format, the image must have been created with VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, and the aspectMask of the imageView must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT or (for three-plane formats only) VK_IMAGE_ASPECT_PLANE_2_BIT" } + ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkDescriptorImageInfo-mutableComparisonSamplers-04450", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::mutableComparisonSamplers is VK_FALSE, then sampler must have been created with VkSamplerCreateInfo::compareEnable set to VK_FALSE." + } ] }, "VkWriteDescriptorSetInlineUniformBlockEXT": { @@ -14231,14 +17363,18 @@ ] }, "VkWriteDescriptorSetAccelerationStructureKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "(VK_KHR_acceleration_structure)": [ { "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-accelerationStructureCount-02236", "text": " accelerationStructureCount must be equal to descriptorCount in the extended structure" }, { - "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-02764", - "text": " Each acceleration structure in pAccelerationStructures must have been created with VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR" + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-03579", + "text": " Each acceleration structure in pAccelerationStructures must have been created with a type of VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-03580", + "text": " If the nullDescriptor feature is not enabled, each element of pAccelerationStructures must not be VK_NULL_HANDLE" }, { "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-sType-sType", @@ -14246,7 +17382,7 @@ }, { "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-pAccelerationStructures-parameter", - "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid or VK_NULL_HANDLE VkAccelerationStructureKHR handles" }, { "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureKHR-accelerationStructureCount-arraylength", @@ -14254,6 +17390,34 @@ } ] }, + "VkWriteDescriptorSetAccelerationStructureNV": { + "(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-03747", + "text": " accelerationStructureCount must be equal to descriptorCount 
in the extended structure" + }, + { + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-03748", + "text": " Each acceleration structure in pAccelerationStructures must have been created with VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR" + }, + { + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-03749", + "text": " If the nullDescriptor feature is not enabled, each member of pAccelerationStructures must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV" + }, + { + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-parameter", + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid or VK_NULL_HANDLE VkAccelerationStructureNV handles" + }, + { + "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-arraylength", + "text": " accelerationStructureCount must be greater than 0" + } + ] + }, "VkCopyDescriptorSet": { "core": [ { @@ -14325,16 +17489,42 @@ "text": " If srcSet’s layout was created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set, then dstSet’s layout must also have been created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set" }, { - "vuid": "VUID-VkCopyDescriptorSet-srcSet-01919", + "vuid": "VUID-VkCopyDescriptorSet-srcSet-01920", + "text": " If the descriptor pool from which srcSet was allocated was created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set, then the descriptor pool from which dstSet was allocated must also have been created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set" + } + ], + "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)+(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-04885", + "text": " If srcSet’s layout was created with neither VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT nor VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE flags set, then dstSet’s layout must have been created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-04887", + "text": " If the descriptor pool from which srcSet was allocated was created with neither VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT nor VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE flags set, then the descriptor pool from which dstSet was allocated must have been created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set" + } + ], + "(VK_VERSION_1_2,VK_EXT_descriptor_indexing)+!(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-04886", "text": " If srcSet’s layout was created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set, then dstSet’s layout must also have been created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT flag set" }, { - "vuid": "VUID-VkCopyDescriptorSet-srcSet-01920", - "text": " If the descriptor pool from which srcSet was allocated was created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set, then the descriptor pool from which dstSet was allocated must also have been created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set" + "vuid": "VUID-VkCopyDescriptorSet-srcSet-04888", + "text": " If the descriptor pool 
from which srcSet was allocated was created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set, then the descriptor pool from which dstSet was allocated must also have been created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set" + } + ], + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkCopyDescriptorSet-dstSet-04612", + "text": " If VkDescriptorSetLayoutBinding for dstSet at dstBinding is VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, the new active descriptor type must exist in the corresponding pMutableDescriptorTypeLists list for dstBinding if the new active descriptor type is not VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" }, { - "vuid": "VUID-VkCopyDescriptorSet-srcSet-01921", - "text": " If the descriptor pool from which srcSet was allocated was created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set, then the descriptor pool from which dstSet was allocated must also have been created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT flag set" + "vuid": "VUID-VkCopyDescriptorSet-srcSet-04613", + "text": " If VkDescriptorSetLayoutBinding for srcSet at srcBinding is VK_DESCRIPTOR_TYPE_MUTABLE_VALVE and the VkDescriptorSetLayoutBinding for dstSet at dstBinding is not VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, the active descriptor type for the source descriptor must match the descriptor type of dstBinding" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-dstSet-04614", + "text": " If VkDescriptorSetLayoutBinding for dstSet at dstBinding is VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, and the new active descriptor type is VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, the pMutableDescriptorTypeLists for srcBinding and dstBinding must match exactly" } ] }, @@ -14406,6 +17596,12 @@ "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353", "text": " If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, set must be the unique set number in the pipeline layout that uses a descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR" } + ], + "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)+(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-04615", + "text": " If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, descriptorSetLayout must not contain a binding with type VK_DESCRIPTOR_TYPE_MUTABLE_VALVE" + } ] }, "VkDescriptorUpdateTemplateEntry": { @@ -14438,11 +17634,11 @@ "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [ { "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356", - "text": " If VkAllocationCallbacks were provided when descriptorSetLayout was created, a compatible set of callbacks must be provided here" + "text": " If VkAllocationCallbacks were provided when descriptorUpdateTemplate was created, a compatible set of callbacks must be provided here" }, { "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357", - "text": " If no VkAllocationCallbacks were provided when descriptorSetLayout was created, pAllocator must be NULL" + "text": " If no VkAllocationCallbacks were provided when descriptorUpdateTemplate was created, pAllocator must be NULL" }, { "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-device-parameter", @@ -14552,6 +17748,12 @@ "vuid": "VUID-vkCmdBindDescriptorSets-commonparent", "text": " Each of commandBuffer, layout, and the elements of pDescriptorSets must have been created, allocated, or retrieved from the same VkDevice" } + ], + 
"(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-vkCmdBindDescriptorSets-pDescriptorSets-04616", + "text": " Each element of pDescriptorSets must not have been allocated from a VkDescriptorPool with the VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE flag set" + } ] }, "vkCmdPushDescriptorSetKHR": { @@ -14610,7 +17812,7 @@ }, { "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-pData-01686", - "text": " pData must be a valid pointer to a memory containing one or more valid instances of VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView in a layout defined by descriptorUpdateTemplate when it was created with vkCreateDescriptorUpdateTemplateKHR" + "text": " pData must be a valid pointer to a memory containing one or more valid instances of VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView in a layout defined by descriptorUpdateTemplate when it was created with vkCreateDescriptorUpdateTemplate" }, { "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter", @@ -14766,6 +17968,1540 @@ } ] }, + "BaryCoordNV": { + "(VK_NV_fragment_shader_barycentric)": [ + { + "vuid": "VUID-BaryCoordNV-BaryCoordNV-04154", + "text": " The BaryCoordNV decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordNV-BaryCoordNV-04155", + "text": " The variable decorated with BaryCoordNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordNV-BaryCoordNV-04156", + "text": " The variable decorated with BaryCoordNV must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordNoPerspAMD": { + "(VK_AMD_shader_explicit_vertex_parameter)": [ + { + "vuid": "VUID-BaryCoordNoPerspAMD-BaryCoordNoPerspAMD-04157", + "text": " The BaryCoordNoPerspAMD decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordNoPerspAMD-BaryCoordNoPerspAMD-04158", + "text": " The variable decorated with BaryCoordNoPerspAMD must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordNoPerspAMD-BaryCoordNoPerspAMD-04159", + "text": " The variable decorated with BaryCoordNoPerspAMD must be declared as a two-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordNoPerspNV": { + "(VK_NV_fragment_shader_barycentric)": [ + { + "vuid": "VUID-BaryCoordNoPerspNV-BaryCoordNoPerspNV-04160", + "text": " The BaryCoordNoPerspNV decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordNoPerspNV-BaryCoordNoPerspNV-04161", + "text": " The variable decorated with BaryCoordNoPerspNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordNoPerspNV-BaryCoordNoPerspNV-04162", + "text": " The variable decorated with BaryCoordNoPerspNV must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordNoPerspCentroidAMD": { + "(VK_AMD_shader_explicit_vertex_parameter)": [ + { + "vuid": "VUID-BaryCoordNoPerspCentroidAMD-BaryCoordNoPerspCentroidAMD-04163", + "text": " The BaryCoordNoPerspCentroidAMD decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordNoPerspCentroidAMD-BaryCoordNoPerspCentroidAMD-04164", + "text": " The variable decorated with BaryCoordNoPerspCentroidAMD must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordNoPerspCentroidAMD-BaryCoordNoPerspCentroidAMD-04165", + "text": " The variable decorated with BaryCoordNoPerspCentroidAMD 
must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordNoPerspSampleAMD": { + "(VK_AMD_shader_explicit_vertex_parameter)": [ + { + "vuid": "VUID-BaryCoordNoPerspSampleAMD-BaryCoordNoPerspSampleAMD-04166", + "text": " The BaryCoordNoPerspSampleAMD decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordNoPerspSampleAMD-BaryCoordNoPerspSampleAMD-04167", + "text": " The variable decorated with BaryCoordNoPerspSampleAMD must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordNoPerspSampleAMD-BaryCoordNoPerspSampleAMD-04168", + "text": " The variable decorated with BaryCoordNoPerspSampleAMD must be declared as a two-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordPullModelAMD": { + "(VK_AMD_shader_explicit_vertex_parameter)": [ + { + "vuid": "VUID-BaryCoordPullModelAMD-BaryCoordPullModelAMD-04169", + "text": " The BaryCoordPullModelAMD decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordPullModelAMD-BaryCoordPullModelAMD-04170", + "text": " The variable decorated with BaryCoordPullModelAMD must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordPullModelAMD-BaryCoordPullModelAMD-04171", + "text": " The variable decorated with BaryCoordPullModelAMD must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordSmoothAMD": { + "(VK_AMD_shader_explicit_vertex_parameter)": [ + { + "vuid": "VUID-BaryCoordSmoothAMD-BaryCoordSmoothAMD-04172", + "text": " The BaryCoordSmoothAMD decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordSmoothAMD-BaryCoordSmoothAMD-04173", + "text": " The variable decorated with BaryCoordSmoothAMD must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordSmoothAMD-BaryCoordSmoothAMD-04174", + "text": " The variable decorated with BaryCoordSmoothAMD must be declared as a two-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordSmoothCentroidAMD": { + "(VK_AMD_shader_explicit_vertex_parameter)": [ + { + "vuid": "VUID-BaryCoordSmoothCentroidAMD-BaryCoordSmoothCentroidAMD-04175", + "text": " The BaryCoordSmoothCentroidAMD decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordSmoothCentroidAMD-BaryCoordSmoothCentroidAMD-04176", + "text": " The variable decorated with BaryCoordSmoothCentroidAMD must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordSmoothCentroidAMD-BaryCoordSmoothCentroidAMD-04177", + "text": " The variable decorated with BaryCoordSmoothCentroidAMD must be declared as a two-component vector of 32-bit floating-point values" + } + ] + }, + "BaryCoordSmoothSampleAMD": { + "(VK_AMD_shader_explicit_vertex_parameter)": [ + { + "vuid": "VUID-BaryCoordSmoothSampleAMD-BaryCoordSmoothSampleAMD-04178", + "text": " The BaryCoordSmoothSampleAMD decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-BaryCoordSmoothSampleAMD-BaryCoordSmoothSampleAMD-04179", + "text": " The variable decorated with BaryCoordSmoothSampleAMD must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaryCoordSmoothSampleAMD-BaryCoordSmoothSampleAMD-04180", + "text": " The variable decorated with BaryCoordSmoothSampleAMD must be declared as a two-component vector of 32-bit floating-point values" + } + ] + }, + 
"BaseInstance": { + "(VK_VERSION_1_1,VK_KHR_shader_draw_parameters)": [ + { + "vuid": "VUID-BaseInstance-BaseInstance-04181", + "text": " The BaseInstance decoration must be used only within the Vertex {ExecutionModel}" + }, + { + "vuid": "VUID-BaseInstance-BaseInstance-04182", + "text": " The variable decorated with BaseInstance must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaseInstance-BaseInstance-04183", + "text": " The variable decorated with BaseInstance must be declared as a scalar 32-bit integer value" + } + ] + }, + "BaseVertex": { + "(VK_VERSION_1_1,VK_KHR_shader_draw_parameters)": [ + { + "vuid": "VUID-BaseVertex-BaseVertex-04184", + "text": " The BaseVertex decoration must be used only within the Vertex {ExecutionModel}" + }, + { + "vuid": "VUID-BaseVertex-BaseVertex-04185", + "text": " The variable decorated with BaseVertex must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-BaseVertex-BaseVertex-04186", + "text": " The variable decorated with BaseVertex must be declared as a scalar 32-bit integer value" + } + ] + }, + "ClipDistance": { + "core": [ + { + "vuid": "VUID-ClipDistance-ClipDistance-04187", + "text": " The ClipDistance decoration must be used only within the MeshNV, Vertex, Fragment, TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-ClipDistance-ClipDistance-04188", + "text": " The variable decorated with ClipDistance within the MeshNV or Vertex {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-ClipDistance-ClipDistance-04189", + "text": " The variable decorated with ClipDistance within the Fragment {ExecutionModel} must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ClipDistance-ClipDistance-04190", + "text": " The variable decorated with ClipDistance within the TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel} must not be declared in a {StorageClass} other than Input or Output" + }, + { + "vuid": "VUID-ClipDistance-ClipDistance-04191", + "text": " The variable decorated with ClipDistance must be declared as an array of 32-bit floating-point values" + } + ] + }, + "ClipDistancePerViewNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-ClipDistancePerViewNV-ClipDistancePerViewNV-04192", + "text": " The ClipDistancePerViewNV decoration must be used only within the MeshNV {ExecutionModel}" + }, + { + "vuid": "VUID-ClipDistancePerViewNV-ClipDistancePerViewNV-04193", + "text": " The variable decorated with ClipDistancePerViewNV must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-ClipDistancePerViewNV-ClipDistancePerViewNV-04194", + "text": " The variable decorated with ClipDistancePerViewNV must also be decorated with the PerViewNV decoration." 
+ }, + { + "vuid": "VUID-ClipDistancePerViewNV-ClipDistancePerViewNV-04195", + "text": " The variable decorated with ClipDistancePerViewNV must be declared as a two-dimensional array of 32-bit floating-point values" + } + ] + }, + "CullDistance": { + "core": [ + { + "vuid": "VUID-CullDistance-CullDistance-04196", + "text": " The CullDistance decoration must be used only within the MeshNV, Vertex, Fragment, TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-CullDistance-CullDistance-04197", + "text": " The variable decorated with CullDistance within the MeshNV or Vertex {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-CullDistance-CullDistance-04198", + "text": " The variable decorated with CullDistance within the Fragment {ExecutionModel} must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-CullDistance-CullDistance-04199", + "text": " The variable decorated with CullDistance within the TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel} must not be declared using a {StorageClass} other than Input or Output" + }, + { + "vuid": "VUID-CullDistance-CullDistance-04200", + "text": " The variable decorated with CullDistance must be declared as an array of 32-bit floating-point values" + } + ] + }, + "CullDistancePerViewNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-CullDistancePerViewNV-CullDistancePerViewNV-04201", + "text": " The CullDistancePerViewNV decoration must be used only within the MeshNV {ExecutionModel}" + }, + { + "vuid": "VUID-CullDistancePerViewNV-CullDistancePerViewNV-04202", + "text": " The variable decorated with CullDistancePerViewNV must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-CullDistancePerViewNV-CullDistancePerViewNV-04203", + "text": " The variable decorated with CullDistancePerViewNV must also be decorated with the PerViewNV decoration." 
+ }, + { + "vuid": "VUID-CullDistancePerViewNV-CullDistancePerViewNV-04204", + "text": " The variable decorated with CullDistancePerViewNV must be declared as a two-dimensional array of 32-bit floating-point values" + } + ] + }, + "DeviceIndex": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-DeviceIndex-DeviceIndex-04205", + "text": " The variable decorated with DeviceIndex must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-DeviceIndex-DeviceIndex-04206", + "text": " The variable decorated with DeviceIndex must be declared as a scalar 32-bit integer value" + } + ] + }, + "DrawIndex": { + "(VK_VERSION_1_1,VK_KHR_shader_draw_parameters)": [ + { + "vuid": "VUID-DrawIndex-DrawIndex-04207", + "text": " The DrawIndex decoration must be used only within the Vertex, MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-DrawIndex-DrawIndex-04208", + "text": " The variable decorated with DrawIndex must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-DrawIndex-DrawIndex-04209", + "text": " The variable decorated with DrawIndex must be declared as a scalar 32-bit integer value" + } + ] + }, + "FragCoord": { + "core": [ + { + "vuid": "VUID-FragCoord-FragCoord-04210", + "text": " The FragCoord decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FragCoord-FragCoord-04211", + "text": " The variable decorated with FragCoord must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-FragCoord-FragCoord-04212", + "text": " The variable decorated with FragCoord must be declared as a four-component vector of 32-bit floating-point values" + } + ] + }, + "FragDepth": { + "core": [ + { + "vuid": "VUID-FragDepth-FragDepth-04213", + "text": " The FragDepth decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FragDepth-FragDepth-04214", + "text": " The variable decorated with FragDepth must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-FragDepth-FragDepth-04215", + "text": " The variable decorated with FragDepth must be declared as a scalar 32-bit floating-point value" + }, + { + "vuid": "VUID-FragDepth-FragDepth-04216", + "text": " If the shader dynamically writes to the variable decorated with FragDepth, the DepthReplacing {ExecutionMode} must be declared" + } + ] + }, + "FragInvocationCountEXT": { + "(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-FragInvocationCountEXT-FragInvocationCountEXT-04217", + "text": " The FragInvocationCountEXT decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FragInvocationCountEXT-FragInvocationCountEXT-04218", + "text": " The variable decorated with FragInvocationCountEXT must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-FragInvocationCountEXT-FragInvocationCountEXT-04219", + "text": " The variable decorated with FragInvocationCountEXT must be declared as a scalar 32-bit integer value" + } + ] + }, + "FragSizeEXT": { + "(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-FragSizeEXT-FragSizeEXT-04220", + "text": " The FragSizeEXT decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FragSizeEXT-FragSizeEXT-04221", + "text": " The variable decorated with FragSizeEXT must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-FragSizeEXT-FragSizeEXT-04222", + "text": " The variable decorated with FragSizeEXT must be declared as a two-component vector of 32-bit integer values" 
+ } + ] + }, + "FragStencilRefEXT": { + "(VK_EXT_shader_stencil_export)": [ + { + "vuid": "VUID-FragStencilRefEXT-FragStencilRefEXT-04223", + "text": " The FragStencilRefEXT decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FragStencilRefEXT-FragStencilRefEXT-04224", + "text": " The variable decorated with FragStencilRefEXT must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-FragStencilRefEXT-FragStencilRefEXT-04225", + "text": " The variable decorated with FragStencilRefEXT must be declared as a scalar integer value" + } + ] + }, + "FragmentSizeNV": { + "(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-FragmentSizeNV-FragmentSizeNV-04226", + "text": " The FragmentSizeNV decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FragmentSizeNV-FragmentSizeNV-04227", + "text": " The variable decorated with FragmentSizeNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-FragmentSizeNV-FragmentSizeNV-04228", + "text": " The variable decorated with FragmentSizeNV must be declared as a two-component vector of 32-bit integer values" + } + ] + }, + "FrontFacing": { + "core": [ + { + "vuid": "VUID-FrontFacing-FrontFacing-04229", + "text": " The FrontFacing decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FrontFacing-FrontFacing-04230", + "text": " The variable decorated with FrontFacing must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-FrontFacing-FrontFacing-04231", + "text": " The variable decorated with FrontFacing must be declared as a boolean value" + } + ] + }, + "FullyCoveredEXT": { + "(VK_EXT_conservative_rasterization)": [ + { + "vuid": "VUID-FullyCoveredEXT-FullyCoveredEXT-04232", + "text": " The FullyCoveredEXT decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-FullyCoveredEXT-FullyCoveredEXT-04233", + "text": " The variable decorated with FullyCoveredEXT must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-FullyCoveredEXT-FullyCoveredEXT-04234", + "text": " The variable decorated with FullyCoveredEXT must be declared as a boolean value" + } + ], + "(VK_EXT_conservative_rasterization)+(VK_EXT_post_depth_coverage)": [ + { + "vuid": "VUID-FullyCoveredEXT-conservativeRasterizationPostDepthCoverage-04235", + "text": " If VkPhysicalDeviceConservativeRasterizationPropertiesEXT::conservativeRasterizationPostDepthCoverage is not supported the PostDepthCoverage {ExecutionMode} must not be declared, when a variable with the FullyCoveredEXT decoration is declared" + } + ] + }, + "GlobalInvocationId": { + "core": [ + { + "vuid": "VUID-GlobalInvocationId-GlobalInvocationId-04236", + "text": " The GlobalInvocationId decoration must be used only within the GLCompute, MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-GlobalInvocationId-GlobalInvocationId-04237", + "text": " The variable decorated with GlobalInvocationId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-GlobalInvocationId-GlobalInvocationId-04238", + "text": " The variable decorated with GlobalInvocationId must be declared as a three-component vector of 32-bit integer values" + } + ] + }, + "HelperInvocation": { + "core": [ + { + "vuid": "VUID-HelperInvocation-HelperInvocation-04239", + "text": " The HelperInvocation decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-HelperInvocation-HelperInvocation-04240", + 
"text": " The variable decorated with HelperInvocation must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-HelperInvocation-HelperInvocation-04241", + "text": " The variable decorated with HelperInvocation must be declared as a boolean value" + } + ] + }, + "HitKindKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-HitKindKHR-HitKindKHR-04242", + "text": " The HitKindKHR decoration must be used only within the AnyHitKHR or ClosestHitKHR {ExecutionModel}" + }, + { + "vuid": "VUID-HitKindKHR-HitKindKHR-04243", + "text": " The variable decorated with HitKindKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-HitKindKHR-HitKindKHR-04244", + "text": " The variable decorated with HitKindKHR must be declared as a scalar 32-bit integer value" + } + ] + }, + "HitTNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-HitTNV-HitTNV-04245", + "text": " The HitTNV decoration must be used only within the AnyHitNV or ClosestHitNV {ExecutionModel}" + }, + { + "vuid": "VUID-HitTNV-HitTNV-04246", + "text": " The variable decorated with HitTNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-HitTNV-HitTNV-04247", + "text": " The variable decorated with HitTNV must be declared as a scalar 32-bit floating-point value" + } + ] + }, + "IncomingRayFlagsKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-IncomingRayFlagsKHR-IncomingRayFlagsKHR-04248", + "text": " The IncomingRayFlagsKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, ClosestHitKHR, or MissKHR {ExecutionModel}" + }, + { + "vuid": "VUID-IncomingRayFlagsKHR-IncomingRayFlagsKHR-04249", + "text": " The variable decorated with IncomingRayFlagsKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-IncomingRayFlagsKHR-IncomingRayFlagsKHR-04250", + "text": " The variable decorated with IncomingRayFlagsKHR must be declared as a scalar 32-bit integer value" + } + ] + }, + "InstanceCustomIndexKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-InstanceCustomIndexKHR-InstanceCustomIndexKHR-04251", + "text": " The InstanceCustomIndexKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel}" + }, + { + "vuid": "VUID-InstanceCustomIndexKHR-InstanceCustomIndexKHR-04252", + "text": " The variable decorated with InstanceCustomIndexKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-InstanceCustomIndexKHR-InstanceCustomIndexKHR-04253", + "text": " The variable decorated with InstanceCustomIndexKHR must be declared as a scalar 32-bit integer value" + } + ] + }, + "InstanceId": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-InstanceId-InstanceId-04254", + "text": " The InstanceId decoration must be used only within the IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel}" + }, + { + "vuid": "VUID-InstanceId-InstanceId-04255", + "text": " The variable decorated with InstanceId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-InstanceId-InstanceId-04256", + "text": " The variable decorated with InstanceId must be declared as a scalar 32-bit integer value" + } + ] + }, + "InvocationId": { + "core": [ + { + "vuid": "VUID-InvocationId-InvocationId-04257", + "text": " The InvocationId decoration must be used only within the TessellationControl or Geometry {ExecutionModel}" + }, + { + 
"vuid": "VUID-InvocationId-InvocationId-04258", + "text": " The variable decorated with InvocationId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-InvocationId-InvocationId-04259", + "text": " The variable decorated with InvocationId must be declared as a scalar 32-bit integer value" + } + ] + }, + "InvocationsPerPixelNV": { + "(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-InvocationsPerPixelNV-InvocationsPerPixelNV-04260", + "text": " The InvocationsPerPixelNV decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-InvocationsPerPixelNV-InvocationsPerPixelNV-04261", + "text": " The variable decorated with InvocationsPerPixelNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-InvocationsPerPixelNV-InvocationsPerPixelNV-04262", + "text": " The variable decorated with InvocationsPerPixelNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "InstanceIndex": { + "core": [ + { + "vuid": "VUID-InstanceIndex-InstanceIndex-04263", + "text": " The InstanceIndex decoration must be used only within the Vertex {ExecutionModel}" + }, + { + "vuid": "VUID-InstanceIndex-InstanceIndex-04264", + "text": " The variable decorated with InstanceIndex must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-InstanceIndex-InstanceIndex-04265", + "text": " The variable decorated with InstanceIndex must be declared as a scalar 32-bit integer value" + } + ] + }, + "LaunchIdKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-LaunchIdKHR-LaunchIdKHR-04266", + "text": " The LaunchIdKHR decoration must be used only within the RayGenerationKHR, IntersectionKHR, AnyHitKHR, ClosestHitKHR, MissKHR, or CallableKHR {ExecutionModel}" + }, + { + "vuid": "VUID-LaunchIdKHR-LaunchIdKHR-04267", + "text": " The variable decorated with LaunchIdKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-LaunchIdKHR-LaunchIdKHR-04268", + "text": " The variable decorated with LaunchIdKHR must be declared as a three-component vector of 32-bit integer values" + } + ] + }, + "LaunchSizeKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-LaunchSizeKHR-LaunchSizeKHR-04269", + "text": " The LaunchSizeKHR decoration must be used only within the RayGenerationKHR, IntersectionKHR, AnyHitKHR, ClosestHitKHR, MissKHR, or CallableKHR {ExecutionModel}" + }, + { + "vuid": "VUID-LaunchSizeKHR-LaunchSizeKHR-04270", + "text": " The variable decorated with LaunchSizeKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-LaunchSizeKHR-LaunchSizeKHR-04271", + "text": " The variable decorated with LaunchSizeKHR must be declared as a three-component vector of 32-bit integer values" + } + ] + }, + "Layer": { + "core": [ + { + "vuid": "VUID-Layer-Layer-04272", + "text": " The Layer decoration must be used only within the MeshNV, Vertex, TessellationEvaluation, Geometry, or Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-Layer-Layer-04274", + "text": " The variable decorated with Layer within the MeshNV, Vertex, TessellationEvaluation, or Geometry {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-Layer-Layer-04275", + "text": " The variable decorated with Layer within the Fragment {ExecutionModel} must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-Layer-Layer-04276", + "text": " The variable decorated with Layer must be declared as a scalar 32-bit integer value" + } + 
], + "(VK_VERSION_1_2)": [ + { + "vuid": "VUID-Layer-Layer-04273", + "text": " If the shaderOutputLayer feature is not enabled then the Layer decoration must be used only within the Geometry or Fragment {ExecutionModel}" + } + ] + }, + "LayerPerViewNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-LayerPerViewNV-LayerPerViewNV-04277", + "text": " The LayerPerViewNV decoration must be used only within the MeshNV {ExecutionModel}" + }, + { + "vuid": "VUID-LayerPerViewNV-LayerPerViewNV-04278", + "text": " The variable decorated with LayerPerViewNV must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-LayerPerViewNV-LayerPerViewNV-04279", + "text": " The variable decorated with LayerPerViewNV must also be decorated with the PerViewNV decoration." + }, + { + "vuid": "VUID-LayerPerViewNV-LayerPerViewNV-04280", + "text": " The variable decorated with LayerPerViewNV must be declared as an array of scalar 32-bit integer values" + } + ] + }, + "LocalInvocationId": { + "core": [ + { + "vuid": "VUID-LocalInvocationId-LocalInvocationId-04281", + "text": " The LocalInvocationId decoration must be used only within the GLCompute, MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-LocalInvocationId-LocalInvocationId-04282", + "text": " The variable decorated with LocalInvocationId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-LocalInvocationId-LocalInvocationId-04283", + "text": " The variable decorated with LocalInvocationId must be declared as a three-component vector of 32-bit integer values" + } + ] + }, + "LocalInvocationIndex": { + "core": [ + { + "vuid": "VUID-LocalInvocationIndex-LocalInvocationIndex-04284", + "text": " The LocalInvocationIndex decoration must be used only within the GLCompute, MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-LocalInvocationIndex-LocalInvocationIndex-04285", + "text": " The variable decorated with LocalInvocationIndex must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-LocalInvocationIndex-LocalInvocationIndex-04286", + "text": " The variable decorated with LocalInvocationIndex must be declared as a scalar 32-bit integer value" + } + ] + }, + "MeshViewCountNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-MeshViewCountNV-MeshViewCountNV-04287", + "text": " The MeshViewCountNV decoration must be used only within the MeshNV or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-MeshViewCountNV-MeshViewCountNV-04288", + "text": " The variable decorated with MeshViewCountNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-MeshViewCountNV-MeshViewCountNV-04289", + "text": " The variable decorated with MeshViewCountNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "MeshViewIndicesNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-MeshViewIndicesNV-MeshViewIndicesNV-04290", + "text": " The MeshViewIndicesNV decoration must be used only within the MeshNV or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-MeshViewIndicesNV-MeshViewIndicesNV-04291", + "text": " The variable decorated with MeshViewIndicesNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-MeshViewIndicesNV-MeshViewIndicesNV-04292", + "text": " The variable decorated with MeshViewIndicesNV must be declared as an array of scalar 32-bit integer values" + } + ] + }, + "NumSubgroups": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-NumSubgroups-NumSubgroups-04293", + "text": " The NumSubgroups decoration must be used only within the GLCompute, 
MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-NumSubgroups-NumSubgroups-04294", + "text": " The variable decorated with NumSubgroups must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-NumSubgroups-NumSubgroups-04295", + "text": " The variable decorated with NumSubgroups must be declared as a scalar 32-bit integer value" + } + ] + }, + "NumWorkgroups": { + "core": [ + { + "vuid": "VUID-NumWorkgroups-NumWorkgroups-04296", + "text": " The NumWorkgroups decoration must be used only within the GLCompute {ExecutionModel}" + }, + { + "vuid": "VUID-NumWorkgroups-NumWorkgroups-04297", + "text": " The variable decorated with NumWorkgroups must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-NumWorkgroups-NumWorkgroups-04298", + "text": " The variable decorated with NumWorkgroups must be declared as a three-component vector of 32-bit integer values" + } + ] + }, + "ObjectRayDirectionKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-ObjectRayDirectionKHR-ObjectRayDirectionKHR-04299", + "text": " The ObjectRayDirectionKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel}" + }, + { + "vuid": "VUID-ObjectRayDirectionKHR-ObjectRayDirectionKHR-04300", + "text": " The variable decorated with ObjectRayDirectionKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ObjectRayDirectionKHR-ObjectRayDirectionKHR-04301", + "text": " The variable decorated with ObjectRayDirectionKHR must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "ObjectRayOriginKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-ObjectRayOriginKHR-ObjectRayOriginKHR-04302", + "text": " The ObjectRayOriginKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel}" + }, + { + "vuid": "VUID-ObjectRayOriginKHR-ObjectRayOriginKHR-04303", + "text": " The variable decorated with ObjectRayOriginKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ObjectRayOriginKHR-ObjectRayOriginKHR-04304", + "text": " The variable decorated with ObjectRayOriginKHR must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "ObjectToWorldKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-ObjectToWorldKHR-ObjectToWorldKHR-04305", + "text": " The ObjectToWorldKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel}" + }, + { + "vuid": "VUID-ObjectToWorldKHR-ObjectToWorldKHR-04306", + "text": " The variable decorated with ObjectToWorldKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ObjectToWorldKHR-ObjectToWorldKHR-04307", + "text": " The variable decorated with ObjectToWorldKHR must be declared as a matrix with four columns of three-component vectors of 32-bit floating-point values" + } + ] + }, + "PatchVertices": { + "core": [ + { + "vuid": "VUID-PatchVertices-PatchVertices-04308", + "text": " The PatchVertices decoration must be used only within the TessellationControl or TessellationEvaluation {ExecutionModel}" + }, + { + "vuid": "VUID-PatchVertices-PatchVertices-04309", + "text": " The variable decorated with PatchVertices must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-PatchVertices-PatchVertices-04310", + "text": " The variable decorated with PatchVertices must be declared as a scalar 32-bit 
integer value" + } + ] + }, + "PointCoord": { + "core": [ + { + "vuid": "VUID-PointCoord-PointCoord-04311", + "text": " The PointCoord decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-PointCoord-PointCoord-04312", + "text": " The variable decorated with PointCoord must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-PointCoord-PointCoord-04313", + "text": " The variable decorated with PointCoord must be declared as a two-component vector of 32-bit floating-point values" + } + ] + }, + "PointSize": { + "core": [ + { + "vuid": "VUID-PointSize-PointSize-04314", + "text": " The PointSize decoration must be used only within the MeshNV, Vertex, TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-PointSize-PointSize-04315", + "text": " The variable decorated with PointSize within the MeshNV or Vertex {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-PointSize-PointSize-04316", + "text": " The variable decorated with PointSize within the TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel} must not be declared using a {StorageClass} other than Input or Output" + }, + { + "vuid": "VUID-PointSize-PointSize-04317", + "text": " The variable decorated with PointSize must be declared as a scalar 32-bit floating-point value" + } + ] + }, + "Position": { + "core": [ + { + "vuid": "VUID-Position-Position-04318", + "text": " The Position decoration must be used only within the MeshNV, Vertex, TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-Position-Position-04319", + "text": " The variable decorated with Position within MeshNV or Vertex {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-Position-Position-04320", + "text": " The variable decorated with Position within TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel} must not be declared using a {StorageClass} other than Input or Output" + }, + { + "vuid": "VUID-Position-Position-04321", + "text": " The variable decorated with Position must be declared as a four-component vector of 32-bit floating-point values" + } + ] + }, + "PositionPerViewNV": { + "(VK_NVX_multiview_per_view_attributes)": [ + { + "vuid": "VUID-PositionPerViewNV-PositionPerViewNV-04322", + "text": " The PositionPerViewNV decoration must be used only within the MeshNV, Vertex, TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-PositionPerViewNV-PositionPerViewNV-04323", + "text": " The variable decorated with PositionPerViewNV within the Vertex, or MeshNV {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-PositionPerViewNV-PositionPerViewNV-04324", + "text": " The variable decorated with PositionPerViewNV within the TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel} must not be declared using a {StorageClass} other than Input or Output" + }, + { + "vuid": "VUID-PositionPerViewNV-PositionPerViewNV-04325", + "text": " The variable decorated with PositionPerViewNV must be declared as an array of four-component vector of 32-bit floating-point values with at least as many elements as the maximum view in the subpass’s view mask plus one" + }, + { + "vuid": "VUID-PositionPerViewNV-PositionPerViewNV-04326", + "text": " The array variable decorated with PositionPerViewNV must only be indexed by a 
constant or specialization constant" + } + ] + }, + "PrimitiveCountNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-PrimitiveCountNV-PrimitiveCountNV-04327", + "text": " The PrimitiveCountNV decoration must be used only within the MeshNV {ExecutionModel}" + }, + { + "vuid": "VUID-PrimitiveCountNV-PrimitiveCountNV-04328", + "text": " The variable decorated with PrimitiveCountNV must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-PrimitiveCountNV-PrimitiveCountNV-04329", + "text": " The variable decorated with PrimitiveCountNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "PrimitiveId": { + "core": [ + { + "vuid": "VUID-PrimitiveId-PrimitiveId-04330", + "text": " The PrimitiveId decoration must be used only within the MeshNV, IntersectionKHR, AnyHitKHR, ClosestHitKHR, TessellationControl, TessellationEvaluation, Geometry, or Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-PrimitiveId-Fragment-04331", + "text": " If pipeline contains both the Fragment and Geometry {ExecutionModel} and a variable decorated with PrimitiveId is read from Fragment shader, then the Geometry shader must write to the output variables decorated with PrimitiveId in all execution paths" + }, + { + "vuid": "VUID-PrimitiveId-Fragment-04332", + "text": " If pipeline contains both the Fragment and MeshNV {ExecutionModel} and a variable decorated with PrimitiveId is read from Fragment shader, then the MeshNV shader must write to the output variables decorated with PrimitiveId in all execution paths" + }, + { + "vuid": "VUID-PrimitiveId-Fragment-04333", + "text": " If Fragment {ExecutionModel} contains a variable decorated with PrimitiveId either the Geometry or Tessellation capability must also be declared" + }, + { + "vuid": "VUID-PrimitiveId-PrimitiveId-04334", + "text": " The variable decorated with PrimitiveId within the TessellationControl, TessellationEvaluation, Fragment, IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel} must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-PrimitiveId-PrimitiveId-04335", + "text": " The variable decorated with PrimitiveId within the Geometry {ExecutionModel} must be declared using the Input or Output {StorageClass}" + }, + { + "vuid": "VUID-PrimitiveId-PrimitiveId-04336", + "text": " The variable decorated with PrimitiveId within the MeshNV {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-PrimitiveId-PrimitiveId-04337", + "text": " The variable decorated with PrimitiveId must be declared as a scalar 32-bit integer value" + } + ] + }, + "PrimitiveIndicesNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-PrimitiveIndicesNV-PrimitiveIndicesNV-04338", + "text": " The PrimitiveIndicesNV decoration must be used only within the MeshNV {ExecutionModel}" + }, + { + "vuid": "VUID-PrimitiveIndicesNV-PrimitiveIndicesNV-04339", + "text": " The variable decorated with PrimitiveIndicesNV must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-PrimitiveIndicesNV-PrimitiveIndicesNV-04340", + "text": " The variable decorated with PrimitiveIndicesNV must be declared as an array of scalar 32-bit integer values" + }, + { + "vuid": "VUID-PrimitiveIndicesNV-PrimitiveIndicesNV-04341", + "text": " All index values of the array decorated with PrimitiveIndicesNV must be in the range [0, N-1], where N is the value specified by the OutputVertices {ExecutionMode}" + }, + { + "vuid": "VUID-PrimitiveIndicesNV-OutputPoints-04342", + "text": " If the {ExecutionMode} is 
OutputPoints, then the array decorated with PrimitiveIndicesNV must be the size of the value specified by OutputPrimitivesNV" + }, + { + "vuid": "VUID-PrimitiveIndicesNV-OutputLinesNV-04343", + "text": " If the {ExecutionMode} is OutputLinesNV, then the array decorated with PrimitiveIndicesNV must be the size of two times the value specified by OutputPrimitivesNV" + }, + { + "vuid": "VUID-PrimitiveIndicesNV-OutputTrianglesNV-04344", + "text": " If the {ExecutionMode} is OutputTrianglesNV, then the array decorated with PrimitiveIndicesNV must be the size of three times the value specified by OutputPrimitivesNV" + } + ] + }, + "PrimitiveShadingRateKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-PrimitiveShadingRateKHR-PrimitiveShadingRateKHR-04484", + "text": " The PrimitiveShadingRateKHR decoration must be used only within the MeshNV, Vertex, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-PrimitiveShadingRateKHR-PrimitiveShadingRateKHR-04485", + "text": " The variable decorated with PrimitiveShadingRateKHR must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-PrimitiveShadingRateKHR-PrimitiveShadingRateKHR-04486", + "text": " The variable decorated with PrimitiveShadingRateKHR must be declared as a scalar 32-bit integer value" + }, + { + "vuid": "VUID-PrimitiveShadingRateKHR-PrimitiveShadingRateKHR-04487", + "text": " The value written to PrimitiveShadingRateKHR must include no more than one of Vertical2Pixels and Vertical4Pixels" + }, + { + "vuid": "VUID-PrimitiveShadingRateKHR-PrimitiveShadingRateKHR-04488", + "text": " The value written to PrimitiveShadingRateKHR must include no more than one of Horizontal2Pixels and Horizontal4Pixels" + }, + { + "vuid": "VUID-PrimitiveShadingRateKHR-PrimitiveShadingRateKHR-04489", + "text": " The value written to PrimitiveShadingRateKHR must not have any bits set other than those defined by Fragment Shading Rate Flags enumerants in the SPIR-V specification" + } + ] + }, + "RayGeometryIndexKHR": { + "(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-RayGeometryIndexKHR-RayGeometryIndexKHR-04345", + "text": " The RayGeometryIndexKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel}" + }, + { + "vuid": "VUID-RayGeometryIndexKHR-RayGeometryIndexKHR-04346", + "text": " The variable decorated with RayGeometryIndexKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-RayGeometryIndexKHR-RayGeometryIndexKHR-04347", + "text": " The variable decorated with RayGeometryIndexKHR must be declared as a scalar 32-bit integer value" + } + ] + }, + "RayTmaxKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-RayTmaxKHR-RayTmaxKHR-04348", + "text": " The RayTmaxKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, ClosestHitKHR, or MissKHR {ExecutionModel}" + }, + { + "vuid": "VUID-RayTmaxKHR-RayTmaxKHR-04349", + "text": " The variable decorated with RayTmaxKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-RayTmaxKHR-RayTmaxKHR-04350", + "text": " The variable decorated with RayTmaxKHR must be declared as a scalar 32-bit floating-point value" + } + ] + }, + "RayTminKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-RayTminKHR-RayTminKHR-04351", + "text": " The RayTminKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, ClosestHitKHR, or MissKHR {ExecutionModel}" + }, + { + "vuid": "VUID-RayTminKHR-RayTminKHR-04352", + 
"text": " The variable decorated with RayTminKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-RayTminKHR-RayTminKHR-04353", + "text": " The variable decorated with RayTminKHR must be declared as a scalar 32-bit floating-point value" + } + ] + }, + "SampleId": { + "core": [ + { + "vuid": "VUID-SampleId-SampleId-04354", + "text": " The SampleId decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-SampleId-SampleId-04355", + "text": " The variable decorated with SampleId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SampleId-SampleId-04356", + "text": " The variable decorated with SampleId must be declared as a scalar 32-bit integer value" + } + ] + }, + "SampleMask": { + "core": [ + { + "vuid": "VUID-SampleMask-SampleMask-04357", + "text": " The SampleMask decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-SampleMask-SampleMask-04358", + "text": " The variable decorated with SampleMask must be declared using the Input or Output {StorageClass}" + }, + { + "vuid": "VUID-SampleMask-SampleMask-04359", + "text": " The variable decorated with SampleMask must be declared as an array of 32-bit integer values" + } + ] + }, + "SamplePosition": { + "core": [ + { + "vuid": "VUID-SamplePosition-SamplePosition-04360", + "text": " The SamplePosition decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-SamplePosition-SamplePosition-04361", + "text": " The variable decorated with SamplePosition must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SamplePosition-SamplePosition-04362", + "text": " The variable decorated with SamplePosition must be declared as a two-component vector of 32-bit floating-point values" + } + ] + }, + "ShadingRateKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-ShadingRateKHR-ShadingRateKHR-04490", + "text": " The ShadingRateKHR decoration must be used only within the Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-ShadingRateKHR-ShadingRateKHR-04491", + "text": " The variable decorated with ShadingRateKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ShadingRateKHR-ShadingRateKHR-04492", + "text": " The variable decorated with ShadingRateKHR must be declared as a scalar 32-bit integer value" + } + ] + }, + "SMCountNV": { + "(VK_NV_shader_sm_builtins)": [ + { + "vuid": "VUID-SMCountNV-SMCountNV-04363", + "text": " The variable decorated with SMCountNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SMCountNV-SMCountNV-04364", + "text": " The variable decorated with SMCountNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "SMIDNV": { + "(VK_NV_shader_sm_builtins)": [ + { + "vuid": "VUID-SMIDNV-SMIDNV-04365", + "text": " The variable decorated with SMIDNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SMIDNV-SMIDNV-04366", + "text": " The variable decorated with SMIDNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "SubgroupId": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-SubgroupId-SubgroupId-04367", + "text": " The SubgroupId decoration must be used only within the GLCompute, MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-SubgroupId-SubgroupId-04368", + "text": " The variable decorated with SubgroupId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupId-SubgroupId-04369", + "text": " The variable 
decorated with SubgroupId must be declared as a scalar 32-bit integer value" + } + ] + }, + "SubgroupEqMask": { + "(VK_VERSION_1_1,VK_EXT_shader_subgroup_ballot)": [ + { + "vuid": "VUID-SubgroupEqMask-SubgroupEqMask-04370", + "text": " The variable decorated with SubgroupEqMask must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupEqMask-SubgroupEqMask-04371", + "text": " The variable decorated with SubgroupEqMask must be declared as a four-component vector of 32-bit integer values" + } + ] + }, + "SubgroupGeMask": { + "(VK_VERSION_1_1,VK_EXT_shader_subgroup_ballot)": [ + { + "vuid": "VUID-SubgroupGeMask-SubgroupGeMask-04372", + "text": " The variable decorated with SubgroupGeMask must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupGeMask-SubgroupGeMask-04373", + "text": " The variable decorated with SubgroupGeMask must be declared as a four-component vector of 32-bit integer values" + } + ] + }, + "SubgroupGtMask": { + "(VK_VERSION_1_1,VK_EXT_shader_subgroup_ballot)": [ + { + "vuid": "VUID-SubgroupGtMask-SubgroupGtMask-04374", + "text": " The variable decorated with SubgroupGtMask must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupGtMask-SubgroupGtMask-04375", + "text": " The variable decorated with SubgroupGtMask must be declared as a four-component vector of 32-bit integer values" + } + ] + }, + "SubgroupLeMask": { + "(VK_VERSION_1_1,VK_EXT_shader_subgroup_ballot)": [ + { + "vuid": "VUID-SubgroupLeMask-SubgroupLeMask-04376", + "text": " The variable decorated with SubgroupLeMask must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupLeMask-SubgroupLeMask-04377", + "text": " The variable decorated with SubgroupLeMask must be declared as a four-component vector of 32-bit integer values" + } + ] + }, + "SubgroupLtMask": { + "(VK_VERSION_1_1,VK_EXT_shader_subgroup_ballot)": [ + { + "vuid": "VUID-SubgroupLtMask-SubgroupLtMask-04378", + "text": " The variable decorated with SubgroupLtMask must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupLtMask-SubgroupLtMask-04379", + "text": " The variable decorated with SubgroupLtMask must be declared as a four-component vector of 32-bit integer values" + } + ] + }, + "SubgroupLocalInvocationId": { + "(VK_VERSION_1_1,VK_EXT_shader_subgroup_ballot)": [ + { + "vuid": "VUID-SubgroupLocalInvocationId-SubgroupLocalInvocationId-04380", + "text": " The variable decorated with SubgroupLocalInvocationId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupLocalInvocationId-SubgroupLocalInvocationId-04381", + "text": " The variable decorated with SubgroupLocalInvocationId must be declared as a scalar 32-bit integer value" + } + ] + }, + "SubgroupSize": { + "(VK_VERSION_1_1,VK_EXT_shader_subgroup_ballot)": [ + { + "vuid": "VUID-SubgroupSize-SubgroupSize-04382", + "text": " The variable decorated with SubgroupSize must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-SubgroupSize-SubgroupSize-04383", + "text": " The variable decorated with SubgroupSize must be declared as a scalar 32-bit integer value" + } + ] + }, + "TaskCountNV": { + "(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-TaskCountNV-TaskCountNV-04384", + "text": " The TaskCountNV decoration must be used only within the TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-TaskCountNV-TaskCountNV-04385", + "text": " The variable decorated with TaskCountNV must be declared using the Output {StorageClass}" + }, + { + 
"vuid": "VUID-TaskCountNV-TaskCountNV-04386", + "text": " The variable decorated with TaskCountNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "TessCoord": { + "core": [ + { + "vuid": "VUID-TessCoord-TessCoord-04387", + "text": " The TessCoord decoration must be used only within the TessellationEvaluation {ExecutionModel}" + }, + { + "vuid": "VUID-TessCoord-TessCoord-04388", + "text": " The variable decorated with TessCoord must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-TessCoord-TessCoord-04389", + "text": " The variable decorated with TessCoord must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "TessLevelOuter": { + "core": [ + { + "vuid": "VUID-TessLevelOuter-TessLevelOuter-04390", + "text": " The TessLevelOuter decoration must be used only within the TessellationControl or TessellationEvaluation {ExecutionModel}" + }, + { + "vuid": "VUID-TessLevelOuter-TessLevelOuter-04391", + "text": " The variable decorated with TessLevelOuter within the TessellationControl {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-TessLevelOuter-TessLevelOuter-04392", + "text": " The variable decorated with TessLevelOuter within the TessellationEvaluation {ExecutionModel} must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-TessLevelOuter-TessLevelOuter-04393", + "text": " The variable decorated with TessLevelOuter must be declared as an array of size four, containing 32-bit floating-point values" + } + ] + }, + "TessLevelInner": { + "core": [ + { + "vuid": "VUID-TessLevelInner-TessLevelInner-04394", + "text": " The TessLevelInner decoration must be used only within the TessellationControl or TessellationEvaluation {ExecutionModel}" + }, + { + "vuid": "VUID-TessLevelInner-TessLevelInner-04395", + "text": " The variable decorated with TessLevelInner within the TessellationControl {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-TessLevelInner-TessLevelInner-04396", + "text": " The variable decorated with TessLevelInner within the TessellationEvaluation {ExecutionModel} must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-TessLevelInner-TessLevelInner-04397", + "text": " The variable decorated with TessLevelInner must be declared as an array of size two, containing 32-bit floating-point values" + } + ] + }, + "VertexIndex": { + "core": [ + { + "vuid": "VUID-VertexIndex-VertexIndex-04398", + "text": " The VertexIndex decoration must be used only within the Vertex {ExecutionModel}" + }, + { + "vuid": "VUID-VertexIndex-VertexIndex-04399", + "text": " The variable decorated with VertexIndex must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-VertexIndex-VertexIndex-04400", + "text": " The variable decorated with VertexIndex must be declared as a scalar 32-bit integer value" + } + ] + }, + "ViewIndex": { + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-ViewIndex-ViewIndex-04401", + "text": " The ViewIndex decoration must not be used within the GLCompute {ExecutionModel}" + }, + { + "vuid": "VUID-ViewIndex-ViewIndex-04402", + "text": " The variable decorated with ViewIndex must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ViewIndex-ViewIndex-04403", + "text": " The variable decorated with ViewIndex must be declared as a scalar 32-bit integer value" + } + ] + }, + "ViewportIndex": { + "core": [ + { + "vuid": 
"VUID-ViewportIndex-ViewportIndex-04404", + "text": " The ViewportIndex decoration must be used only within the MeshNV, Vertex, TessellationEvaluation, Geometry, or Fragment {ExecutionModel}" + }, + { + "vuid": "VUID-ViewportIndex-ViewportIndex-04406", + "text": " The variable decorated with ViewportIndex within the MeshNV, Vertex, TessellationEvaluation, or Geometry {ExecutionModel} must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-ViewportIndex-ViewportIndex-04407", + "text": " The variable decorated with ViewportIndex within the Fragment {ExecutionModel} must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ViewportIndex-ViewportIndex-04408", + "text": " The variable decorated with ViewportIndex must be declared as a scalar 32-bit integer value" + } + ], + "(VK_VERSION_1_2)": [ + { + "vuid": "VUID-ViewportIndex-ViewportIndex-04405", + "text": " If the shaderOutputViewportIndex feature is not enabled then the ViewportIndex decoration must be used only within the Geometry or Fragment {ExecutionModel}" + } + ] + }, + "ViewportMaskNV": { + "(VK_NV_viewport_array2)": [ + { + "vuid": "VUID-ViewportMaskNV-ViewportMaskNV-04409", + "text": " The ViewportMaskNV decoration must be used only within the Vertex, MeshNV, TessellationEvaluation, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-ViewportMaskNV-ViewportMaskNV-04410", + "text": " The variable decorated with ViewportMaskNV must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-ViewportMaskNV-ViewportMaskNV-04411", + "text": " The variable decorated with ViewportMaskNV must be declared as an array of 32-bit integer values" + } + ] + }, + "ViewportMaskPerViewNV": { + "(VK_NVX_multiview_per_view_attributes+VK_NV_viewport_array2)": [ + { + "vuid": "VUID-ViewportMaskPerViewNV-ViewportMaskPerViewNV-04412", + "text": " The ViewportMaskPerViewNV decoration must be used only within the Vertex, MeshNV, TessellationControl, TessellationEvaluation, or Geometry {ExecutionModel}" + }, + { + "vuid": "VUID-ViewportMaskPerViewNV-ViewportMaskPerViewNV-04413", + "text": " The variable decorated with ViewportMaskPerViewNV must be declared using the Output {StorageClass}" + }, + { + "vuid": "VUID-ViewportMaskPerViewNV-ViewportMaskPerViewNV-04414", + "text": " The variable decorated with ViewportMaskPerViewNV must be declared as an array of 32-bit integer values" + }, + { + "vuid": "VUID-ViewportMaskPerViewNV-ViewportMaskPerViewNV-04415", + "text": " The array decorated with ViewportMaskPerViewNV must be a size less than or equal to 32" + }, + { + "vuid": "VUID-ViewportMaskPerViewNV-ViewportMaskPerViewNV-04416", + "text": " The array decorated with ViewportMaskPerViewNV must be a size greater than the maximum view in the subpass’s view mask" + }, + { + "vuid": "VUID-ViewportMaskPerViewNV-ViewportMaskPerViewNV-04417", + "text": " The array variable decorated with ViewportMaskPerViewNV must only be indexed by a constant or specialization constant." 
+ } + ] + }, + "WarpsPerSMNV": { + "(VK_NV_shader_sm_builtins)": [ + { + "vuid": "VUID-WarpsPerSMNV-WarpsPerSMNV-04418", + "text": " The variable decorated with WarpsPerSMNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-WarpsPerSMNV-WarpsPerSMNV-04419", + "text": " The variable decorated with WarpsPerSMNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "WarpIDNV": { + "(VK_NV_shader_sm_builtins)": [ + { + "vuid": "VUID-WarpIDNV-WarpIDNV-04420", + "text": " The variable decorated with WarpIDNV must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-WarpIDNV-WarpIDNV-04421", + "text": " The variable decorated with WarpIDNV must be declared as a scalar 32-bit integer value" + } + ] + }, + "WorkgroupId": { + "core": [ + { + "vuid": "VUID-WorkgroupId-WorkgroupId-04422", + "text": " The WorkgroupId decoration must be used only within the GLCompute, MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-WorkgroupId-WorkgroupId-04423", + "text": " The variable decorated with WorkgroupId must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-WorkgroupId-WorkgroupId-04424", + "text": " The variable decorated with WorkgroupId must be declared as a three-component vector of 32-bit integer values" + } + ] + }, + "WorkgroupSize": { + "core": [ + { + "vuid": "VUID-WorkgroupSize-WorkgroupSize-04425", + "text": " The WorkgroupSize decoration must be used only within the GLCompute, MeshNV, or TaskNV {ExecutionModel}" + }, + { + "vuid": "VUID-WorkgroupSize-WorkgroupSize-04426", + "text": " The variable decorated with WorkgroupSize must be a specialization constant or a constant" + }, + { + "vuid": "VUID-WorkgroupSize-WorkgroupSize-04427", + "text": " The variable decorated with WorkgroupSize must be declared as a three-component vector of 32-bit integer values" + } + ] + }, + "WorldRayDirectionKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-WorldRayDirectionKHR-WorldRayDirectionKHR-04428", + "text": " The WorldRayDirectionKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, ClosestHitKHR, or MissKHR {ExecutionModel}" + }, + { + "vuid": "VUID-WorldRayDirectionKHR-WorldRayDirectionKHR-04429", + "text": " The variable decorated with WorldRayDirectionKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-WorldRayDirectionKHR-WorldRayDirectionKHR-04430", + "text": " The variable decorated with WorldRayDirectionKHR must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "WorldRayOriginKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-WorldRayOriginKHR-WorldRayOriginKHR-04431", + "text": " The WorldRayOriginKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, ClosestHitKHR, or MissKHR {ExecutionModel}" + }, + { + "vuid": "VUID-WorldRayOriginKHR-WorldRayOriginKHR-04432", + "text": " The variable decorated with WorldRayOriginKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-WorldRayOriginKHR-WorldRayOriginKHR-04433", + "text": " The variable decorated with WorldRayOriginKHR must be declared as a three-component vector of 32-bit floating-point values" + } + ] + }, + "WorldToObjectKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-WorldToObjectKHR-WorldToObjectKHR-04434", + "text": " The WorldToObjectKHR decoration must be used only within the IntersectionKHR, AnyHitKHR, or ClosestHitKHR {ExecutionModel}" + }, 
+ { + "vuid": "VUID-WorldToObjectKHR-WorldToObjectKHR-04435", + "text": " The variable decorated with WorldToObjectKHR must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-WorldToObjectKHR-WorldToObjectKHR-04436", + "text": " The variable decorated with WorldToObjectKHR must be declared as a matrix with four columns of three-component vectors of 32-bit floating-point values" + } + ] + }, "vkCreateQueryPool": { "core": [ { @@ -14806,7 +19542,7 @@ }, { "vuid": "VUID-VkQueryPoolCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkQueryPoolPerformanceCreateInfoKHR or VkQueryPoolPerformanceQueryCreateInfoINTEL" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkQueryPoolPerformanceCreateInfoKHR, VkQueryPoolPerformanceQueryCreateInfoINTEL, or VkVideoProfileKHR" }, { "vuid": "VUID-VkQueryPoolCreateInfo-sType-unique", @@ -14824,7 +19560,7 @@ "(VK_KHR_performance_query)": [ { "vuid": "VUID-VkQueryPoolCreateInfo-queryType-03222", - "text": " If queryType is VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the pNext chain must include a structure of type VkQueryPoolPerformanceCreateInfoKHR" + "text": " If queryType is VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the pNext chain must include a VkQueryPoolPerformanceCreateInfoKHR structure" } ] }, @@ -14836,7 +19572,7 @@ }, { "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-performanceCounterQueryPools-03237", - "text": " The performanceCounterQueryPools feature must be enabled" + "text": " The performanceCounterQueryPools feature must be enabled" }, { "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-03321", @@ -14988,10 +19724,6 @@ }, "vkCmdBeginQuery": { "core": [ - { - "vuid": "VUID-vkCmdBeginQuery-queryPool-01922", - "text": " queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer" - }, { "vuid": "VUID-vkCmdBeginQuery-None-00807", "text": " All queries used by the command must be unavailable" @@ -15020,6 +19752,10 @@ "vuid": "VUID-vkCmdBeginQuery-queryType-00805", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations" }, + { + "vuid": "VUID-vkCmdBeginQuery-queryPool-01922", + "text": " queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer" + }, { "vuid": "VUID-vkCmdBeginQuery-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -15045,6 +19781,18 @@ "text": " Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" } ], + "(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdBeginQuery-queryType-04728", + "text": " The queryType used to create queryPool must not be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + } + ], + "(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCmdBeginQuery-queryType-04729", + "text": " The queryType used to create queryPool must not be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV" + } + ], "(VK_VERSION_1_1)": [ { "vuid": "VUID-vkCmdBeginQuery-commandBuffer-01885", @@ -15057,6 +19805,12 @@ "text": 
" If called within a render pass instance, the sum of query and the number of bits set in the current subpass’s view mask must be less than or equal to the number of queries in queryPool" } ], + "(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-vkCmdBeginQuery-queryType-04862", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR the VkCommandPool that commandBuffer was allocated from must support video encode operations" + } + ], "(VK_EXT_transform_feedback)": [ { "vuid": "VUID-vkCmdBeginQuery-queryType-02327", @@ -15082,7 +19836,7 @@ }, { "vuid": "VUID-vkCmdBeginQuery-queryPool-03226", - "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and another query pool with a queryType VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR has been used within commandBuffer, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as commandBuffer, the performanceCounterMultipleQueryPools feature must be enabled" + "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and another query pool with a queryType VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR has been used within commandBuffer, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as commandBuffer, the performanceCounterMultipleQueryPools feature must be enabled" }, { "vuid": "VUID-vkCmdBeginQuery-None-02863", @@ -15091,11 +19845,7 @@ ] }, "vkCmdBeginQueryIndexedEXT": { - "core": [ - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-01922", - "text": " queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer" - }, + "(VK_EXT_transform_feedback)": [ { "vuid": "VUID-vkCmdBeginQueryIndexedEXT-None-00807", "text": " All queries used by the command must be unavailable" @@ -15124,6 +19874,10 @@ "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-00805", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations" }, + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-04753", + "text": " If the queryPool was created with the same queryType as that of another active query within commandBuffer, then index must not match the index used for the active query" + }, { "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02338", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the VkCommandPool that commandBuffer was allocated from must support graphics operations" @@ -15139,43 +19893,7 @@ { "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02341", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT then VkPhysicalDeviceTransformFeedbackPropertiesEXT::transformFeedbackQueries must be supported" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885", - "text": " commandBuffer must not be a protected command buffer" - } - ], - "(VK_VERSION_1_1,VK_KHR_multiview)": [ - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-query-00808", - "text": " If called within a render pass instance, the sum of query and the number of bits set in the current subpass’s view mask must be less than or equal to the number of queries in queryPool" - } - ], - 
"(VK_KHR_performance_query)": [ - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03223", - "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the profiling lock must have been held before vkBeginCommandBuffer was called on commandBuffer" }, - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03224", - "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, the query begin must be the first recorded command in commandBuffer" - }, - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03225", - "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, the begin command must not be recorded within a render pass instance" - }, - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03226", - "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and another query pool with a queryType VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR has been used within commandBuffer, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as commandBuffer, the performanceCounterMultipleQueryPools feature must be enabled" - }, - { - "vuid": "VUID-vkCmdBeginQueryIndexedEXT-None-02863", - "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, this command must not be recorded in a command buffer that, either directly or through secondary command buffers, also contains a vkCmdResetQueryPool command affecting the same query" - } - ], - "(VK_EXT_transform_feedback)": [ { "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -15200,6 +19918,58 @@ "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commonparent", "text": " Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" } + ], + "(VK_EXT_transform_feedback)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-04728", + "text": " The queryType used to create queryPool must not be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + } + ], + "(VK_EXT_transform_feedback)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-04729", + "text": " The queryType used to create queryPool must not be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV" + } + ], + "(VK_EXT_transform_feedback)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885", + "text": " commandBuffer must not be a protected command buffer" + } + ], + "(VK_EXT_transform_feedback)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-query-00808", + "text": " If called within a render pass instance, the sum of query and the number of bits set in the current subpass’s view mask must be less than or equal to the number of queries in queryPool" + } + ], + "(VK_EXT_transform_feedback)+(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-04862", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR the VkCommandPool that commandBuffer was allocated from must 
support video encode operations" + } + ], + "(VK_EXT_transform_feedback)+(VK_KHR_performance_query)": [ + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03223", + "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the profiling lock must have been held before vkBeginCommandBuffer was called on commandBuffer" + }, + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03224", + "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, the query begin must be the first recorded command in commandBuffer" + }, + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03225", + "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, the begin command must not be recorded within a render pass instance" + }, + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03226", + "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and another query pool with a queryType VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR has been used within commandBuffer, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as commandBuffer, the performanceCounterMultipleQueryPools feature must be enabled" + }, + { + "vuid": "VUID-vkCmdBeginQueryIndexedEXT-None-02863", + "text": " If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, this command must not be recorded in a command buffer that, either directly or through secondary command buffers, also contains a vkCmdResetQueryPool command affecting the same query" + } ] }, "vkCmdEndQuery": { @@ -15318,10 +20088,6 @@ "vuid": "VUID-vkGetQueryPoolResults-firstQuery-00813", "text": " firstQuery must be less than the number of queries in queryPool" }, - { - "vuid": "VUID-vkGetQueryPoolResults-flags-02827", - "text": " If VK_QUERY_RESULT_64_BIT is not set in flags, then pData and stride must be multiples of 4" - }, { "vuid": "VUID-vkGetQueryPoolResults-flags-00815", "text": " If VK_QUERY_RESULT_64_BIT is set in flags then pData and stride must be multiples of 8" @@ -15363,6 +20129,12 @@ "text": " queryPool must have been created, allocated, or retrieved from device" } ], + "!(VK_KHR_performance_query)": [ + { + "vuid": "VUID-vkGetQueryPoolResults-flags-02827", + "text": " If VK_QUERY_RESULT_64_BIT is not set in flags, then pData and stride must be multiples of 4" + } + ], "(VK_KHR_performance_query)": [ { "vuid": "VUID-vkGetQueryPoolResults-flags-02828", @@ -15372,6 +20144,10 @@ "vuid": "VUID-vkGetQueryPoolResults-queryType-03229", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, then pData and stride must be multiples of the size of VkPerformanceCounterResultKHR" }, + { + "vuid": "VUID-vkGetQueryPoolResults-queryType-04519", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, then stride must be large enough to contain VkQueryPoolPerformanceCreateInfoKHR::counterIndexCount used to create queryPool times the size of VkPerformanceCounterResultKHR." 
+ }, { "vuid": "VUID-vkGetQueryPoolResults-queryType-03230", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, flags must not contain VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_PARTIAL_BIT or VK_QUERY_RESULT_64_BIT" @@ -15380,6 +20156,16 @@ "vuid": "VUID-vkGetQueryPoolResults-queryType-03231", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the queryPool must have been recorded once for each pass as retrieved via a call to vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" } + ], + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-vkGetQueryPoolResults-queryType-04810", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR, flags must include VK_QUERY_RESULT_WITH_STATUS_BIT_KHR" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-flags-04811", + "text": " If flags includes VK_QUERY_RESULT_WITH_STATUS_BIT_KHR, it must not include VK_QUERY_RESULT_WITH_AVAILABILITY_BIT" + } ] }, "vkCmdCopyQueryPoolResults": { @@ -15472,6 +20258,124 @@ "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-02734", "text": " vkCmdCopyQueryPoolResults must not be called if the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL" } + ], + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-04812", + "text": " vkCmdCopyQueryPoolResults must not be called if the queryType used to create queryPool was" + } + ] + }, + "vkCmdWriteTimestamp2KHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03929", + "text": " If the geometry shaders feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03930", + "text": " If the tessellation shaders feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-synchronization2-03858", + "text": " The synchronization2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03859", + "text": " stage must only include a single pipeline stage" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03860", + "text": " stage must only include stages valid for the queue family that was used to create the command pool that commandBuffer was allocated from" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-queryPool-03861", + "text": " queryPool must have been created with a queryType of VK_QUERY_TYPE_TIMESTAMP" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-queryPool-03862", + "text": " The query identified by queryPool and query must be unavailable" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-timestampValidBits-03863", + "text": " The command pool’s queue family must support a non-zero timestampValidBits" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-query-04903", + "text": " query must be less than the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-parameter", + "text": " stage must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-requiredbitmask", + "text": " stage must not be 0" + }, + { + "vuid": 
"VUID-vkCmdWriteTimestamp2KHR-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-commonparent", + "text": " Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03931", + "text": " If the conditional rendering feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03932", + "text": " If the fragment density map feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03933", + "text": " If the transform feedback feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03934", + "text": " If the mesh shaders feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03935", + "text": " If the task shaders feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-stage-03936", + "text": " If the shading rate image feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + } + ], + "(VK_KHR_synchronization2)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-None-03864", + "text": " All queries used by the command must be unavailable" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp2KHR-query-03865", + "text": " If vkCmdWriteTimestamp2KHR is called within a render pass instance, the sum of query and the number of bits set in the current subpass’s view mask must be less than or equal to the number of queries in queryPool" + } ] }, "vkCmdWriteTimestamp": { @@ -15500,6 +20404,10 @@ "vuid": "VUID-vkCmdWriteTimestamp-timestampValidBits-00829", "text": " The command pool’s queue family must support a non-zero timestampValidBits" }, + { + "vuid": "VUID-vkCmdWriteTimestamp-query-04904", + "text": " query must be less than the number of queries in queryPool" + }, { "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -15846,11 +20754,11 @@ }, { "vuid": "VUID-vkReleasePerformanceConfigurationINTEL-configuration-parameter", - "text": " configuration must be a valid VkPerformanceConfigurationINTEL handle" + "text": " If configuration is not VK_NULL_HANDLE, configuration must be a valid VkPerformanceConfigurationINTEL handle" }, { "vuid": "VUID-vkReleasePerformanceConfigurationINTEL-configuration-parent", - "text": " configuration must have been created, allocated, or 
retrieved from device" + "text": " If configuration is a valid handle, it must have been created, allocated, or retrieved from device" } ] }, @@ -15910,10 +20818,6 @@ "vuid": "VUID-vkCmdClearColorImage-imageLayout-parameter", "text": " imageLayout must be a valid VkImageLayout value" }, - { - "vuid": "VUID-vkCmdClearColorImage-pColor-parameter", - "text": " pColor must be a valid pointer to a valid VkClearColorValue union" - }, { "vuid": "VUID-vkCmdClearColorImage-pRanges-parameter", "text": " pRanges must be a valid pointer to an array of rangeCount valid VkImageSubresourceRange structures" @@ -16464,6 +21368,128 @@ } ] }, + "vkCmdCopyBuffer2KHR": { + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-commandBuffer-01822", + "text": " If commandBuffer is an unprotected command buffer, then srcBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-commandBuffer-01823", + "text": " If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-commandBuffer-01824", + "text": " If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer" + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-pCopyBufferInfo-parameter", + "text": " pCopyBufferInfo must be a valid pointer to a valid VkCopyBufferInfo2KHR structure" + }, + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyBuffer2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkCopyBufferInfo2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkCopyBufferInfo2KHR-srcOffset-00113", + "text": " The srcOffset member of each element of pRegions must be less than the size of srcBuffer" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-dstOffset-00114", + "text": " The dstOffset member of each element of pRegions must be less than the size of dstBuffer" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-size-00115", + "text": " The size member of each element of pRegions must be less than or equal to the size of srcBuffer minus srcOffset" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-size-00116", + "text": " The size member of each element of pRegions must be less than or equal to the size of dstBuffer minus dstOffset" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-pRegions-00117", + "text": " The union of the source regions, and the union of the destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-srcBuffer-00118", + "text": " srcBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-srcBuffer-00119", + "text": " If srcBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-dstBuffer-00120", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": 
"VUID-VkCopyBufferInfo2KHR-dstBuffer-00121", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-srcBuffer-parameter", + "text": " srcBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkBufferCopy2KHR structures" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-VkCopyBufferInfo2KHR-commonparent", + "text": " Both of dstBuffer, and srcBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkBufferCopy2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkBufferCopy2KHR-size-01988", + "text": " The size must be greater than 0" + }, + { + "vuid": "VUID-VkBufferCopy2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR" + }, + { + "vuid": "VUID-VkBufferCopy2KHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, "vkCmdCopyImage": { "(VK_VERSION_1_1)": [ { @@ -16528,6 +21554,94 @@ "vuid": "VUID-vkCmdCopyImage-dstOffset-01784", "text": " The dstOffset and extent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" }, + { + "vuid": "VUID-vkCmdCopyImage-aspectMask-00142", + "text": " For each element of pRegions, srcSubresource.aspectMask must specify aspects present in srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-aspectMask-00143", + "text": " For each element of pRegions, dstSubresource.aspectMask must specify aspects present in dstImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcOffset-00144", + "text": " For each element of pRegions, srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcOffset-00145", + "text": " For each element of pRegions, srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-00146", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcOffset-00147", + "text": " For each element of pRegions, srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01785", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffset.z must be 0 and extent.depth must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01786", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffset.z must be 0 and extent.depth must be 1" + }, + { 
+ "vuid": "VUID-vkCmdCopyImage-srcImage-01787", + "text": " If srcImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, srcOffset.z must be 0" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01788", + "text": " If dstImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, dstOffset.z must be 0" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstOffset-00150", + "text": " For each element of pRegions, dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstOffset-00151", + "text": " For each element of pRegions, dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-00152", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstOffset-00153", + "text": " For each element of pRegions, dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01727", + "text": " If srcImage is a blocked image, then for each element of pRegions, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01728", + "text": " If srcImage is a blocked image, then for each element of pRegions, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01729", + "text": " If srcImage is a blocked image, then for each element of pRegions, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01730", + "text": " If srcImage is a blocked image, then for each element of pRegions, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + srcOffset.z) must equal the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01731", + "text": " If dstImage is a blocked image, then for each element of pRegions, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01732", + "text": " If dstImage is a blocked image, then for each element of pRegions, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01733", + "text": " If dstImage is a blocked image, then for each element of pRegions, extent.height must be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01734", + "text": " If dstImage is a blocked image, then for each element of pRegions, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + 
dstOffset.z) must equal the depth of the specified dstSubresource of dstImage" + }, { "vuid": "VUID-vkCmdCopyImage-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -16581,6 +21695,26 @@ { "vuid": "VUID-vkCmdCopyImage-dstImage-01996", "text": " The format features of dstImage must contain VK_FORMAT_FEATURE_TRANSFER_DST_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-04443", + "text": " If srcImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer must be 0 and srcSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-04444", + "text": " If dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, dstSubresource.baseArrayLayer must be 0 and dstSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01790", + "text": " If srcImage and dstImage are both of type VK_IMAGE_TYPE_2D, then for each element of pRegions, extent.depth must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01791", + "text": " If srcImage is of type VK_IMAGE_TYPE_2D, and dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, extent.depth must equal srcSubresource.layerCount" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01792", + "text": " If dstImage is of type VK_IMAGE_TYPE_2D, and srcImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, extent.depth must equal dstSubresource.layerCount" } ], "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ @@ -16613,6 +21747,34 @@ { "vuid": "VUID-vkCmdCopyImage-None-01549", "text": " In a copy to or from a plane of a multi-planar image, the VkFormat of the image and plane must be compatible according to the description of compatible planes for the plane being copied" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01551", + "text": " If neither srcImage nor dstImage has a multi-planar image format then for each element of pRegions, srcSubresource.aspectMask and dstSubresource.aspectMask must match" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01552", + "text": " If srcImage has a VkFormat with two planes then for each element of pRegions, srcSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01553", + "text": " If srcImage has a VkFormat with three planes then for each element of pRegions, srcSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01554", + "text": " If dstImage has a VkFormat with two planes then for each element of pRegions, dstSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01555", + "text": " If dstImage has a VkFormat with three planes then for each element of pRegions, dstSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01556", + "text": " If srcImage has a multi-planar image format and the dstImage does not have a multi-planar image format, then for each element of pRegions, dstSubresource.aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01557", + "text": " If dstImage has a multi-planar image format and the srcImage does not have a multi-planar image format, then for each element of pRegions, 
srcSubresource.aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT" } ], "!(VK_KHR_shared_presentable_image)": [ @@ -16640,6 +21802,16 @@ "vuid": "VUID-vkCmdCopyImage-dstImage-02542", "text": " dstImage and srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkCmdCopyImage-srcImage-00139", + "text": " If either srcImage or dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer and dstSubresource.baseArrayLayer must each be 0, and srcSubresource.layerCount and dstSubresource.layerCount must each be 1" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01789", + "text": " If srcImage or dstImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, extent.depth must be 1" + } ] }, "VkImageCopy": { @@ -16647,195 +21819,21 @@ { "vuid": "VUID-VkImageCopy-aspectMask-00137", "text": " The aspectMask member of srcSubresource and dstSubresource must match" - }, - { - "vuid": "VUID-VkImageCopy-srcOffset-00157", - "text": " If the calling command’s srcImage is a compressed image, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block" - }, - { - "vuid": "VUID-VkImageCopy-extent-00158", - "text": " If the calling command’s srcImage is a compressed image, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the source image subresource width" - }, - { - "vuid": "VUID-VkImageCopy-extent-00159", - "text": " If the calling command’s srcImage is a compressed image, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the source image subresource height" - }, - { - "vuid": "VUID-VkImageCopy-extent-00160", - "text": " If the calling command’s srcImage is a compressed image, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + srcOffset.z) must equal the source image subresource depth" - }, - { - "vuid": "VUID-VkImageCopy-dstOffset-00162", - "text": " If the calling command’s dstImage is a compressed format image, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block" - }, - { - "vuid": "VUID-VkImageCopy-extent-00163", - "text": " If the calling command’s dstImage is a compressed format image, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the destination image subresource width" - }, - { - "vuid": "VUID-VkImageCopy-extent-00164", - "text": " If the calling command’s dstImage is a compressed format image, extent.height must be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the destination image subresource height" - }, - { - "vuid": "VUID-VkImageCopy-extent-00165", - "text": " If the calling command’s dstImage is a compressed format image, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + dstOffset.z) must equal the destination image subresource depth" - } - ], - "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ - { - "vuid": "VUID-VkImageCopy-srcImage-01551", - "text": " If neither the calling command’s srcImage nor the calling command’s dstImage has a multi-planar image format then the aspectMask member of srcSubresource and dstSubresource must match" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01552", - "text": " If the calling command’s srcImage 
has a VkFormat with two planes then the srcSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01553", - "text": " If the calling command’s srcImage has a VkFormat with three planes then the srcSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01554", - "text": " If the calling command’s dstImage has a VkFormat with two planes then the dstSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01555", - "text": " If the calling command’s dstImage has a VkFormat with three planes then the dstSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01556", - "text": " If the calling command’s srcImage has a multi-planar image format and the dstImage does not have a multi-planar image format, the dstSubresource aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01557", - "text": " If the calling command’s dstImage has a multi-planar image format and the srcImage does not have a multi-planar image format, the srcSubresource aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01727", - "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01728", - "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the source image subresource width" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01729", - "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the source image subresource height" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01730", - "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + srcOffset.z) must equal the source image subresource depth" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01731", - "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01732", - "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the destination image subresource width" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01733", - "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, extent.height must be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the destination image subresource height" - }, - { - 
"vuid": "VUID-VkImageCopy-dstImage-01734", - "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + dstOffset.z) must equal the destination image subresource depth" } ], "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ { "vuid": "VUID-VkImageCopy-layerCount-00138", "text": " The layerCount member of srcSubresource and dstSubresource must match" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-00139", - "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01789", - "text": " If the calling command’s srcImage or dstImage is of type VK_IMAGE_TYPE_2D, then extent.depth must be 1" } ], "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ { "vuid": "VUID-VkImageCopy-extent-00140", "text": " The number of slices of the extent (for 3D) or layers of the srcSubresource (for non-3D) must match the number of slices of the extent (for 3D) or layers of the dstSubresource (for non-3D)" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-00141", - "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of the corresponding subresource must be 0 and 1, respectively" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01790", - "text": " If both srcImage and dstImage are of type VK_IMAGE_TYPE_2D then extent.depth must be 1" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01791", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_2D, and the dstImage is of type VK_IMAGE_TYPE_3D, then extent.depth must equal to the layerCount member of srcSubresource" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01792", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_2D, and the srcImage is of type VK_IMAGE_TYPE_3D, then extent.depth must equal to the layerCount member of dstSubresource" } ], "core": [ - { - "vuid": "VUID-VkImageCopy-aspectMask-00142", - "text": " The aspectMask member of srcSubresource must specify aspects present in the calling command’s srcImage" - }, - { - "vuid": "VUID-VkImageCopy-aspectMask-00143", - "text": " The aspectMask member of dstSubresource must specify aspects present in the calling command’s dstImage" - }, - { - "vuid": "VUID-VkImageCopy-srcOffset-00144", - "text": " srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the source image subresource width" - }, - { - "vuid": "VUID-VkImageCopy-srcOffset-00145", - "text": " srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the source image subresource height" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-00146", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.y must be 0 and extent.height must be 1" - }, - { - "vuid": "VUID-VkImageCopy-srcOffset-00147", - "text": " srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the source image subresource depth" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01785", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.z must be 0 and extent.depth must be 1" - }, - { - "vuid": 
"VUID-VkImageCopy-dstImage-01786", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.z must be 0 and extent.depth must be 1" - }, - { - "vuid": "VUID-VkImageCopy-srcImage-01787", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_2D, then srcOffset.z must be 0" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-01788", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_2D, then dstOffset.z must be 0" - }, - { - "vuid": "VUID-VkImageCopy-dstOffset-00150", - "text": " dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the destination image subresource width" - }, - { - "vuid": "VUID-VkImageCopy-dstOffset-00151", - "text": " dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the destination image subresource height" - }, - { - "vuid": "VUID-VkImageCopy-dstImage-00152", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.y must be 0 and extent.height must be 1" - }, - { - "vuid": "VUID-VkImageCopy-dstOffset-00153", - "text": " dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the destination image subresource depth" - }, { "vuid": "VUID-VkImageCopy-srcSubresource-parameter", "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" @@ -16876,6 +21874,384 @@ } ] }, + "vkCmdCopyImage2KHR": { + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdCopyImage2KHR-commandBuffer-01825", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyImage2KHR-commandBuffer-01826", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyImage2KHR-commandBuffer-01827", + "text": " If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image" + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-vkCmdCopyImage2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyImage2KHR-pCopyImageInfo-parameter", + "text": " pCopyImageInfo must be a valid pointer to a valid VkCopyImageInfo2KHR structure" + }, + { + "vuid": "VUID-vkCmdCopyImage2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyImage2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyImage2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkCopyImageInfo2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-pRegions-00124", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-00126", + "text": " srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImageLayout-00128", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + 
"vuid": "VUID-VkCopyImageInfo2KHR-dstImage-00131", + "text": " dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImageLayout-00133", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-00136", + "text": " The sample count of srcImage and dstImage must match" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcSubresource-01696", + "text": " The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstSubresource-01697", + "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcSubresource-01698", + "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstSubresource-01699", + "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcOffset-01783", + "text": " The srcOffset and extent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstOffset-01784", + "text": " The dstOffset and extent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-aspectMask-00142", + "text": " For each element of pRegions, srcSubresource.aspectMask must specify aspects present in srcImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-aspectMask-00143", + "text": " For each element of pRegions, dstSubresource.aspectMask must specify aspects present in dstImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcOffset-00144", + "text": " For each element of pRegions, srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcOffset-00145", + "text": " For each element of pRegions, srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-00146", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcOffset-00147", + "text": " For each element of pRegions, srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01785", + "text": " If srcImage is of type 
VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffset.z must be 0 and extent.depth must be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01786", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffset.z must be 0 and extent.depth must be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01787", + "text": " If srcImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, srcOffset.z must be 0" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01788", + "text": " If dstImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, dstOffset.z must be 0" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstOffset-00150", + "text": " For each element of pRegions, dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstOffset-00151", + "text": " For each element of pRegions, dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-00152", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstOffset-00153", + "text": " For each element of pRegions, dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01727", + "text": " If srcImage is a blocked image, then for each element of pRegions, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01728", + "text": " If srcImage is a blocked image, then for each element of pRegions, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01729", + "text": " If srcImage is a blocked image, then for each element of pRegions, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01730", + "text": " If srcImage is a blocked image, then for each element of pRegions, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + srcOffset.z) must equal the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01731", + "text": " If dstImage is a blocked image, then for each element of pRegions, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01732", + "text": " If dstImage is a blocked image, then for each element of pRegions, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01733", + "text": " If dstImage is a blocked image, then for each element of pRegions, extent.height must 
be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01734", + "text": " If dstImage is a blocked image, then for each element of pRegions, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + dstOffset.z) must equal the depth of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-parameter", + "text": " srcImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkImageCopy2KHR structures" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-commonparent", + "text": " Both of dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01995", + "text": " The format features of srcImage must contain VK_FORMAT_FEATURE_TRANSFER_SRC_BIT" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01996", + "text": " The format features of dstImage must contain VK_FORMAT_FEATURE_TRANSFER_DST_BIT" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-04443", + "text": " If srcImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer must be 0 and srcSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-04444", + "text": " If dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, dstSubresource.baseArrayLayer must be 0 and dstSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01790", + "text": " If srcImage and dstImage are both of type VK_IMAGE_TYPE_2D, then for each element of pRegions, extent.depth must be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01791", + "text": " If srcImage is of type VK_IMAGE_TYPE_2D, and dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, extent.depth must equal srcSubresource.layerCount" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01792", + "text": " If dstImage is of type VK_IMAGE_TYPE_2D, and srcImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, extent.depth must equal dstSubresource.layerCount" + } + ], + "(VK_KHR_copy_commands2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-00127", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-00132", + "text": " If dstImage is non-sparse then it 
must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-00135", + "text": " The VkFormat of each of srcImage and dstImage must be compatible, as defined above" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01546", + "text": " If srcImage is non-sparse then the image or disjoint plane to be copied must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01547", + "text": " If dstImage is non-sparse then the image or disjoint plane that is the destination of the copy must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01548", + "text": " If the VkFormat of each of srcImage and dstImage is not a multi-planar format, the VkFormat of each of srcImage and dstImage must be compatible, as defined above" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-None-01549", + "text": " In a copy to or from a plane of a multi-planar image, the VkFormat of the image and plane must be compatible according to the description of compatible planes for the plane being copied" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01551", + "text": " If neither srcImage nor dstImage has a multi-planar image format then for each element of pRegions, srcSubresource.aspectMask and dstSubresource.aspectMask must match" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01552", + "text": " If srcImage has a VkFormat with two planes then for each element of pRegions, srcSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01553", + "text": " If srcImage has a VkFormat with three planes then for each element of pRegions, srcSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01554", + "text": " If dstImage has a VkFormat with two planes then for each element of pRegions, dstSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01555", + "text": " If dstImage has a VkFormat with three planes then for each element of pRegions, dstSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01556", + "text": " If srcImage has a multi-planar image format and the dstImage does not have a multi-planar image format, then for each element of pRegions, dstSubresource.aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-01557", + "text": " If dstImage has a multi-planar image format and the srcImage does not have a multi-planar image format, then for each element of pRegions, srcSubresource.aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT" + } + ], + "(VK_KHR_copy_commands2)+!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImageLayout-00129", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImageLayout-00134", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + 
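Taken together, the VkCopyImageInfo2KHR rules above amount to: both images need the matching TRANSFER usage flag and equal sample counts, their formats must be compatible, and each image must be in one of the layouts its role permits. The following is a minimal sketch of a single-region color copy that satisfies them; it is not part of this patch, the handles, extent, and the record_image_copy2 helper name are all assumptions, and in a real application the vkCmdCopyImage2KHR entry point is fetched through vkGetDeviceProcAddr once VK_KHR_copy_commands2 is enabled.

#include <vulkan/vulkan.h>

/* Hypothetical helper: records one color copy obeying the VUs quoted above.
 * The caller guarantees usage flags, layouts, format compatibility, and that
 * both images come from the same VkDevice. */
static void record_image_copy2(VkCommandBuffer cmd, VkImage srcImage, VkImage dstImage,
                               uint32_t width, uint32_t height)
{
    const VkImageCopy2KHR region = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
        .srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1}, /* aspect, mip, base layer, layer count */
        .srcOffset = {0, 0, 0},
        .dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1}, /* aspectMask matches srcSubresource */
        .dstOffset = {0, 0, 0},
        .extent = {width, height, 1}, /* depth must be 1 when both images are 2D */
    };
    const VkCopyImageInfo2KHR info = {
        .sType = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
        .srcImage = srcImage, /* created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT */
        .srcImageLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        .dstImage = dstImage, /* created with VK_IMAGE_USAGE_TRANSFER_DST_BIT */
        .dstImageLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        .regionCount = 1,
        .pRegions = &region,
    };
    vkCmdCopyImage2KHR(cmd, &info); /* must be recorded outside a render pass instance */
}

Zero offsets with an extent no larger than the copied subresource trivially satisfy the offset-plus-extent bounds checks listed above.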
"(VK_KHR_copy_commands2)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImageLayout-01917", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImageLayout-01395", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + } + ], + "(VK_KHR_copy_commands2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-dstImage-02542", + "text": " dstImage and srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" + } + ], + "(VK_KHR_copy_commands2)+!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-00139", + "text": " If either srcImage or dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer and dstSubresource.baseArrayLayer must each be 0, and srcSubresource.layerCount and dstSubresource.layerCount must each be 1" + }, + { + "vuid": "VUID-VkCopyImageInfo2KHR-srcImage-01789", + "text": " If srcImage or dstImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, extent.depth must be 1" + } + ] + }, + "VkImageCopy2KHR": { + "(VK_KHR_copy_commands2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageCopy2KHR-aspectMask-00137", + "text": " The aspectMask member of srcSubresource and dstSubresource must match" + } + ], + "(VK_KHR_copy_commands2)+!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkImageCopy2KHR-layerCount-00138", + "text": " The layerCount member of srcSubresource and dstSubresource must match" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkImageCopy2KHR-extent-00140", + "text": " The number of slices of the extent (for 3D) or layers of the srcSubresource (for non-3D) must match the number of slices of the extent (for 3D) or layers of the dstSubresource (for non-3D)" + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkImageCopy2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR" + }, + { + "vuid": "VUID-VkImageCopy2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImageCopy2KHR-srcSubresource-parameter", + "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" + }, + { + "vuid": "VUID-VkImageCopy2KHR-dstSubresource-parameter", + "text": " dstSubresource must be a valid VkImageSubresourceLayers structure" + } + ] + }, "vkCmdCopyBufferToImage": { "(VK_VERSION_1_1)": [ { @@ -16892,14 +22268,14 @@ } ], "core": [ - { - "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00171", - "text": " srcBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions" - }, { "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00172", "text": " The image region specified by each element of pRegions must be a region that is contained within dstImage" }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00171", + "text": " srcBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions" + }, { "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00173", "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, 
must not overlap in memory" @@ -16940,13 +22316,81 @@ "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-01793", "text": " The imageOffset and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-04477", + "text": " If the queue family used to create the VkCommandPool which commandBuffer was allocated from does not support VK_QUEUE_GRAPHICS_BIT, for each element of pRegions, the aspectMask member of imageSubresource must not be VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT." + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-00197", + "text": " For each element of pRegions, imageOffset.x and (imageExtent.width + imageOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified imageSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-00198", + "text": " For each element of pRegions, imageOffset.y and (imageExtent.height + imageOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified imageSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-srcImage-00199", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-00200", + "text": " For each element of pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-srcImage-00201", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferRowLength-00203", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength must be a multiple of the compressed texel block width" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferImageHeight-00204", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferImageHeight must be a multiple of the compressed texel block height" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-00205", + "text": " If {imageparam} is a blocked image, for each element of pRegions, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferOffset-00206", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferOffset must be a multiple of the compressed texel block size in bytes" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00207", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the width of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00208", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the height of the 
specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00209", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-aspectMask-00211", + "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-baseArrayLayer-00213", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-04725", + "text": " If {imageparam} is not a blocked image, for each element of pRegions, bufferRowLength multiplied by the texel block size of {imageparam} must be less than or equal to 231-1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-04726", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength divided by the compressed texel block width and then multiplied by the texel block size of {imageparam} must be less than or equal to 231-1" + }, { "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-04052", "text": " If the queue family used to create the VkCommandPool which commandBuffer was allocated from does not support VK_QUEUE_GRAPHICS_BIT or VK_QUEUE_COMPUTE_BIT, the bufferOffset member of any element of pRegions must be a multiple of 4" }, { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-04053", - "text": " If dstImage has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" + "vuid": "VUID-vkCmdCopyBufferToImage-srcImage-04053", + "text": " If {imageparam} has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" }, { "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-parameter", @@ -17012,6 +22456,32 @@ "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-02543", "text": " dstImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" } + ], + "!(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage-None-00214", + "text": " For each element of pRegions whose imageSubresource contains a depth aspect, the data in srcBuffer must be in the range [0,1]" + } + ], + "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferOffset-00193", + "text": " If {imageparam} does not have a depth/stencil format, then for each element of pRegions, bufferOffset must be a multiple of the format’s texel block size" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferOffset-01558", + "text": " If {imageparam} does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the format’s texel block size" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferOffset-01559", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" + }, + { + "vuid": 
"VUID-vkCmdCopyBufferToImage-aspectMask-01560", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for image formats with three planes)" + } ] }, "vkCmdCopyImageToBuffer": { @@ -17050,14 +22520,6 @@ "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00187", "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00188", - "text": " srcImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189", - "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" - }, { "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-00191", "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" @@ -17066,6 +22528,14 @@ "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-00192", "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00188", + "text": " srcImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, { "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703", "text": " The imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" @@ -17079,12 +22549,76 @@ "text": " The imageOffset and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" }, { - "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-04054", + "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-00197", + "text": " For each element of pRegions, imageOffset.x and (imageExtent.width + imageOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified imageSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-00198", + "text": " For each element of pRegions, imageOffset.y and (imageExtent.height + imageOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified imageSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00199", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-00200", + "text": " For each element of pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00201", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" + }, + 
{ + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferRowLength-00203", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength must be a multiple of the compressed texel block width" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferImageHeight-00204", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferImageHeight must be a multiple of the compressed texel block height" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-00205", + "text": " If {imageparam} is a blocked image, for each element of pRegions, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferOffset-00206", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferOffset must be a multiple of the compressed texel block size in bytes" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00207", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the width of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00208", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the height of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00209", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-aspectMask-00211", + "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in {imageparam}" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-baseArrayLayer-00213", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-04725", + "text": " If {imageparam} is not a blocked image, for each element of pRegions, bufferRowLength multiplied by the texel block size of {imageparam} must be less than or equal to 231-1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-04726", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength divided by the compressed texel block width and then multiplied by the texel block size of {imageparam} must be less than or equal to 231-1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-04052", "text": " If the queue family used to create the VkCommandPool which commandBuffer was allocated from does not support VK_QUEUE_GRAPHICS_BIT or VK_QUEUE_COMPUTE_BIT, the bufferOffset member of any element of pRegions must be a multiple of 4" }, { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-04055", - "text": " If srcImage has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-04053", + "text": " If {imageparam} has a depth/stencil format, the bufferOffset member of any element of 
pRegions must be a multiple of 4" }, { "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter", @@ -17142,7 +22676,7 @@ "(VK_KHR_shared_presentable_image)": [ { "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397", - "text": " srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" } ], "(VK_EXT_fragment_density_map)": [ @@ -17150,85 +22684,29 @@ "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-02544", "text": " srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" } - ] - }, - "VkBufferImageCopy": { + ], "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { - "vuid": "VUID-VkBufferImageCopy-bufferOffset-00193", - "text": " If the calling command’s VkImage parameter’s format is not a depth/stencil format, then bufferOffset must be a multiple of the format’s texel block size" - }, - { - "vuid": "VUID-VkBufferImageCopy-bufferRowLength-00203", - "text": " If the calling command’s VkImage parameter is a compressed image, bufferRowLength must be a multiple of the compressed texel block width" - }, - { - "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-00204", - "text": " If the calling command’s VkImage parameter is a compressed image, bufferImageHeight must be a multiple of the compressed texel block height" - }, - { - "vuid": "VUID-VkBufferImageCopy-imageOffset-00205", - "text": " If the calling command’s VkImage parameter is a compressed image, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block" - }, - { - "vuid": "VUID-VkBufferImageCopy-bufferOffset-00206", - "text": " If the calling command’s VkImage parameter is a compressed image, bufferOffset must be a multiple of the compressed texel block size in bytes" - }, - { - "vuid": "VUID-VkBufferImageCopy-imageExtent-00207", - "text": " If the calling command’s VkImage parameter is a compressed image, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the image subresource width" - }, - { - "vuid": "VUID-VkBufferImageCopy-imageExtent-00208", - "text": " If the calling command’s VkImage parameter is a compressed image, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the image subresource height" - }, - { - "vuid": "VUID-VkBufferImageCopy-imageExtent-00209", - "text": " If the calling command’s VkImage parameter is a compressed image, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the image subresource depth" + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferOffset-00193", + "text": " If {imageparam} does not have a depth/stencil format, then for each element of pRegions, bufferOffset must be a multiple of the format’s texel block size" } ], "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ { - "vuid": "VUID-VkBufferImageCopy-bufferOffset-01558", - "text": " If the calling command’s VkImage parameter’s format is not a depth/stencil format or a multi-planar format, then bufferOffset must be a multiple of the format’s texel block size" + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferOffset-01558", + "text": " If {imageparam} does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, 
bufferOffset must be a multiple of the format’s texel block size" }, { - "vuid": "VUID-VkBufferImageCopy-bufferOffset-01559", - "text": " If the calling command’s VkImage parameter’s format is a multi-planar format, then bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferOffset-01559", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" }, { - "vuid": "VUID-VkBufferImageCopy-None-01735", - "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, bufferRowLength must be a multiple of the compressed texel block width" - }, - { - "vuid": "VUID-VkBufferImageCopy-None-01736", - "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, bufferImageHeight must be a multiple of the compressed texel block height" - }, - { - "vuid": "VUID-VkBufferImageCopy-None-01737", - "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block" - }, - { - "vuid": "VUID-VkBufferImageCopy-None-01738", - "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, bufferOffset must be a multiple of the compressed texel block size in bytes" - }, - { - "vuid": "VUID-VkBufferImageCopy-None-01739", - "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the image subresource width" - }, - { - "vuid": "VUID-VkBufferImageCopy-None-01740", - "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the image subresource height" - }, - { - "vuid": "VUID-VkBufferImageCopy-None-01741", - "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the image subresource depth" - }, - { - "vuid": "VUID-VkBufferImageCopy-aspectMask-01560", - "text": " If the calling command’s VkImage parameter’s format is a multi-planar format, then the aspectMask member of imageSubresource must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for image formats with three planes)" + "vuid": "VUID-vkCmdCopyImageToBuffer-aspectMask-01560", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for image formats with three planes)" } - ], + ] + }, + 
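Before the VkBufferImageCopy entries below, here is how the vkCmdCopyImageToBuffer rules above compose in practice. This is a sketch under stated assumptions rather than code from this patch: cmd, srcImage, dstBuffer, width, and height are placeholder names; the image is a single-sampled color image already in VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL; and dstBuffer was created with VK_BUFFER_USAGE_TRANSFER_DST_BIT and is large enough for the packed region.

#include <vulkan/vulkan.h>

/* Hypothetical helper: reads back mip 0, layer 0 of a color image. A zero
 * bufferOffset satisfies both the texel-block-size and the multiple-of-4
 * alignment rules quoted above. */
static void record_readback(VkCommandBuffer cmd, VkImage srcImage, VkBuffer dstBuffer,
                            uint32_t width, uint32_t height)
{
    const VkBufferImageCopy region = {
        .bufferOffset = 0,
        .bufferRowLength = 0,   /* 0 means tightly packed, i.e. taken as imageExtent.width */
        .bufferImageHeight = 0, /* 0 means tightly packed, i.e. taken as imageExtent.height */
        .imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1}, /* exactly one aspect bit set */
        .imageOffset = {0, 0, 0},
        .imageExtent = {width, height, 1}, /* imageOffset.z == 0 and depth == 1 for 1D/2D images */
    };
    vkCmdCopyImageToBuffer(cmd, srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                           dstBuffer, 1, &region);
}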
"VkBufferImageCopy": { "core": [ { "vuid": "VUID-VkBufferImageCopy-bufferRowLength-00195", @@ -17238,47 +22716,583 @@ "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-00196", "text": " bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent" }, - { - "vuid": "VUID-VkBufferImageCopy-imageOffset-00197", - "text": " imageOffset.x and (imageExtent.width + imageOffset.x) must both be greater than or equal to 0 and less than or equal to the image subresource width" - }, - { - "vuid": "VUID-VkBufferImageCopy-imageOffset-00198", - "text": " imageOffset.y and (imageExtent.height + imageOffset.y) must both be greater than or equal to 0 and less than or equal to the image subresource height" - }, - { - "vuid": "VUID-VkBufferImageCopy-srcImage-00199", - "text": " If the calling command’s srcImage (vkCmdCopyImageToBuffer) or dstImage (vkCmdCopyBufferToImage) is of type VK_IMAGE_TYPE_1D, then imageOffset.y must be 0 and imageExtent.height must be 1" - }, - { - "vuid": "VUID-VkBufferImageCopy-imageOffset-00200", - "text": " imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the image subresource depth" - }, - { - "vuid": "VUID-VkBufferImageCopy-srcImage-00201", - "text": " If the calling command’s srcImage (vkCmdCopyImageToBuffer) or dstImage (vkCmdCopyBufferToImage) is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then imageOffset.z must be 0 and imageExtent.depth must be 1" - }, - { - "vuid": "VUID-VkBufferImageCopy-aspectMask-00211", - "text": " The aspectMask member of imageSubresource must specify aspects present in the calling command’s VkImage parameter" - }, { "vuid": "VUID-VkBufferImageCopy-aspectMask-00212", "text": " The aspectMask member of imageSubresource must only have a single bit set" }, - { - "vuid": "VUID-VkBufferImageCopy-baseArrayLayer-00213", - "text": " If the calling command’s VkImage parameter is of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of imageSubresource must be 0 and 1, respectively" - }, { "vuid": "VUID-VkBufferImageCopy-imageSubresource-parameter", "text": " imageSubresource must be a valid VkImageSubresourceLayers structure" } - ], - "!(VK_EXT_depth_range_unrestricted)": [ + ] + }, + "vkCmdCopyBufferToImage2KHR": { + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1)": [ { - "vuid": "VUID-VkBufferImageCopy-None-00214", - "text": " When copying to the depth aspect of an image subresource, the data in the source buffer must be in the range [0,1]" + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-commandBuffer-01828", + "text": " If commandBuffer is an unprotected command buffer, then srcBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-commandBuffer-01829", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-commandBuffer-01830", + "text": " If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image" + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-pCopyBufferToImageInfo-parameter", + "text": " pCopyBufferToImageInfo must be a valid pointer to a valid VkCopyBufferToImageInfo2KHR structure" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-commandBuffer-recording", + "text": " commandBuffer must 
be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkCopyBufferToImageInfo2KHR": { + "(VK_KHR_copy_commands2)+!(VK_QCOM_rotated_copy_commands)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-00172", + "text": " The image region specified by each element of pRegions must be a region that is contained within dstImage" + } + ], + "(VK_KHR_copy_commands2)+(VK_QCOM_rotated_copy_commands)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-04565", + "text": " If the image region specified by each element of pRegions does not contain VkCopyCommandTransformInfoQCOM in its pNext chain, it must be a region that is contained within dstImage" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-04554", + "text": " If the image region specified by each element of pRegions does contain VkCopyCommandTransformInfoQCOM in its pNext chain, the rotated destination region as described in Buffer and Image Addressing with Rotation must be contained within dstImage." + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-04555", + "text": " If any element of pRegions contains VkCopyCommandTransformInfoQCOM in its pNext chain, then the dstImage must not be a blocked image." + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-04556", + "text": " If any element of pRegions contains VkCopyCommandTransformInfoQCOM in its pNext chain, then the dstImage must be of type VK_IMAGE_TYPE_2D and must not be a multi-planar format." 
+ } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-00171", + "text": " srcBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-00173", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-srcBuffer-00174", + "text": " srcBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-srcBuffer-00176", + "text": " If srcBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImage-00177", + "text": " dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImage-00178", + "text": " If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImage-00179", + "text": " dstImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImageLayout-00180", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageSubresource-01701", + "text": " The imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageSubresource-01702", + "text": " The imageSubresource.baseArrayLayer + imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageOffset-01793", + "text": " The imageOffset and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-commandBuffer-04477", + "text": " If the queue family used to create the VkCommandPool which commandBuffer was allocated from does not support VK_QUEUE_GRAPHICS_BIT, for each element of pRegions, the aspectMask member of imageSubresource must not be VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT." 
+ }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageOffset-00197", + "text": " For each element of pRegions" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageOffset-00198", + "text": " For each element of pRegions" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-srcImage-00199", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageOffset-00200", + "text": " For each element of pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-srcImage-00201", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-bufferRowLength-00203", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength must be a multiple of the compressed texel block width" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-bufferImageHeight-00204", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferImageHeight must be a multiple of the compressed texel block height" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageOffset-00205", + "text": " If {imageparam} is a blocked image, for each element of pRegions, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-bufferOffset-00206", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferOffset must be a multiple of the compressed texel block size in bytes" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageExtent-00207", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the width of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageExtent-00208", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the height of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-imageExtent-00209", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-aspectMask-00211", + "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in {imageparam}" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-baseArrayLayer-00213", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-04725", + "text": " If {imageparam} is not a blocked image, for each element of pRegions, bufferRowLength 
multiplied by the texel block size of {imageparam} must be less than or equal to 231-1" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-04726", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength divided by the compressed texel block width and then multiplied by the texel block size of {imageparam} must be less than or equal to 231-1" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-commandBuffer-04052", + "text": " If the queue family used to create the VkCommandPool which commandBuffer was allocated from does not support VK_QUEUE_GRAPHICS_BIT or VK_QUEUE_COMPUTE_BIT, the bufferOffset member of any element of pRegions must be a multiple of 4" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-srcImage-04053", + "text": " If {imageparam} has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-srcBuffer-parameter", + "text": " srcBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkBufferImageCopy2KHR structures" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-commonparent", + "text": " Both of dstImage, and srcBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImage-01997", + "text": " The format features of dstImage must contain VK_FORMAT_FEATURE_TRANSFER_DST_BIT" + } + ], + "(VK_KHR_copy_commands2)+!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImageLayout-00181", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_copy_commands2)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImageLayout-01396", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + } + ], + "(VK_KHR_copy_commands2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-dstImage-02543", + "text": " dstImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" + } + ], + "(VK_KHR_copy_commands2)+!(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-None-00214", + "text": " For each element of pRegions whose imageSubresource contains a depth aspect, the data in srcBuffer must be in the range [0,1]" + } + ], + "(VK_KHR_copy_commands2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-bufferOffset-00193", + "text": " If {imageparam} does not have a depth/stencil 
format, then for each element of pRegions, bufferOffset must be a multiple of the format’s texel block size" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-bufferOffset-01558", + "text": " If {imageparam} does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the format’s texel block size" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-bufferOffset-01559", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2KHR-aspectMask-01560", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for image formats with three planes)" + } + ] + }, + "vkCmdCopyImageToBuffer2KHR": { + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-commandBuffer-01831", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-commandBuffer-01832", + "text": " If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-commandBuffer-01833", + "text": " If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer" + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-pCopyImageToBufferInfo-parameter", + "text": " pCopyImageToBufferInfo must be a valid pointer to a valid VkCopyImageToBufferInfo2KHR structure" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkCopyImageToBufferInfo2KHR": { + "(VK_KHR_copy_commands2)+!(VK_QCOM_rotated_copy_commands)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-00182", + "text": " The image region specified by each element of pRegions must be a region that is contained within srcImage" + } + ], + "(VK_KHR_copy_commands2)+(VK_QCOM_rotated_copy_commands)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-04566", + "text": " If the image region specified by each element of pRegions does not contain VkCopyCommandTransformInfoQCOM in its pNext chain, it must be a region that is contained within srcImage" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-04557", + "text": " If the image region specified by each element of pRegions does contain VkCopyCommandTransformInfoQCOM in its pNext chain, the rotated source region as 
described in Buffer and Image Addressing with Rotation must be contained within srcImage." + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-04558", + "text": " If any element of pRegions contains VkCopyCommandTransformInfoQCOM in its pNext chain, then the srcImage must not be a blocked image" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-04559", + "text": " If any element of pRegions contains VkCopyCommandTransformInfoQCOM in its pNext chain, then the srcImage must be of type VK_IMAGE_TYPE_2D, and must not be a multi-planar format." + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-00183", + "text": " dstBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-00184", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-00186", + "text": " srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-00187", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-dstBuffer-00191", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-dstBuffer-00192", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-00188", + "text": " srcImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImageLayout-00189", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageSubresource-01703", + "text": " The imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageSubresource-01704", + "text": " The imageSubresource.baseArrayLayer + imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageOffset-01794", + "text": " The imageOffset and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageOffset-00197", + "text": " For each element of pRegions" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageOffset-00198", + "text": " For each element of pRegions" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-00199", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageOffset-00200", + "text": " For each element of 
pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-00201", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-bufferRowLength-00203", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength must be a multiple of the compressed texel block width" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-bufferImageHeight-00204", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferImageHeight must be a multiple of the compressed texel block height" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageOffset-00205", + "text": " If {imageparam} is a blocked image, for each element of pRegions, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-bufferOffset-00206", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferOffset must be a multiple of the compressed texel block size in bytes" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageExtent-00207", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the width of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageExtent-00208", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the height of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-imageExtent-00209", + "text": " If {imageparam} is a blocked image, for each element of pRegions, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the depth of the specified imageSubresource of {imageparam}" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-aspectMask-00211", + "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in {imageparam}" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-baseArrayLayer-00213", + "text": " If {imageparam} is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-04725", + "text": " If {imageparam} is not a blocked image, for each element of pRegions, bufferRowLength multiplied by the texel block size of {imageparam} must be less than or equal to 2^31-1" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-04726", + "text": " If {imageparam} is a blocked image, for each element of pRegions, bufferRowLength divided by the compressed texel block width and then multiplied by the texel block size of {imageparam} must be less than or equal to 2^31-1" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-commandBuffer-04052", + "text": " If the queue family used to create the VkCommandPool which commandBuffer was 
allocated from does not support VK_QUEUE_GRAPHICS_BIT or VK_QUEUE_COMPUTE_BIT, the bufferOffset member of any element of pRegions must be a multiple of 4" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-04053", + "text": " If {imageparam} has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-parameter", + "text": " srcImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkBufferImageCopy2KHR structures" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-commonparent", + "text": " Both of dstBuffer, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-01998", + "text": " The format features of srcImage must contain VK_FORMAT_FEATURE_TRANSFER_SRC_BIT" + } + ], + "(VK_KHR_copy_commands2)+!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImageLayout-00190", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_copy_commands2)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImageLayout-01397", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + } + ], + "(VK_KHR_copy_commands2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-srcImage-02544", + "text": " srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" + } + ], + "(VK_KHR_copy_commands2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-bufferOffset-00193", + "text": " If {imageparam} does not have a depth/stencil format, then for each element of pRegions, bufferOffset must be a multiple of the format’s texel block size" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-bufferOffset-01558", + "text": " If {imageparam} does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the format’s texel block size" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2KHR-bufferOffset-01559", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" + }, + { 
+ "vuid": "VUID-VkCopyImageToBufferInfo2KHR-aspectMask-01560", + "text": " If {imageparam} has a multi-planar format, then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for image formats with three planes)" + } + ] + }, + "VkBufferImageCopy2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkBufferImageCopy2KHR-bufferRowLength-00195", + "text": " bufferRowLength must be 0, or greater than or equal to the width member of imageExtent" + }, + { + "vuid": "VUID-VkBufferImageCopy2KHR-bufferImageHeight-00196", + "text": " bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent" + }, + { + "vuid": "VUID-VkBufferImageCopy2KHR-aspectMask-00212", + "text": " The aspectMask member of imageSubresource must only have a single bit set" + }, + { + "vuid": "VUID-VkBufferImageCopy2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR" + }, + { + "vuid": "VUID-VkBufferImageCopy2KHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkCopyCommandTransformInfoQCOM" + }, + { + "vuid": "VUID-VkBufferImageCopy2KHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkBufferImageCopy2KHR-imageSubresource-parameter", + "text": " imageSubresource must be a valid VkImageSubresourceLayers structure" + } + ] + }, + "VkCopyCommandTransformInfoQCOM": { + "(VK_QCOM_rotated_copy_commands)": [ + { + "vuid": "VUID-VkCopyCommandTransformInfoQCOM-transform-04560", + "text": " transform must be VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, or VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR" + }, + { + "vuid": "VUID-VkCopyCommandTransformInfoQCOM-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM" } ] }, @@ -17386,6 +23400,58 @@ "vuid": "VUID-vkCmdBlitImage-dstSubresource-01708", "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00240", + "text": " If either srcImage or dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer and dstSubresource.baseArrayLayer must each be 0, and srcSubresource.layerCount and dstSubresource.layerCount must each be 1." 
+ }, + { + "vuid": "VUID-vkCmdBlitImage-aspectMask-00241", + "text": " For each element of pRegions, srcSubresource.aspectMask must specify aspects present in srcImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-aspectMask-00242", + "text": " For each element of pRegions, dstSubresource.aspectMask must specify aspects present in dstImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcOffset-00243", + "text": " For each element of pRegions, srcOffsets[0].x and srcOffsets[1].x must both be greater than or equal to 0 and less than or equal to the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcOffset-00244", + "text": " For each element of pRegions, srcOffsets[0].y and srcOffsets[1].y must both be greater than or equal to 0 and less than or equal to the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00245", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffsets[0].y must be 0 and srcOffsets[1].y must be 1" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcOffset-00246", + "text": " For each element of pRegions, srcOffsets[0].z and srcOffsets[1].z must both be greater than or equal to 0 and less than or equal to the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00247", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, srcOffsets[0].z must be 0 and srcOffsets[1].z must be 1" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstOffset-00248", + "text": " For each element of pRegions, dstOffsets[0].x and dstOffsets[1].x must both be greater than or equal to 0 and less than or equal to the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstOffset-00249", + "text": " For each element of pRegions, dstOffsets[0].y and dstOffsets[1].y must both be greater than or equal to 0 and less than or equal to the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-00250", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffsets[0].y must be 0 and dstOffsets[1].y must be 1" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstOffset-00251", + "text": " For each element of pRegions, dstOffsets[0].z and dstOffsets[1].z must both be greater than or equal to 0 and less than or equal to the depth of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-00252", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, dstOffsets[0].z must be 0 and dstOffsets[1].z must be 1" + }, { "vuid": "VUID-vkCmdBlitImage-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -17472,7 +23538,7 @@ }, { "vuid": "VUID-vkCmdBlitImage-filter-00237", - "text": " If filter is VK_FILTER_CUBIC_EXT, srcImage must have a VkImageType of VK_IMAGE_TYPE_2D" + "text": " If filter is VK_FILTER_CUBIC_EXT, srcImage must be of type VK_IMAGE_TYPE_2D" } ], "(VK_EXT_fragment_density_map)": [ @@ -17492,58 +23558,6 @@ "vuid": "VUID-VkImageBlit-layerCount-00239", "text": " The layerCount member of srcSubresource and dstSubresource must match" }, - { - "vuid": "VUID-VkImageBlit-srcImage-00240", - "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and 
dstSubresource must be 0 and 1, respectively" - }, - { - "vuid": "VUID-VkImageBlit-aspectMask-00241", - "text": " The aspectMask member of srcSubresource must specify aspects present in the calling command’s srcImage" - }, - { - "vuid": "VUID-VkImageBlit-aspectMask-00242", - "text": " The aspectMask member of dstSubresource must specify aspects present in the calling command’s dstImage" - }, - { - "vuid": "VUID-VkImageBlit-srcOffset-00243", - "text": " srcOffset[0].x and srcOffset[1].x must both be greater than or equal to 0 and less than or equal to the source image subresource width" - }, - { - "vuid": "VUID-VkImageBlit-srcOffset-00244", - "text": " srcOffset[0].y and srcOffset[1].y must both be greater than or equal to 0 and less than or equal to the source image subresource height" - }, - { - "vuid": "VUID-VkImageBlit-srcImage-00245", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset[0].y must be 0 and srcOffset[1].y must be 1" - }, - { - "vuid": "VUID-VkImageBlit-srcOffset-00246", - "text": " srcOffset[0].z and srcOffset[1].z must both be greater than or equal to 0 and less than or equal to the source image subresource depth" - }, - { - "vuid": "VUID-VkImageBlit-srcImage-00247", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then srcOffset[0].z must be 0 and srcOffset[1].z must be 1" - }, - { - "vuid": "VUID-VkImageBlit-dstOffset-00248", - "text": " dstOffset[0].x and dstOffset[1].x must both be greater than or equal to 0 and less than or equal to the destination image subresource width" - }, - { - "vuid": "VUID-VkImageBlit-dstOffset-00249", - "text": " dstOffset[0].y and dstOffset[1].y must both be greater than or equal to 0 and less than or equal to the destination image subresource height" - }, - { - "vuid": "VUID-VkImageBlit-dstImage-00250", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset[0].y must be 0 and dstOffset[1].y must be 1" - }, - { - "vuid": "VUID-VkImageBlit-dstOffset-00251", - "text": " dstOffset[0].z and dstOffset[1].z must both be greater than or equal to 0 and less than or equal to the destination image subresource depth" - }, - { - "vuid": "VUID-VkImageBlit-dstImage-00252", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then dstOffset[0].z must be 0 and dstOffset[1].z must be 1" - }, { "vuid": "VUID-VkImageBlit-srcSubresource-parameter", "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" @@ -17554,6 +23568,316 @@ } ] }, + "vkCmdBlitImage2KHR": { + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdBlitImage2KHR-commandBuffer-01834", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdBlitImage2KHR-commandBuffer-01835", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdBlitImage2KHR-commandBuffer-01836", + "text": " If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image" + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-vkCmdBlitImage2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBlitImage2KHR-pBlitImageInfo-parameter", + "text": " pBlitImageInfo must be a valid pointer to a valid VkBlitImageInfo2KHR structure" + }, + { + "vuid": 
"VUID-vkCmdBlitImage2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBlitImage2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBlitImage2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkBlitImageInfo2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkBlitImageInfo2KHR-pRegions-00215", + "text": " The source region specified by each element of pRegions must be a region that is contained within srcImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-pRegions-00216", + "text": " The destination region specified by each element of pRegions must be a region that is contained within dstImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-pRegions-00217", + "text": " The union of all destination regions, specified by the elements of pRegions, must not overlap in memory with any texel that may be sampled during the blit operation" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-01999", + "text": " The format features of srcImage must contain VK_FORMAT_FEATURE_BLIT_SRC_BIT" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00219", + "text": " srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00220", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImageLayout-00221", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-02000", + "text": " The format features of dstImage must contain VK_FORMAT_FEATURE_BLIT_DST_BIT" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-00224", + "text": " dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-00225", + "text": " If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImageLayout-00226", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00229", + "text": " If either of srcImage or dstImage was created with a signed integer VkFormat, the other must also have been created with a signed integer VkFormat" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00230", + "text": " If either of srcImage or dstImage was created with an unsigned integer VkFormat, the other must also have been created with an unsigned integer VkFormat" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00231", + "text": " If either of srcImage or dstImage was created with a depth/stencil format, the other must have exactly the same format" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00232", + "text": " If srcImage was created with a depth/stencil format, filter must be VK_FILTER_NEAREST" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00233", + "text": " srcImage must have been created with a samples value of VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-00234", 
+ "text": " dstImage must have been created with a samples value of VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-filter-02001", + "text": " If filter is VK_FILTER_LINEAR, then the format features of srcImage must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcSubresource-01705", + "text": " The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstSubresource-01706", + "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcSubresource-01707", + "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstSubresource-01708", + "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00240", + "text": " If either srcImage or dstImage is of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer and dstSubresource.baseArrayLayer must each be 0, and srcSubresource.layerCount and dstSubresource.layerCount must each be 1." + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-aspectMask-00241", + "text": " For each element of pRegions, srcSubresource.aspectMask must specify aspects present in srcImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-aspectMask-00242", + "text": " For each element of pRegions, dstSubresource.aspectMask must specify aspects present in dstImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcOffset-00243", + "text": " For each element of pRegions, srcOffsets[0].x and srcOffsets[1].x must both be greater than or equal to 0 and less than or equal to the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcOffset-00244", + "text": " For each element of pRegions, srcOffsets[0].y and srcOffsets[1].y must both be greater than or equal to 0 and less than or equal to the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00245", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffsets[0].y must be 0 and srcOffsets[1].y must be 1" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcOffset-00246", + "text": " For each element of pRegions, srcOffsets[0].z and srcOffsets[1].z must both be greater than or equal to 0 and less than or equal to the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-00247", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, srcOffsets[0].z must be 0 and srcOffsets[1].z must be 1" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstOffset-00248", + "text": " For each element of pRegions, dstOffsets[0].x and dstOffsets[1].x must both be greater than or equal to 0 and less than or equal to the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstOffset-00249", + "text": " For each 
element of pRegions, dstOffsets[0].y and dstOffsets[1].y must both be greater than or equal to 0 and less than or equal to the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-00250", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffsets[0].y must be 0 and dstOffsets[1].y must be 1" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstOffset-00251", + "text": " For each element of pRegions, dstOffsets[0].z and dstOffsets[1].z must both be greater than or equal to 0 and less than or equal to the depth of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-00252", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, dstOffsets[0].z must be 0 and dstOffsets[1].z must be 1" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-parameter", + "text": " srcImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkImageBlit2KHR structures" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-filter-parameter", + "text": " filter must be a valid VkFilter value" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-commonparent", + "text": " Both of dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImage-01561", + "text": " srcImage must not use a format listed in Formats requiring sampler Y′CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-01562", + "text": " dstImage must not use a format listed in Formats requiring sampler Y′CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views" + } + ], + "(VK_KHR_copy_commands2)+!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImageLayout-00222", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImageLayout-00227", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_copy_commands2)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkBlitImageInfo2KHR-srcImageLayout-01398", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImageLayout-01399", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" 
+ } + ], + "(VK_KHR_copy_commands2)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-VkBlitImageInfo2KHR-filter-02002", + "text": " If filter is VK_FILTER_CUBIC_EXT, then the format features of srcImage must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-filter-00237", + "text": " If filter is VK_FILTER_CUBIC_EXT, srcImage must be of type VK_IMAGE_TYPE_2D" + } + ], + "(VK_KHR_copy_commands2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkBlitImageInfo2KHR-dstImage-02545", + "text": " dstImage and srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" + } + ], + "(VK_KHR_copy_commands2)+(VK_QCOM_rotated_copy_commands)": [ + { + "vuid": "VUID-VkBlitImageInfo2KHR-pRegions-04561", + "text": " If any element of pRegions contains VkCopyCommandTransformInfoQCOM in its pNext chain, then srcImage and dstImage must not be a block-compressed image." + }, + { + "vuid": "VUID-VkBlitImageInfo2KHR-pRegions-04562", + "text": " If any element of pRegions contains VkCopyCommandTransformInfoQCOM in its pNext chain, then the srcImage must be of type VK_IMAGE_TYPE_2D and must not be a multi-planar format." + } + ] + }, + "VkImageBlit2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkImageBlit2KHR-aspectMask-00238", + "text": " The aspectMask member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageBlit2KHR-layerCount-00239", + "text": " The layerCount member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageBlit2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR" + }, + { + "vuid": "VUID-VkImageBlit2KHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkCopyCommandTransformInfoQCOM" + }, + { + "vuid": "VUID-VkImageBlit2KHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkImageBlit2KHR-srcSubresource-parameter", + "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" + }, + { + "vuid": "VUID-VkImageBlit2KHR-dstSubresource-parameter", + "text": " dstSubresource must be a valid VkImageSubresourceLayers structure" + } + ] + }, "vkCmdResolveImage": { "(VK_VERSION_1_1)": [ { @@ -17622,6 +23946,54 @@ "vuid": "VUID-vkCmdResolveImage-dstSubresource-01712", "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-04446", + "text": " If either srcImage or dstImage are of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer must be 0 and srcSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-04447", + "text": " If either srcImage or dstImage are of type VK_IMAGE_TYPE_3D, then for each element of pRegions, dstSubresource.baseArrayLayer must be 0 and dstSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcOffset-00269", + "text": " For each element of pRegions, srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcOffset-00270", + "text": " For each element of pRegions, srcOffset.y and (extent.height + srcOffset.y) must both be greater 
than or equal to 0 and less than or equal to the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-00271", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcOffset-00272", + "text": " For each element of pRegions, srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-00273", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, srcOffset.z must be 0 and extent.depth must be 1" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstOffset-00274", + "text": " For each element of pRegions, dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstOffset-00275", + "text": " For each element of pRegions, dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImage-00276", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstOffset-00277", + "text": " For each element of pRegions, dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImage-00278", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, dstOffset.z must be 0 and extent.depth must be 1" + }, { "vuid": "VUID-vkCmdResolveImage-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -17704,50 +24076,6 @@ "vuid": "VUID-VkImageResolve-layerCount-00267", "text": " The layerCount member of srcSubresource and dstSubresource must match" }, - { - "vuid": "VUID-VkImageResolve-srcImage-00268", - "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively" - }, - { - "vuid": "VUID-VkImageResolve-srcOffset-00269", - "text": " srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the source image subresource width" - }, - { - "vuid": "VUID-VkImageResolve-srcOffset-00270", - "text": " srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the source image subresource height" - }, - { - "vuid": "VUID-VkImageResolve-srcImage-00271", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.y must be 0 and extent.height must be 1" - }, - { - "vuid": "VUID-VkImageResolve-srcOffset-00272", - "text": " srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the source image subresource depth" - }, - { - "vuid": "VUID-VkImageResolve-srcImage-00273", - "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D 
or VK_IMAGE_TYPE_2D, then srcOffset.z must be 0 and extent.depth must be 1" - }, - { - "vuid": "VUID-VkImageResolve-dstOffset-00274", - "text": " dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the destination image subresource width" - }, - { - "vuid": "VUID-VkImageResolve-dstOffset-00275", - "text": " dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the destination image subresource height" - }, - { - "vuid": "VUID-VkImageResolve-dstImage-00276", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.y must be 0 and extent.height must be 1" - }, - { - "vuid": "VUID-VkImageResolve-dstOffset-00277", - "text": " dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the destination image subresource depth" - }, - { - "vuid": "VUID-VkImageResolve-dstImage-00278", - "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then dstOffset.z must be 0 and extent.depth must be 1" - }, { "vuid": "VUID-VkImageResolve-srcSubresource-parameter", "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" @@ -17758,8 +24086,110 @@ } ] }, + "vkCmdWriteBufferMarker2AMD": { + "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03929", + "text": " If the geometry shaders feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03930", + "text": " If the tessellation shaders feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR or VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-synchronization2-03893", + "text": " The synchronization2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03894", + "text": " stage must include only a single pipeline stage" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03895", + "text": " stage must include only stages that are valid for the queue family that was used to create the command pool that commandBuffer was allocated from" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstOffset-03896", + "text": " dstOffset must be less than or equal to the size of dstBuffer minus 4." 
+ }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstBuffer-03897", + "text": " dstBuffer must have been created with the VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstBuffer-03898", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstOffset-03899", + "text": " dstOffset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-parameter", + "text": " stage must be a valid combination of VkPipelineStageFlagBits2KHR values" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-requiredbitmask", + "text": " stage must not be 0" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-commonparent", + "text": " Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03931", + "text": " If the conditional rendering feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT" + } + ], + "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03932", + "text": " If the fragment density map feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + } + ], + "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03933", + "text": " If the transform feedback feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT" + } + ], + "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03934", + "text": " If the mesh shaders feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03935", + "text": " If the task shaders feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV" + } + ], + "(VK_AMD_buffer_marker)+(VK_KHR_synchronization2)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-03936", + "text": " If the shading rate image feature is not enabled, stage must not contain VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV" + } + ] + }, "vkCmdWriteBufferMarkerAMD": { - "core": [ + "(VK_AMD_buffer_marker)": [ { "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04074", "text": " pipelineStage must be a valid stage for the queue family that was used to create the command pool that commandBuffer was allocated from" @@ -17787,39 +24217,7 @@ { "vuid": 
"VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01801", "text": " dstOffset must be a multiple of 4" - } - ], - "(VK_EXT_conditional_rendering)": [ - { - "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04077", - "text": " If the conditional rendering feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT" - } - ], - "(VK_EXT_fragment_density_map)": [ - { - "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04078", - "text": " If the fragment density map feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT" - } - ], - "(VK_EXT_transform_feedback)": [ - { - "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04079", - "text": " If the transform feedback feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT" - } - ], - "(VK_NV_mesh_shader)": [ - { - "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04080", - "text": " If the mesh shaders feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV or VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV" - } - ], - "(VK_NV_shading_rate_image)": [ - { - "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04081", - "text": " If the shading rate image feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV" - } - ], - "(VK_AMD_buffer_marker)": [ + }, { "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -17844,6 +24242,268 @@ "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commonparent", "text": " Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice" } + ], + "(VK_AMD_buffer_marker)+(VK_EXT_conditional_rendering)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04077", + "text": " If the conditional rendering feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT" + } + ], + "(VK_AMD_buffer_marker)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04078", + "text": " If the fragment density map feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT" + } + ], + "(VK_AMD_buffer_marker)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04079", + "text": " If the transform feedback feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT" + } + ], + "(VK_AMD_buffer_marker)+(VK_NV_mesh_shader)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04080", + "text": " If the mesh shaders feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV or VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV" + } + ], + "(VK_AMD_buffer_marker)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-04081", + "text": " If the shading rate image feature is not enabled, pipelineStage must not be VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV" + } + ] + }, + "vkCmdResolveImage2KHR": { + "(VK_KHR_copy_commands2)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdResolveImage2KHR-commandBuffer-01837", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdResolveImage2KHR-commandBuffer-01838", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + 
}, + { + "vuid": "VUID-vkCmdResolveImage2KHR-commandBuffer-01839", + "text": " If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image" + } + ], + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-vkCmdResolveImage2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdResolveImage2KHR-pResolveImageInfo-parameter", + "text": " pResolveImageInfo must be a valid pointer to a valid VkResolveImageInfo2KHR structure" + }, + { + "vuid": "VUID-vkCmdResolveImage2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdResolveImage2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdResolveImage2KHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkResolveImageInfo2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkResolveImageInfo2KHR-pRegions-00255", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImage-00256", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImage-00257", + "text": " srcImage must have a sample count equal to any valid sample count value other than VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImage-00258", + "text": " If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImage-00259", + "text": " dstImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImageLayout-00260", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImageLayout-00262", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImage-02003", + "text": " The format features of dstImage must contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImage-01386", + "text": " srcImage and dstImage must have been created with the same image format" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcSubresource-01709", + "text": " The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstSubresource-01710", + "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcSubresource-01711", + "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": 
"VUID-VkResolveImageInfo2KHR-dstSubresource-01712", + "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImage-04446", + "text": " If either srcImage or dstImage are of type VK_IMAGE_TYPE_3D, then for each element of pRegions, srcSubresource.baseArrayLayer must be 0 and srcSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImage-04447", + "text": " If either srcImage or dstImage are of type VK_IMAGE_TYPE_3D, then for each element of pRegions, dstSubresource.baseArrayLayer must be 0 and dstSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcOffset-00269", + "text": " For each element of pRegions, srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcOffset-00270", + "text": " For each element of pRegions, srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImage-00271", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, srcOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcOffset-00272", + "text": " For each element of pRegions, srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified srcSubresource of srcImage" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImage-00273", + "text": " If srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, srcOffset.z must be 0 and extent.depth must be 1" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstOffset-00274", + "text": " For each element of pRegions, dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstOffset-00275", + "text": " For each element of pRegions, dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImage-00276", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, dstOffset.y must be 0 and extent.height must be 1" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstOffset-00277", + "text": " For each element of pRegions, dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified dstSubresource of dstImage" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImage-00278", + "text": " If dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, dstOffset.z must be 0 and extent.depth must be 1" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": 
"VUID-VkResolveImageInfo2KHR-srcImage-parameter", + "text": " srcImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkImageResolve2KHR structures" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-commonparent", + "text": " Both of dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_KHR_copy_commands2)+!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImageLayout-00261", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImageLayout-00263", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_copy_commands2)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkResolveImageInfo2KHR-srcImageLayout-01400", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImageLayout-01401", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_copy_commands2)+(VK_EXT_fragment_density_map)": [ + { + "vuid": "VUID-VkResolveImageInfo2KHR-dstImage-02546", + "text": " dstImage and srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT" + } + ] + }, + "VkImageResolve2KHR": { + "(VK_KHR_copy_commands2)": [ + { + "vuid": "VUID-VkImageResolve2KHR-aspectMask-00266", + "text": " The aspectMask member of srcSubresource and dstSubresource must only contain VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageResolve2KHR-layerCount-00267", + "text": " The layerCount member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageResolve2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR" + }, + { + "vuid": "VUID-VkImageResolve2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImageResolve2KHR-srcSubresource-parameter", + "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" + }, + { + "vuid": "VUID-VkImageResolve2KHR-dstSubresource-parameter", + "text": " dstSubresource must be a valid VkImageSubresourceLayers structure" + } ] }, "VkPipelineInputAssemblyStateCreateInfo": { @@ -17876,6 +24536,32 @@ "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-parameter", "text": " topology must be a valid VkPrimitiveTopology value" } + ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-triangleFans-04452", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::triangleFans is VK_FALSE, topology must not be 
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN." + } + ] + }, + "vkCmdSetPrimitiveRestartEnableEXT": { + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdSetPrimitiveRestartEnableEXT-None-04866", + "text": " The extendedDynamicState2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdSetPrimitiveRestartEnableEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetPrimitiveRestartEnableEXT-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdSetPrimitiveRestartEnableEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } ] }, "vkCmdSetPrimitiveTopologyEXT": { @@ -17945,7 +24631,7 @@ "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" } ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ { "vuid": "VUID-vkCmdBindIndexBuffer-indexType-02507", "text": " indexType must not be VK_INDEX_TYPE_NONE_KHR" @@ -17961,8 +24647,12 @@ "vkCmdDraw": { "core": [ { - "vuid": "VUID-vkCmdDraw-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDraw-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDraw-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDraw-None-02691", @@ -17986,7 +24676,7 @@ }, { "vuid": "VUID-vkCmdDraw-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDraw-None-02859", @@ -18014,7 +24704,11 @@ }, { "vuid": "VUID-vkCmdDraw-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDraw-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdDraw-renderPass-02684", @@ -18029,8 +24723,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDraw-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDraw-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDraw-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDraw-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDraw-None-04007", @@ -18100,7 +24802,37 @@ }, { "vuid": "VUID-vkCmdDraw-commandBuffer-02713", - "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" + "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point used by this command must not write to any resource" + }, + { + "vuid": "VUID-vkCmdDraw-commandBuffer-04617", + "text": " If any of the shader stages of the VkPipeline bound to the pipeline bind point used by this command uses the RayQueryKHR capability, then commandBuffer must not be a protected command buffer" + } + ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDraw-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDraw-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDraw-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDraw-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." 
+ }, + { + "vuid": "VUID-vkCmdDraw-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDraw-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." } ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ @@ -18118,27 +24850,113 @@ "(VK_EXT_extended_dynamic_state)": [ { "vuid": "VUID-vkCmdDraw-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" }, { "vuid": "VUID-vkCmdDraw-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" }, { "vuid": "VUID-vkCmdDraw-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the 
current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" }, { "vuid": "VUID-vkCmdDraw-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdDraw-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDraw-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDraw-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDraw-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDraw-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with 
VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDraw-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDraw-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDraw-None-04876", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDraw-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDraw-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDraw-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDraw-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDraw-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDraw-pStrides-04913", + "text": " If the bound graphics pipeline was created 
with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDraw-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDraw-None-04914", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" } ] }, "vkCmdDrawIndexed": { "core": [ { - "vuid": "VUID-vkCmdDrawIndexed-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawIndexed-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawIndexed-None-02691", @@ -18162,7 +24980,7 @@ }, { "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawIndexed-None-02859", @@ -18190,7 +25008,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexed-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdDrawIndexed-renderPass-02684", @@ -18205,8 +25027,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDrawIndexed-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDrawIndexed-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDrawIndexed-None-04007", @@ -18280,7 +25110,37 @@ }, { "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-02713", - "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" + "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point used by this command must not write to any resource" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-04617", + "text": " If any of the shader stages of the VkPipeline bound to the pipeline bind point used by this command uses the RayQueryKHR capability, then commandBuffer must not be a protected command buffer" + } + ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndexed-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." } ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ @@ -18298,27 +25158,113 @@ "(VK_EXT_extended_dynamic_state)": [ { "vuid": "VUID-vkCmdDrawIndexed-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" }, { "vuid": "VUID-vkCmdDrawIndexed-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" }, { "vuid": "VUID-vkCmdDrawIndexed-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and 
vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" }, { "vuid": "VUID-vkCmdDrawIndexed-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from 
VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-04876", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current 
command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-None-04914", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" } ] }, "vkCmdDrawIndirect": { "core": [ { - "vuid": "VUID-vkCmdDrawIndirect-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawIndirect-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawIndirect-None-02691", @@ -18342,7 +25288,7 @@ }, { "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawIndirect-None-02859", @@ -18370,7 +25316,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndirect-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." }, { "vuid": "VUID-vkCmdDrawIndirect-renderPass-02684", @@ -18385,8 +25335,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDrawIndirect-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDrawIndirect-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDrawIndirect-None-04007", @@ -18499,6 +25457,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndirect-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-02688", @@ -18514,19 +25498,101 @@ "(VK_EXT_extended_dynamic_state)": [ { "vuid": "VUID-vkCmdDrawIndirect-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" }, { "vuid": "VUID-vkCmdDrawIndirect-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" }, { "vuid": "VUID-vkCmdDrawIndirect-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" }, { "vuid": 
"VUID-vkCmdDrawIndirect-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + 
"(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-04876", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the 
VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-None-04914", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" } ] }, @@ -18543,10 +25609,14 @@ ] }, "vkCmdDrawIndirectCount": { - "core": [ + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)": [ { - "vuid": "VUID-vkCmdDrawIndirectCount-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawIndirectCount-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-02691", @@ -18570,7 +25640,7 @@ }, { "vuid": "VUID-vkCmdDrawIndirectCount-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-02859", @@ -18598,7 +25668,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdDrawIndirectCount-renderPass-02684", @@ -18613,8 +25687,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDrawIndirectCount-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDrawIndirectCount-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-04007", @@ -18656,6 +25738,10 @@ "vuid": "VUID-vkCmdDrawIndirectCount-countBuffer-02717", "text": " The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount" }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-countBufferOffset-04129", + "text": " (countBufferOffset + sizeof(uint32_t)) must be less than or equal to the size of countBuffer" + }, { "vuid": "VUID-vkCmdDrawIndirectCount-stride-03110", "text": " stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand)" @@ -18671,83 +25757,7 @@ { "vuid": "VUID-vkCmdDrawIndirectCount-countBuffer-03122", "text": " If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" }, - { - 
"vuid": "VUID-vkCmdDrawIndirectCount-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" - }, - { - "vuid": "VUID-vkCmdDrawIndirectCount-commandBuffer-02711", - "text": " commandBuffer must not be a protected command buffer" - } - ], - "(VK_VERSION_1_1,VK_KHR_multiview)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-maxMultiviewInstanceIndex-02688", - "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" - } - ], - "(VK_EXT_sample_locations)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-sampleLocationsEnable-02689", - "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" - } - ], - "(VK_EXT_extended_dynamic_state)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawIndirectCount-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw 
command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" - }, - { - "vuid": "VUID-vkCmdDrawIndirectCount-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" - } - ], - "(VK_VERSION_1_2)": [ - { - "vuid": "VUID-vkCmdDrawIndirectCount-None-02836", - "text": " If drawIndirectCount is not enabled this function must not be used" - } - ], - "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)": [ { "vuid": "VUID-vkCmdDrawIndirectCount-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -18776,13 +25786,199 @@ "vuid": "VUID-vkCmdDrawIndirectCount-commonparent", "text": " Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice" } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by 
the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-commandBuffer-02711", + "text": " commandBuffer must not be a protected command buffer" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
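The SampledType rules quoted above (VUID-vkCmdDrawIndirectCount-SampledType-04470 through -04473) tie the channel width of a resource's VkFormat to the Width declared on the consuming OpTypeImage. A minimal host-side sketch of that mapping; the helper name is hypothetical, not anything from this patch:

#include <vulkan/vulkan.h>

// Formats with 64-bit channels must be read through OpTypeImage declarations
// whose SampledType has Width 64; every narrower format requires Width 32
// (VUIDs 04470-04473 above). The R64G64* family is 64-bit as well, but only
// the single-channel integer formats can be used with image atomics.
constexpr bool FormatHas64BitChannels(VkFormat format) {
    switch (format) {
    case VK_FORMAT_R64_UINT:
    case VK_FORMAT_R64_SINT:
    case VK_FORMAT_R64_SFLOAT:
        return true;
    default:
        return false;
    }
}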
+ } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-maxMultiviewInstanceIndex-02688", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-sampleLocationsEnable-02689", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-03417", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-scissorCount-03418", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-03419", + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-primitiveTopology-03420", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with 
VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-04876", + "text": " If the bound graphics pipeline state was created with the 
VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-04914", + "text": " If the bound graphics pipeline state was created with the 
VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_2)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-04445", + "text": " If drawIndirectCount is not enabled this function must not be used" + } ] }, "vkCmdDrawIndexedIndirect": { "core": [ { - "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawIndexedIndirect-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02691", @@ -18806,7 +26002,7 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02859", @@ -18834,7 +26030,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
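Two of the vkCmdDrawIndirectCount rules quoted above translate directly into host code: the Vulkan 1.2 drawIndirectCount feature gate (VUID-vkCmdDrawIndirectCount-None-04445) and the countBufferOffset bound (VUID-vkCmdDrawIndirectCount-countBufferOffset-04129). A sketch under assumed placeholder arguments, which also keeps the with-count viewport and scissor counts matched as VUID 03419 requires:

#include <cassert>
#include <vulkan/vulkan.h>

void RecordCountDraw(VkPhysicalDevice physical_device, VkCommandBuffer cmd,
                     VkBuffer indirect_buffer, VkBuffer count_buffer,
                     VkDeviceSize count_buffer_offset,
                     VkDeviceSize count_buffer_size, uint32_t max_draw_count) {
    VkPhysicalDeviceVulkan12Features features12{};
    features12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
    VkPhysicalDeviceFeatures2 features2{};
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features2.pNext = &features12;
    vkGetPhysicalDeviceFeatures2(physical_device, &features2);

    if (features12.drawIndirectCount != VK_TRUE) {
        return; // VUID 04445: the feature gates this entry point.
    }

    // VUID 03419: with both with-count dynamic states enabled, the viewport
    // and scissor counts must match.
    const VkViewport viewport{0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f};
    const VkRect2D scissor{{0, 0}, {1280, 720}};
    vkCmdSetViewportWithCountEXT(cmd, 1, &viewport);
    vkCmdSetScissorWithCountEXT(cmd, 1, &scissor);

    // VUID 04129: the uint32_t draw count must lie inside count_buffer.
    assert(count_buffer_offset + sizeof(uint32_t) <= count_buffer_size);
    vkCmdDrawIndirectCount(cmd, indirect_buffer, /*offset=*/0, count_buffer,
                           count_buffer_offset, max_draw_count,
                           sizeof(VkDrawIndirectCommand));
}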
}, { "vuid": "VUID-vkCmdDrawIndexedIndirect-renderPass-02684", @@ -18849,8 +26049,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04007", @@ -18963,6 +26171,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-02688", @@ -18978,25 +26212,101 @@ "(VK_EXT_extended_dynamic_state)": [ { "vuid": "VUID-vkCmdDrawIndexedIndirect-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" }, { 
"vuid": "VUID-vkCmdDrawIndexedIndirect-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" } ], - "(VK_VERSION_1_2)": [ + "(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ { - "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02837", - "text": " If drawIndirectCount is not enabled this function must not be used" + "vuid": "VUID-vkCmdDrawIndexedIndirect-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have 
been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04876", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer 
prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04914", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" } ] }, @@ -19017,10 +26327,14 @@ ] }, "vkCmdDrawIndexedIndirectCount": { - "core": [ + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)": [ { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02691", @@ -19044,7 +26358,7 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02859", @@ -19072,7 +26386,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
+ "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-renderPass-02684", @@ -19087,8 +26405,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04007", @@ -19130,6 +26456,10 @@ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-countBuffer-02717", "text": " The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount" }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-countBufferOffset-04129", + "text": " (countBufferOffset + sizeof(uint32_t)) must be less than or equal to the size of countBuffer" + }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-stride-03142", "text": " stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand)" @@ -19145,77 +26475,7 @@ { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-countBuffer-03154", "text": " If count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as 
a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" }, - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" - }, - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-commandBuffer-02711", - "text": " commandBuffer must not be a protected command buffer" - } - ], - "(VK_VERSION_1_1,VK_KHR_multiview)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-maxMultiviewInstanceIndex-02688", - "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" - } - ], - "(VK_EXT_sample_locations)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-sampleLocationsEnable-02689", - "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" - } - ], - "(VK_EXT_extended_dynamic_state)": [ - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then 
then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" - }, - { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" - } - ], - "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)": [ { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -19244,13 +26504,199 @@ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-commonparent", "text": " Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice" } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + 
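The filterCubic/filterCubicMinmax rules above (VUIDs 02694/02695) are answered by a single vkGetPhysicalDeviceImageFormatProperties2 query with the two VK_EXT_filter_cubic structures chained in. A sketch, assuming a device that exposes VK_EXT_filter_cubic and a sampled-image use case:

#include <vulkan/vulkan.h>

bool SupportsCubicSampling(VkPhysicalDevice physical_device, VkFormat format,
                           VkImageViewType view_type) {
    VkPhysicalDeviceImageViewImageFormatInfoEXT view_info{};
    view_info.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT;
    view_info.imageViewType = view_type;

    VkPhysicalDeviceImageFormatInfo2 format_info{};
    format_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
    format_info.pNext = &view_info;
    format_info.format = format;
    format_info.type = VK_IMAGE_TYPE_2D;
    format_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    format_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;

    VkFilterCubicImageViewImageFormatPropertiesEXT cubic_props{};
    cubic_props.sType =
        VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT;

    VkImageFormatProperties2 props{};
    props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
    props.pNext = &cubic_props;

    // VUID 02694: only sample with VK_FILTER_CUBIC_EXT when filterCubic is
    // reported for this view type/format; VK_SAMPLER_REDUCTION_MODE_MIN/MAX
    // additionally require filterCubicMinmax (VUID 02695).
    return vkGetPhysicalDeviceImageFormatProperties2(
               physical_device, &format_info, &props) == VK_SUCCESS &&
           cubic_props.filterCubic == VK_TRUE;
}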
"(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-commandBuffer-02711", + "text": " commandBuffer must not be a protected command buffer" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-maxMultiviewInstanceIndex-02688", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-sampleLocationsEnable-02689", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-03417", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-scissorCount-03418", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-03419", + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-primitiveTopology-03420", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been 
created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04876", + "text": " If the 
bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": 
"VUID-vkCmdDrawIndexedIndirectCount-None-04914", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + } + ], + "(VK_VERSION_1_2,VK_KHR_draw_indirect_count)+(VK_VERSION_1_2)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04445", + "text": " If drawIndirectCount is not enabled this function must not be used" + } ] }, "vkCmdDrawIndirectByteCountEXT": { - "core": [ + "(VK_EXT_transform_feedback)": [ { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02691", @@ -19274,7 +26720,7 @@ }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02859", @@ -19302,7 +26748,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02684", @@ -19317,8 +26767,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04007", @@ -19344,80 +26802,18 @@ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-vertexStride-02289", "text": " vertexStride must be greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxTransformFeedbackBufferDataStride" }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-04567", + "text": " If counterBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-02290", "text": " counterBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" }, { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode 
of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-counterBufferOffset-04568", + "text": " counterBufferOffset must be a multiple of 4" }, - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02646", - "text": " commandBuffer must not be a protected command buffer" - } - ], - "(VK_VERSION_1_1,VK_KHR_multiview)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688", - "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" - } - ], - "(VK_EXT_sample_locations)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02689", - "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" - } - ], - "(VK_EXT_extended_dynamic_state)": [ - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current 
command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" - }, - { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" - } - ], - "(VK_EXT_transform_feedback)": [ { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -19442,6 +26838,182 @@ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commonparent", "text": " Both of commandBuffer, and counterBuffer must have been created, allocated, or retrieved from the same VkDevice" } + ], + "(VK_EXT_transform_feedback)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_EXT_transform_feedback)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_EXT_transform_feedback)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_EXT_transform_feedback)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_EXT_transform_feedback)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + }, + { + "vuid": 
"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02646", + "text": " commandBuffer must not be a protected command buffer" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ } + ], + "(VK_EXT_transform_feedback)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02689", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-03417", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-scissorCount-03418", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-03419", + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-primitiveTopology-03420", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with 
VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04876", + "text": " If the bound graphics pipeline state was created with the 
VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_EXT_transform_feedback)+(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_EXT_transform_feedback)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04914", + "text": " If the bound graphics pipeline state was created with the 
VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + } ] }, "vkCmdBeginConditionalRenderingEXT": { @@ -19533,10 +27105,14 @@ ] }, "vkCmdDrawMeshTasksNV": { - "core": [ + "(VK_NV_mesh_shader)": [ { - "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawMeshTasksNV-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02691", @@ -19560,7 +27136,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02859", @@ -19588,7 +27164,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdDrawMeshTasksNV-renderPass-02684", @@ -19603,79 +27183,21 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-taskCount-02119", "text": " taskCount must be less than or equal to VkPhysicalDeviceMeshShaderPropertiesNV::maxDrawMeshTasksCount" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" }, - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result 
of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" - } - ], - "(VK_VERSION_1_1,VK_KHR_multiview)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688", - "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" - } - ], - "(VK_EXT_sample_locations)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02689", - "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" - } - ], - "(VK_EXT_extended_dynamic_state)": [ - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" - }, - { - "vuid": "VUID-vkCmdDrawMeshTasksNV-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" - } - ], - "(VK_NV_mesh_shader)": [ { "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -19692,13 +27214,189 @@ "vuid": "VUID-vkCmdDrawMeshTasksNV-renderpass", 
"text": " This command must only be called inside of a render pass instance" } + ], + "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_NV_mesh_shader)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_NV_mesh_shader)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." 
+ }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], + "(VK_NV_mesh_shader)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02689", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-03417", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-scissorCount-03418", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-03419", + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-primitiveTopology-03420", + "text": " If the bound 
graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline 
must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04876", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_NV_mesh_shader)+(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + 
"(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_NV_mesh_shader)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04914", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + } ] }, "vkCmdDrawMeshTasksIndirectNV": { - "core": [ + "(VK_NV_mesh_shader)": [ { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02691", @@ -19722,7 +27420,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02859", @@ -19750,7 +27448,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684",
@@ -19765,8 +27467,16 @@
          "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set"
        },
        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02687",
-          "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command"
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04584",
+          "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-blendEnable-04727",
+          "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-rasterizationSamples-04740",
+          "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments"
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708",
@@ -19799,77 +27509,7 @@
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157",
          "text": " If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer"
-        }
-      ],
-      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02692",
-          "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT"
-        }
-      ],
-      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02693",
-          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY"
-        }
-      ],
-      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694",
-          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2"
        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-filterCubicMinmax-02695",
-          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2"
-        }
-      ],
-      "(VK_NV_corner_sampled_image)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-flags-02696",
-          "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"
-        }
-      ],
-      "(VK_VERSION_1_1)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02707",
-          "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02711",
-          "text": " commandBuffer must not be a protected command buffer"
-        }
-      ],
-      "(VK_VERSION_1_1,VK_KHR_multiview)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688",
-          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex"
-        }
-      ],
-      "(VK_EXT_sample_locations)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02689",
-          "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set"
-        }
-      ],
-      "(VK_EXT_extended_dynamic_state)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-03417",
-          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-scissorCount-03418",
-          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-03419",
-          "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-primitiveTopology-03420",
-          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state"
-        }
-      ],
-      "(VK_NV_mesh_shader)": [
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter",
          "text": " commandBuffer must be a valid VkCommandBuffer handle"
@@ -19894,6 +27534,182 @@
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commonparent",
          "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice"
        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02692",
+          "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02693",
+          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694",
+          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-filterCubicMinmax-02695",
+          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-flags-02696",
+          "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02707",
+          "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02711",
+          "text": " commandBuffer must not be a protected command buffer"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_shader_image_atomic_int64)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04470",
+          "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04471",
+          "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04472",
+          "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04473",
+          "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-sparseImageInt64Atomics-04474",
+          "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-sparseImageInt64Atomics-04475",
+          "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-03417",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-scissorCount-03418",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-03419",
+          "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-primitiveTopology-03420",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-04137",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-04138",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-04139",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-viewportCount-04140",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-VkPipelineVieportCreateInfo-04141",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-VkPipelineVieportCreateInfo-04142",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state2)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04875",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04876",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04877",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-logicOp-04878",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04879",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-primitiveFragmentShadingRateWithMultipleViewports-04552",
+          "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04912",
+          "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-pStrides-04913",
+          "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-pStrides-04884",
+          "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04914",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command"
+        }
+      }
      ]
    },
    "VkDrawMeshTasksIndirectCommandNV": {
@@ -19905,10 +27721,14 @@
      ]
    },
    "vkCmdDrawMeshTasksIndirectCountNV": {
-      "core": [
+      "(VK_NV_mesh_shader)": [
        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02690",
-          "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT"
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-magFilter-04553",
+          "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-mipmapMode-04770",
+          "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT"
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02691",
@@ -19932,7 +27752,7 @@
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701",
-          "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic"
+          "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set"
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02859",
@@ -19960,7 +27780,11 @@
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04115",
-          "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format."
+          "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-OpImageWrite-04469",
+          "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format."
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684",
@@ -19975,8 +27799,16 @@
          "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set"
        },
        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02687",
-          "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command"
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04584",
+          "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-blendEnable-04727",
+          "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-rasterizationSamples-04740",
+          "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments"
        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708",
@@ -20006,6 +27838,10 @@
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02717",
          "text": " The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount"
        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-04129",
+          "text": " (countBufferOffset + sizeof(uint32_t)) must be less than or equal to the size of countBuffer"
+        },
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182",
          "text": " stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawMeshTasksIndirectCommandNV)"
@@ -20021,77 +27857,7 @@
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02192",
          "text": " If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer"
-        }
-      ],
-      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692",
-          "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT"
-        }
-      ],
-      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693",
-          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY"
-        }
-      ],
-      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694",
-          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2"
        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubicMinmax-02695",
-          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2"
-        }
-      ],
-      "(VK_NV_corner_sampled_image)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-flags-02696",
-          "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"
-        }
-      ],
-      "(VK_VERSION_1_1)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02707",
-          "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02711",
-          "text": " commandBuffer must not be a protected command buffer"
-        }
-      ],
-      "(VK_VERSION_1_1,VK_KHR_multiview)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688",
-          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex"
-        }
-      ],
-      "(VK_EXT_sample_locations)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02689",
-          "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set"
-        }
-      ],
-      "(VK_EXT_extended_dynamic_state)": [
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-03417",
-          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-scissorCount-03418",
-          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-03419",
-          "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT"
-        },
-        {
-          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-primitiveTopology-03420",
-          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state"
-        }
-      ],
-      "(VK_NV_mesh_shader)": [
        {
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter",
          "text": " commandBuffer must be a valid VkCommandBuffer handle"
@@ -20120,6 +27886,188 @@
          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent",
          "text": " Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice"
        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692",
+          "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693",
+          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694",
+          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubicMinmax-02695",
+          "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-flags-02696",
+          "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02707",
+          "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02711",
+          "text": " commandBuffer must not be a protected command buffer"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_shader_image_atomic_int64)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04470",
+          "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04471",
+          "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04472",
+          "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04473",
+          "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-sparseImageInt64Atomics-04474",
+          "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-sparseImageInt64Atomics-04475",
+          "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-03417",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-scissorCount-03418",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-03419",
+          "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-primitiveTopology-03420",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-04137",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-04138",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-04139",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-viewportCount-04140",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-VkPipelineVieportCreateInfo-04141",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-VkPipelineVieportCreateInfo-04142",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state2)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04875",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04876",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04877",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-logicOp-04878",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04879",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-primitiveFragmentShadingRateWithMultipleViewports-04552",
+          "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04912",
+          "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command"
        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-pStrides-04913",
+          "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-pStrides-04884",
+          "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04914",
+          "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_2)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04445",
+          "text": " If drawIndirectCount is not enabled this function must not be used"
+        }
      ]
    },
    "VkPipelineVertexInputStateCreateInfo": {
@@ -20184,6 +28132,12 @@
          "vuid": "VUID-VkVertexInputBindingDescription-inputRate-parameter",
          "text": " inputRate must be a valid VkVertexInputRate value"
        }
+      ],
+      "(VK_KHR_portability_subset)": [
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription-stride-04456",
+          "text": " If the [VK_KHR_portability_subset] extension is enabled, stride must be a multiple of, and at least as large as, VkPhysicalDevicePortabilitySubsetPropertiesKHR::minVertexInputBindingStrideAlignment."
+        }
      ]
    },
    "VkVertexInputAttributeDescription": {
@@ -20208,6 +28162,130 @@
          "vuid": "VUID-VkVertexInputAttributeDescription-format-parameter",
          "text": " format must be a valid VkFormat value"
        }
+      ],
+      "(VK_KHR_portability_subset)": [
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription-vertexAttributeAccessBeyondStride-04457",
+          "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::vertexAttributeAccessBeyondStride is VK_FALSE, the sum of offset plus the size of the vertex attribute data described by format must not be greater than stride in the VkVertexInputBindingDescription referenced in binding."
+        }
+      ]
+    },
+    "vkCmdSetVertexInputEXT": {
+      "(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-None-04790",
+          "text": " The vertexInputDynamicState feature must be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-vertexBindingDescriptionCount-04791",
+          "text": " vertexBindingDescriptionCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-vertexAttributeDescriptionCount-04792",
+          "text": " vertexAttributeDescriptionCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributes"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-binding-04793",
+          "text": " For every binding specified by each element of pVertexAttributeDescriptions, a VkVertexInputBindingDescription2EXT must exist in pVertexBindingDescriptions with the same value of binding"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-pVertexBindingDescriptions-04794",
+          "text": " All elements of pVertexBindingDescriptions must describe distinct binding numbers"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-pVertexAttributeDescriptions-04795",
+          "text": " All elements of pVertexAttributeDescriptions must describe distinct attribute locations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-commandBuffer-parameter",
+          "text": " commandBuffer must be a valid VkCommandBuffer handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-pVertexBindingDescriptions-parameter",
+          "text": " If vertexBindingDescriptionCount is not 0, pVertexBindingDescriptions must be a valid pointer to an array of vertexBindingDescriptionCount valid VkVertexInputBindingDescription2EXT structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-pVertexAttributeDescriptions-parameter",
+          "text": " If vertexAttributeDescriptionCount is not 0, pVertexAttributeDescriptions must be a valid pointer to an array of vertexAttributeDescriptionCount valid VkVertexInputAttributeDescription2EXT structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-commandBuffer-recording",
+          "text": " commandBuffer must be in the recording state"
+        },
+        {
+          "vuid": "VUID-vkCmdSetVertexInputEXT-commandBuffer-cmdpool",
+          "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations"
+        }
+      ]
+    },
+    "VkVertexInputBindingDescription2EXT": {
+      "(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-binding-04796",
+          "text": " binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-stride-04797",
+          "text": " stride must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindingStride"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-divisor-04798",
+          "text": " If the vertexAttributeInstanceRateZeroDivisor feature is not enabled, divisor must not be 0"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-divisor-04799",
+          "text": " If the vertexAttributeInstanceRateDivisor feature is not enabled, divisor must be 1"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-divisor-04800",
+          "text": " divisor must be a value between 0 and VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::maxVertexAttribDivisor, inclusive"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-divisor-04801",
+          "text": " If divisor is not 1 then inputRate must be of type VK_VERTEX_INPUT_RATE_INSTANCE"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-sType-sType",
+          "text": " sType must be VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription2EXT-inputRate-parameter",
+          "text": " inputRate must be a valid VkVertexInputRate value"
+        }
+      ]
+    },
+    "VkVertexInputAttributeDescription2EXT": {
+      "(VK_EXT_vertex_input_dynamic_state)": [
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription2EXT-location-04802",
+          "text": " location must be less than VkPhysicalDeviceLimits::maxVertexInputAttributes"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription2EXT-binding-04803",
+          "text": " binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription2EXT-offset-04804",
+          "text": " offset must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributeOffset"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription2EXT-format-04805",
+          "text": " format must be allowed as a vertex buffer format, as specified by the VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription2EXT-sType-sType",
+          "text": " sType must be VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription2EXT-format-parameter",
+          "text": " format must be a valid VkFormat value"
+        }
+      ],
+      "(VK_EXT_vertex_input_dynamic_state)+(VK_KHR_portability_subset)": [
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription2EXT-vertexAttributeAccessBeyondStride-04806",
+          "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::vertexAttributeAccessBeyondStride is VK_FALSE, the sum of offset plus the size of the vertex attribute data described by format must not be greater than stride in the VkVertexInputBindingDescription2EXT referenced in binding."
+        }
      ]
    },
    "vkCmdBindVertexBuffers": {
@@ -20302,17 +28380,13 @@
          "vuid": "VUID-vkCmdBindVertexBuffers2EXT-pBuffers-04111",
          "text": " If the nullDescriptor feature is not enabled, all elements of pBuffers must not be VK_NULL_HANDLE"
        },
-        {
-          "vuid": "VUID-vkCmdBindVertexBuffers2EXT-pStrides-03361",
-          "text": " If the bound pipeline state object was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled then pStrides must not be NULL, otherwise pStrides must be NULL"
-        },
        {
          "vuid": "VUID-vkCmdBindVertexBuffers2EXT-pStrides-03362",
          "text": " If pStrides is not NULL each element of pStrides must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindingStride"
        },
        {
          "vuid": "VUID-vkCmdBindVertexBuffers2EXT-pStrides-03363",
-          "text": " If pStrides is not NULL each element of pStrides must be greater than or equal to the maximum extent of of all vertex input attributes fetched from the corresponding binding, where the extent is calculated as the VkVertexInputAttributeDescription::offset plus VkVertexInputAttributeDescription::format size"
+          "text": " If pStrides is not NULL each element of pStrides must be greater than or equal to the maximum extent of all vertex input attributes fetched from the corresponding binding, where the extent is calculated as the VkVertexInputAttributeDescription::offset plus VkVertexInputAttributeDescription::format size"
        },
        {
          "vuid": "VUID-vkCmdBindVertexBuffers2EXT-commandBuffer-parameter",
@@ -20466,11 +28540,11 @@
        },
        {
          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-02362",
-          "text": " All elements of pSizes must be less than or equal to the size of the corresponding buffer in pBuffers"
+          "text": " All elements of pSizes must be either VK_WHOLE_SIZE, or less than or equal to the size of the corresponding buffer in pBuffers"
        },
        {
          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02363",
-          "text": " All elements of pOffsets plus pSizes, where the pSizes, element is not VK_WHOLE_SIZE, must be less than or equal to the size of the corresponding element in pBuffers"
+          "text": " All elements of pOffsets plus pSizes, where the pSizes, element is not VK_WHOLE_SIZE, must be less than or equal to the size of the corresponding buffer in pBuffers"
        },
        {
          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02364",
@@ -20544,6 +28618,10 @@
        {
          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffers-02372",
          "text": " For each buffer handle in the pCounterBuffers array that is not VK_NULL_HANDLE it must have been created with a usage value containing VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT"
        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-None-04128",
+          "text": " The last vertex processing stage of the bound graphics pipeline must have been declared with the Xfb execution mode"
+        },
        {
          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-parameter",
          "text": " commandBuffer must be a valid VkCommandBuffer handle"
@@ -20640,7 +28718,7 @@
      "(VK_NV_viewport_swizzle)": [
        {
          "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215",
-          "text": " viewportCount must match the viewportCount set in VkPipelineViewportStateCreateInfo"
+          "text": " viewportCount must be greater than or equal to the viewportCount set in VkPipelineViewportStateCreateInfo"
        },
        {
          "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-sType-sType",
@@ -20680,6 +28758,22 @@
        }
      ]
    },
+    "VkPipelineRasterizationProvokingVertexStateCreateInfoEXT": {
+      "(VK_EXT_provoking_vertex)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationProvokingVertexStateCreateInfoEXT-provokingVertexMode-04883",
+          "text": " If provokingVertexMode is VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT, then the provokingVertexLast feature must be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationProvokingVertexStateCreateInfoEXT-sType-sType",
+          "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationProvokingVertexStateCreateInfoEXT-provokingVertexMode-parameter",
+          "text": " provokingVertexMode must be a valid VkProvokingVertexModeEXT value"
+        }
+      ]
+    },
    "VkPipelineViewportWScalingStateCreateInfoNV": {
      "(VK_NV_clip_space_w_scaling)": [
        {
@@ -20694,10 +28788,6 @@
    },
    "vkCmdSetViewportWScalingNV": {
      "(VK_NV_clip_space_w_scaling)": [
-        {
-          "vuid": "VUID-vkCmdSetViewportWScalingNV-firstViewport-01323",
-          "text": " firstViewport must be less than VkPhysicalDeviceLimits::maxViewports"
-        },
        {
          "vuid": "VUID-vkCmdSetViewportWScalingNV-firstViewport-01324",
          "text": " The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive"
@@ -20728,23 +28818,19 @@
      "core": [
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
-          "text": " If the multiple viewports feature is not enabled, viewportCount must be 1"
+          "text": " If the multiple viewports feature is not enabled, viewportCount must not be greater than 1"
        },
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
-          "text": " If the multiple viewports feature is not enabled, scissorCount must be 1"
+          "text": " If the multiple viewports feature is not enabled, scissorCount must not be greater than 1"
        },
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
-          "text": " viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive"
+          "text": " viewportCount must be less than or equal to VkPhysicalDeviceLimits::maxViewports"
        },
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
-          "text": " scissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive"
-        },
-        {
-          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
-          "text": " scissorCount and viewportCount must be identical"
+          "text": " scissorCount must be less than or equal to VkPhysicalDeviceLimits::maxViewports"
        },
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-x-02821",
@@ -20773,6 +28859,12 @@
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-flags-zerobitmask",
          "text": " flags must be 0"
+        }
+      ],
+      "!(VK_EXT_extended_dynamic_state)": [
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
+          "text": " scissorCount and viewportCount must be identical"
        },
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
          "text": " viewportCount must be greater than 0"
@@ -20783,10 +28875,24 @@
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
          "text": " scissorCount must be greater than 0"
        }
      ],
+      "(VK_EXT_extended_dynamic_state)": [
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-04134",
+          "text": " If the graphics pipeline is being created without VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT set then scissorCount and viewportCount must be identical"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-04135",
+          "text": " If the graphics pipeline is being created with VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT set then viewportCount must be 0, otherwise it must be greater than 0"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-04136",
+          "text": " If the graphics pipeline is being created with VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT set then scissorCount must be 0, otherwise it must be greater than 0"
+        }
+      ],
      "(VK_NV_clip_space_w_scaling)": [
        {
          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportWScalingEnable-01726",
-          "text": " If the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure included in the pNext chain is VK_TRUE, the viewportCount member of the VkPipelineViewportWScalingStateCreateInfoNV structure must be equal to viewportCount"
+          "text": " If the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure included in the pNext chain is VK_TRUE, the viewportCount member of the VkPipelineViewportWScalingStateCreateInfoNV structure must be greater than or equal to VkPipelineViewportStateCreateInfo::viewportCount"
        }
      ]
    },
@@ -20824,6 +28930,12 @@
          "vuid": "VUID-vkCmdSetViewportWithCountEXT-viewportCount-arraylength",
          "text": " viewportCount must be greater than 0"
        }
+      ],
+      "(VK_EXT_extended_dynamic_state)+(VK_NV_inherited_viewport_scissor)": [
+        {
+          "vuid": "VUID-vkCmdSetViewportWithCountEXT-commandBuffer-04819",
+          "text": " commandBuffer must not have VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled."
+        }
      ]
    },
    "vkCmdSetScissorWithCountEXT": {
@@ -20872,14 +28984,16 @@
          "vuid": "VUID-vkCmdSetScissorWithCountEXT-scissorCount-arraylength",
          "text": " scissorCount must be greater than 0"
        }
+      ],
+      "(VK_EXT_extended_dynamic_state)+(VK_NV_inherited_viewport_scissor)": [
+        {
+          "vuid": "VUID-vkCmdSetScissorWithCountEXT-commandBuffer-04820",
+          "text": " commandBuffer must not have VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled."
+        }
      ]
    },
    "vkCmdSetViewport": {
      "core": [
-        {
-          "vuid": "VUID-vkCmdSetViewport-firstViewport-01222",
-          "text": " firstViewport must be less than VkPhysicalDeviceLimits::maxViewports"
-        },
        {
          "vuid": "VUID-vkCmdSetViewport-firstViewport-01223",
          "text": " The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive"
@@ -20912,6 +29026,12 @@
          "vuid": "VUID-vkCmdSetViewport-viewportCount-arraylength",
          "text": " viewportCount must be greater than 0"
        }
+      ],
+      "(VK_NV_inherited_viewport_scissor)": [
+        {
+          "vuid": "VUID-vkCmdSetViewport-commandBuffer-04821",
+          "text": " commandBuffer must not have VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled."
+        }
      ]
    },
    "VkViewport": {
@@ -20994,7 +29114,7 @@
        },
        {
          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext",
-          "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineRasterizationConservativeStateCreateInfoEXT, VkPipelineRasterizationDepthClipStateCreateInfoEXT, VkPipelineRasterizationLineStateCreateInfoEXT, VkPipelineRasterizationStateRasterizationOrderAMD, or VkPipelineRasterizationStateStreamCreateInfoEXT"
+          "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineRasterizationConservativeStateCreateInfoEXT, VkPipelineRasterizationDepthClipStateCreateInfoEXT, VkPipelineRasterizationLineStateCreateInfoEXT, VkPipelineRasterizationProvokingVertexStateCreateInfoEXT, VkPipelineRasterizationStateRasterizationOrderAMD, or VkPipelineRasterizationStateStreamCreateInfoEXT"
        },
        {
          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-sType-unique",
@@ -21032,6 +29152,12 @@
          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414",
          "text": " If the VK_NV_fill_rectangle extension is not enabled, polygonMode must not be VK_POLYGON_MODE_FILL_RECTANGLE_NV"
        }
+      ],
+      "(VK_KHR_portability_subset)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-pointPolygons-04458",
+          "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::pointPolygons is VK_FALSE, and rasterizerDiscardEnable is VK_FALSE, polygonMode must not be VK_POLYGON_MODE_POINT."
+        }
      ]
    },
    "VkPipelineRasterizationDepthClipStateCreateInfoEXT": {
@@ -21088,7 +29214,27 @@
      "(VK_NV_framebuffer_mixed_samples)": [
        {
          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415",
-          "text": " If the VK_NV_framebuffer_mixed_samples extension is enabled, and if the subpass has any color attachments and rasterizationSamples is greater than the number of color samples, then sampleShadingEnable must be VK_FALSE"
+          "text": " If the VK_NV_framebuffer_mixed_samples extension is enabled, and if the subpass has any color attachments and rasterizationSamples is greater than the number of color samples, then sampleShadingEnable must be VK_FALSE"
        }
      ]
    },
+    "vkCmdSetRasterizerDiscardEnableEXT": {
+      "(VK_EXT_extended_dynamic_state2)": [
+        {
+          "vuid": "VUID-vkCmdSetRasterizerDiscardEnableEXT-None-04871",
+          "text": " The extendedDynamicState2 feature must be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetRasterizerDiscardEnableEXT-commandBuffer-parameter",
+          "text": " commandBuffer must be a valid VkCommandBuffer handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetRasterizerDiscardEnableEXT-commandBuffer-recording",
+          "text": " commandBuffer must be in the recording state"
+        },
+        {
+          "vuid": "VUID-vkCmdSetRasterizerDiscardEnableEXT-commandBuffer-cmdpool",
+          "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations"
+        }
+      ]
+    },
@@ -21154,10 +29300,6 @@
          "vuid": "VUID-VkSampleLocationsInfoEXT-sType-sType",
          "text": " sType must be VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT"
        },
-        {
-          "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter",
-          "text": " If sampleLocationsPerPixel is not 0, sampleLocationsPerPixel must be a valid VkSampleCountFlagBits value"
-        },
        {
          "vuid": "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter",
          "text": " If sampleLocationsCount is not 0, pSampleLocations must be a valid pointer to an array of
sampleLocationsCount VkSampleLocationEXT structures" @@ -21192,6 +29334,194 @@ } ] }, + "vkGetPhysicalDeviceFragmentShadingRatesKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-pFragmentShadingRateCount-parameter", + "text": " pFragmentShadingRateCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-pFragmentShadingRates-parameter", + "text": " If the value referenced by pFragmentShadingRateCount is not 0, and pFragmentShadingRates is not NULL, pFragmentShadingRates must be a valid pointer to an array of pFragmentShadingRateCount VkPhysicalDeviceFragmentShadingRateKHR structures" + } + ] + }, + "VkPhysicalDeviceFragmentShadingRateKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkPhysicalDeviceFragmentShadingRateKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR" + }, + { + "vuid": "VUID-VkPhysicalDeviceFragmentShadingRateKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkPipelineFragmentShadingRateStateCreateInfoKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkPipelineFragmentShadingRateStateCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkPipelineFragmentShadingRateStateCreateInfoKHR-combinerOps-parameter", + "text": " Any given element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" + } + ] + }, + "vkCmdSetFragmentShadingRateKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04507", + "text": " If pipelineFragmentShadingRate is not enabled, pFragmentSize->width must be 1" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04508", + "text": " If pipelineFragmentShadingRate is not enabled, pFragmentSize->height must be 1" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04509", + "text": " One of pipelineFragmentShadingRate, primitiveFragmentShadingRate, or attachmentFragmentShadingRate must be enabled" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-primitiveFragmentShadingRate-04510", + "text": " If the primitiveFragmentShadingRate feature is not enabled, combinerOps[0] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-attachmentFragmentShadingRate-04511", + "text": " If the attachmentFragmentShadingRate feature is not enabled, combinerOps[1] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-fragmentSizeNonTrivialCombinerOps-04512", + "text": " If the fragmentSizeNonTrivialCombinerOps limit is not supported, elements of combinerOps must be either VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR or VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04513", + "text": " pFragmentSize->width must be greater than or equal to 1" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04514", + "text": " pFragmentSize->height must be greater than or equal to 1" + }, + { + "vuid": 
"VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04515", + "text": " pFragmentSize->width must be a power-of-two value" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04516", + "text": " pFragmentSize->height must be a power-of-two value" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04517", + "text": " pFragmentSize->width must be less than or equal to 4" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04518", + "text": " pFragmentSize->height must be less than or equal to 4" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-parameter", + "text": " pFragmentSize must be a valid pointer to a valid VkExtent2D structure" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-combinerOps-parameter", + "text": " Any given element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "VkPipelineFragmentShadingRateEnumStateCreateInfoNV": { + "(VK_NV_fragment_shading_rate_enums)": [ + { + "vuid": "VUID-VkPipelineFragmentShadingRateEnumStateCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkPipelineFragmentShadingRateEnumStateCreateInfoNV-shadingRateType-parameter", + "text": " shadingRateType must be a valid VkFragmentShadingRateTypeNV value" + }, + { + "vuid": "VUID-VkPipelineFragmentShadingRateEnumStateCreateInfoNV-shadingRate-parameter", + "text": " shadingRate must be a valid VkFragmentShadingRateNV value" + }, + { + "vuid": "VUID-VkPipelineFragmentShadingRateEnumStateCreateInfoNV-combinerOps-parameter", + "text": " Any given element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" + } + ] + }, + "vkCmdSetFragmentShadingRateEnumNV": { + "(VK_NV_fragment_shading_rate_enums)": [ + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-pipelineFragmentShadingRate-04576", + "text": " If pipelineFragmentShadingRate is not enabled, shadingRate must be VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-supersampleFragmentShadingRates-04577", + "text": " If supersampleFragmentShadingRates is not enabled, shadingRate must not be VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV, VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV, VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV, or VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-noInvocationFragmentShadingRates-04578", + "text": " If noInvocationFragmentShadingRates is not enabled, shadingRate must not be VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-fragmentShadingRateEnums-04579", + "text": " fragmentShadingRateEnums must be enabled" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-pipelineFragmentShadingRate-04580", + "text": " One of pipelineFragmentShadingRate, primitiveFragmentShadingRate, or attachmentFragmentShadingRate must be enabled" + }, 
+ { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-primitiveFragmentShadingRate-04581", + "text": " If the primitiveFragmentShadingRate feature is not enabled, combinerOps[0] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-attachmentFragmentShadingRate-04582", + "text": " If the attachmentFragmentShadingRate feature is not enabled, combinerOps[1] must be VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-fragmentSizeNonTrivialCombinerOps-04583", + "text": " If the fragmentSizeNonTrivialCombinerOps limit is not supported, elements of combinerOps must be either VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR or VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-shadingRate-parameter", + "text": " shadingRate must be a valid VkFragmentShadingRateNV value" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-combinerOps-parameter", + "text": " Any given element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, "VkPipelineViewportShadingRateImageStateCreateInfoNV": { "(VK_NV_shading_rate_image)": [ { @@ -21204,15 +29534,11 @@ }, { "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056", - "text": " If shadingRateImageEnable is VK_TRUE, viewportCount must be equal to the viewportCount member of VkPipelineViewportStateCreateInfo" + "text": " If shadingRateImageEnable is VK_TRUE, viewportCount must be greater or equal to the viewportCount member of VkPipelineViewportStateCreateInfo" }, { "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV" - }, - { - "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-arraylength", - "text": " viewportCount must be greater than 0" } ] }, @@ -21274,10 +29600,6 @@ "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064", "text": " The shading rate image feature must be enabled" }, - { - "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066", - "text": " firstViewport must be less than VkPhysicalDeviceLimits::maxViewports" - }, { "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067", "text": " The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" @@ -21584,6 +29906,26 @@ } ] }, + "vkCmdSetDepthBiasEnableEXT": { + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdSetDepthBiasEnableEXT-None-04872", + "text": " The extendedDynamicState2 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdSetDepthBiasEnableEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetDepthBiasEnableEXT-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": 
"VUID-vkCmdSetDepthBiasEnableEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, "VkPipelineRasterizationConservativeStateCreateInfoEXT": { "(VK_EXT_conservative_rasterization)": [ { @@ -21662,14 +30004,16 @@ "vuid": "VUID-vkCmdSetDiscardRectangleEXT-discardRectangleCount-arraylength", "text": " discardRectangleCount must be greater than 0" } + ], + "(VK_EXT_discard_rectangles)+(VK_NV_inherited_viewport_scissor)": [ + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-viewportScissor2D-04788", + "text": " If this command is recorded in a secondary command buffer with VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled, then this function must not be called." + } ] }, "vkCmdSetScissor": { "core": [ - { - "vuid": "VUID-vkCmdSetScissor-firstScissor-00591", - "text": " firstScissor must be less than VkPhysicalDeviceLimits::maxViewports" - }, { "vuid": "VUID-vkCmdSetScissor-firstScissor-00592", "text": " The sum of firstScissor and scissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" @@ -21714,6 +30058,12 @@ "vuid": "VUID-vkCmdSetScissor-scissorCount-arraylength", "text": " scissorCount must be greater than 0" } + ], + "(VK_NV_inherited_viewport_scissor)": [ + { + "vuid": "VUID-vkCmdSetScissor-viewportScissor2D-04789", + "text": " If this command is recorded in a secondary command buffer with VkCommandBufferInheritanceViewportScissorInfoNV::viewportScissor2D enabled, then this function must not be called." + } ] }, "VkPipelineViewportExclusiveScissorStateCreateInfoNV": { @@ -21728,7 +30078,7 @@ }, { "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029", - "text": " exclusiveScissorCount must be 0 or identical to the viewportCount member of VkPipelineViewportStateCreateInfo" + "text": " exclusiveScissorCount must be 0 or greater than or equal to the viewportCount member of VkPipelineViewportStateCreateInfo" }, { "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-sType-sType", @@ -21742,10 +30092,6 @@ "vuid": "VUID-vkCmdSetExclusiveScissorNV-None-02031", "text": " The exclusive scissor feature must be enabled" }, - { - "vuid": "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02033", - "text": " firstExclusiveScissor must be less than VkPhysicalDeviceLimits::maxViewports" - }, { "vuid": "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02034", "text": " The sum of firstExclusiveScissor and exclusiveScissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" @@ -21822,6 +30168,12 @@ "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-back-parameter", "text": " back must be a valid VkStencilOpState structure" } + ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-separateStencilMaskRef-04453", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::separateStencilMaskRef is VK_FALSE, and the value of VkPipelineDepthStencilStateCreateInfo::stencilTestEnable is VK_TRUE, and the value of VkPipelineRasterizationStateCreateInfo::cullMode is VK_CULL_MODE_NONE, the value of reference in each of the VkStencilOpState structs in front and back must be the same." 
+ } ] }, "vkCmdSetDepthBoundsTestEnableEXT": { @@ -22208,7 +30560,7 @@ }, { "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineColorBlendAdvancedStateCreateInfoEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineColorBlendAdvancedStateCreateInfoEXT or VkPipelineColorWriteCreateInfoEXT" }, { "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-sType-unique", @@ -22290,7 +30642,17 @@ }, { "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01410", - "text": " If colorBlendOp or alphaBlendOp is an advanced blend operation, then VkSubpassDescription::colorAttachmentCount of the subpass this pipeline is compiled against must be less than or equal to VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendMaxColorAttachments" + "text": " If colorBlendOp or alphaBlendOp is an advanced blend operation, then colorAttachmentCount of the subpass this pipeline is compiled against must be less than or equal to VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendMaxColorAttachments" + } + ], + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-constantAlphaColorBlendFactors-04454", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::constantAlphaColorBlendFactors is VK_FALSE, srcColorBlendFactor must not be VK_BLEND_FACTOR_CONSTANT_ALPHA or VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA." + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-constantAlphaColorBlendFactors-04455", + "text": " If the [VK_KHR_portability_subset] extension is enabled, and VkPhysicalDevicePortabilitySubsetFeaturesKHR::constantAlphaColorBlendFactors is VK_FALSE, dstColorBlendFactor must not be VK_BLEND_FACTOR_CONSTANT_ALPHA or VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA." 
} ] }, @@ -22334,11 +30696,91 @@ } ] }, + "vkCmdSetLogicOpEXT": { + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdSetLogicOpEXT-None-04867", + "text": " The extendedDynamicState2LogicOp feature must be enabled" + }, + { + "vuid": "VUID-vkCmdSetLogicOpEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetLogicOpEXT-logicOp-parameter", + "text": " logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdSetLogicOpEXT-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdSetLogicOpEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "VkPipelineColorWriteCreateInfoEXT": { + "(VK_EXT_color_write_enable)": [ + { + "vuid": "VUID-VkPipelineColorWriteCreateInfoEXT-pAttachments-04801", + "text": " If the colorWriteEnable feature is not enabled, all elements of pColorWriteEnables must be VK_TRUE" + }, + { + "vuid": "VUID-VkPipelineColorWriteCreateInfoEXT-attachmentCount-04802", + "text": " attachmentCount must be equal to the attachmentCount member of the VkPipelineColorBlendStateCreateInfo structure specified during pipeline creation" + }, + { + "vuid": "VUID-VkPipelineColorWriteCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkPipelineColorWriteCreateInfoEXT-pColorWriteEnables-parameter", + "text": " If attachmentCount is not 0, pColorWriteEnables must be a valid pointer to an array of attachmentCount VkBool32 values" + } + ] + }, + "vkCmdSetColorWriteEnableEXT": { + "(VK_EXT_color_write_enable)": [ + { + "vuid": "VUID-vkCmdSetColorWriteEnableEXT-None-04803", + "text": " The colorWriteEnable feature must be enabled" + }, + { + "vuid": "VUID-vkCmdSetColorWriteEnableEXT-attachmentCount-04804", + "text": " attachmentCount must be equal to the attachmentCount member of the VkPipelineColorBlendStateCreateInfo structure specified during pipeline creation" + }, + { + "vuid": "VUID-vkCmdSetColorWriteEnableEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetColorWriteEnableEXT-pColorWriteEnables-parameter", + "text": " pColorWriteEnables must be a valid pointer to an array of attachmentCount VkBool32 values" + }, + { + "vuid": "VUID-vkCmdSetColorWriteEnableEXT-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdSetColorWriteEnableEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdSetColorWriteEnableEXT-attachmentCount-arraylength", + "text": " attachmentCount must be greater than 0" + } + ] + }, "vkCmdDispatch": { "core": [ { - "vuid": "VUID-vkCmdDispatch-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDispatch-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": 
"VUID-vkCmdDispatch-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDispatch-None-02691", @@ -22362,7 +30804,7 @@ }, { "vuid": "VUID-vkCmdDispatch-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDispatch-None-02859", @@ -22390,7 +30832,11 @@ }, { "vuid": "VUID-vkCmdDispatch-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDispatch-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." }, { "vuid": "VUID-vkCmdDispatch-groupCountX-00386", @@ -22460,15 +30906,49 @@ }, { "vuid": "VUID-vkCmdDispatch-commandBuffer-02713", - "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" + "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point used by this command must not write to any resource" + }, + { + "vuid": "VUID-vkCmdDispatch-commandBuffer-04617", + "text": " If any of the shader stages of the VkPipeline bound to the pipeline bind point used by this command uses the RayQueryKHR capability, then commandBuffer must not be a protected command buffer" + } + ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDispatch-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatch-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatch-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." 
+ }, + { + "vuid": "VUID-vkCmdDispatch-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatch-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDispatch-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." } ] }, "vkCmdDispatchIndirect": { "core": [ { - "vuid": "VUID-vkCmdDispatchIndirect-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDispatchIndirect-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDispatchIndirect-None-02691", @@ -22492,7 +30972,7 @@ }, { "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDispatchIndirect-None-02859", @@ -22520,7 +31000,11 @@ }, { "vuid": "VUID-vkCmdDispatchIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdDispatchIndirect-buffer-02708", @@ -22600,6 +31084,32 @@ "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-02711", "text": " commandBuffer must not be a protected command buffer" } + ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ } ] }, "VkDispatchIndirectCommand": { @@ -22619,10 +31129,14 @@ ] }, "vkCmdDispatchBase": { - "core": [ + "(VK_VERSION_1_1,VK_KHR_device_group)": [ { - "vuid": "VUID-vkCmdDispatchBase-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdDispatchBase-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchBase-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdDispatchBase-None-02691", @@ -22646,7 +31160,7 @@ }, { "vuid": "VUID-vkCmdDispatchBase-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdDispatchBase-None-02859", @@ -22674,7 +31188,11 @@ }, { "vuid": "VUID-vkCmdDispatchBase-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDispatchBase-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00421", @@ -22682,7 +31200,7 @@ }, { "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00422", - "text": " baseGroupX must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + "text": " baseGroupY must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" }, { "vuid": "VUID-vkCmdDispatchBase-baseGroupZ-00423", @@ -22703,43 +31221,7 @@ { "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00427", "text": " If any of baseGroupX, baseGroupY, or baseGroupZ are not zero, then the bound compute pipeline must have been created with the VK_PIPELINE_CREATE_DISPATCH_BASE flag" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDispatchBase-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDispatchBase-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdDispatchBase-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" }, - { - "vuid": "VUID-vkCmdDispatchBase-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdDispatchBase-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdDispatchBase-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" - } - ], - "(VK_VERSION_1_1,VK_KHR_device_group)": [ { "vuid": "VUID-vkCmdDispatchBase-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -22756,13 +31238,85 @@ "vuid": "VUID-vkCmdDispatchBase-renderpass", "text": " This command must only be called outside of a render pass instance" } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDispatchBase-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + 
"(VK_VERSION_1_1,VK_KHR_device_group)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDispatchBase-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDispatchBase-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDispatchBase-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdDispatchBase-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDispatchBase-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdDispatchBase-commandBuffer-02712", + "text": " If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" + }, + { + "vuid": "VUID-vkCmdDispatchBase-commandBuffer-02713", + "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point used by this command must not write to any resource" + }, + { + "vuid": "VUID-vkCmdDispatchBase-commandBuffer-04617", + "text": " If any of the shader stages of the VkPipeline bound to the pipeline bind point used by this command uses the RayQueryKHR capability, then commandBuffer must not be a protected command buffer" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." 
+ }, + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatchBase-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDispatchBase-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } ] }, "vkCreateIndirectCommandsLayoutNV": { "(VK_NV_device_generated_commands)": [ { "vuid": "VUID-vkCreateIndirectCommandsLayoutNV-deviceGeneratedCommands-02929", - "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, { "vuid": "VUID-vkCreateIndirectCommandsLayoutNV-device-parameter", @@ -22828,10 +31382,6 @@ "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-flags-parameter", "text": " flags must be a valid combination of VkIndirectCommandsLayoutUsageFlagBitsNV values" }, - { - "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-flags-requiredbitmask", - "text": " flags must not be 0" - }, { "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pipelineBindPoint-parameter", "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" @@ -22870,7 +31420,7 @@ }, { "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-deviceGeneratedCommands-02941", - "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, { "vuid": "VUID-vkDestroyIndirectCommandsLayoutNV-device-parameter", @@ -22994,11 +31544,11 @@ }, { "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-tokenType-02983", - "text": " If tokenType is VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, for each byte in the range specified by pushconstantOffset and pushconstantSize and for each push constant range that overlaps that byte, pushconstantShaderStageFlags must include all stages in that push constant range’s VkPushConstantRange::pushconstantShaderStageFlags" + "text": " If tokenType is VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, for each byte in the range specified by pushconstantOffset and pushconstantSize and for each push constant range that overlaps that byte, pushconstantShaderStageFlags must include all stages in that push constant range’s VkPushConstantRange::stageFlags" }, { "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-tokenType-02984", - "text": " If tokenType is VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, indirectStateFlags must not be ´0´" + "text": " If 
tokenType is VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, indirectStateFlags must not be 0" }, { "vuid": "VUID-VkIndirectCommandsLayoutTokenNV-sType-sType", @@ -23038,7 +31588,7 @@ "(VK_NV_device_generated_commands)": [ { "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-deviceGeneratedCommands-02906", - "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, { "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-device-parameter", @@ -23087,10 +31637,14 @@ ] }, "vkCmdExecuteGeneratedCommandsNV": { - "core": [ + "(VK_NV_device_generated_commands)": [ { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02691", @@ -23114,7 +31668,7 @@ }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02859", @@ -23142,7 +31696,11 @@ }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-renderPass-02684", @@ -23157,8 +31715,16 @@ "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" }, { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02687", - "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04584", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command, except for cases involving read-only access to depth/stencil attachments as described in the Render Pass chapter" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-blendEnable-04727", + "text": " If rasterization is not disabled in the bound graphics pipeline, then for each color attachment in the subpass, if the corresponding image view’s format features do not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, then the blendEnable member of the corresponding element of the pAttachments member of pColorBlendState must be VK_FALSE" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-rasterizationSamples-04740", + "text": " If rasterization is not disabled in the bound graphics pipeline, and neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, then VkPipelineMultisampleStateCreateInfo::rasterizationSamples must be the same as the current subpass color and/or depth/stencil attachments" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04007", @@ -23182,84 +31748,8 @@ }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-deviceGeneratedCommands-02911", - "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by 
VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" - }, - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-02970", - "text": " commandBuffer must not be a protected command buffer" - } - ], - "(VK_VERSION_1_1,VK_KHR_multiview)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-maxMultiviewInstanceIndex-02688", - "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" - } - ], - "(VK_EXT_sample_locations)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-sampleLocationsEnable-02689", - "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" - } - ], - "(VK_EXT_extended_dynamic_state)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-03417", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-scissorCount-03418", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" - }, - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-03419", - "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this draw command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" - }, - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-primitiveTopology-03420", - "text": " If the bound graphics pipeline state was created with the 
VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this draw command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" - } - ], - "(VK_EXT_transform_feedback)": [ - { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02910", - "text": " Transform feedback must not be active" - } - ], - "(VK_NV_device_generated_commands)": [ { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -23280,6 +31770,188 @@ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-renderpass", "text": " This command must only be called inside of a render pass instance" } + ], + "(VK_NV_device_generated_commands)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_NV_device_generated_commands)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_NV_device_generated_commands)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_NV_device_generated_commands)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_NV_device_generated_commands)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-commandBuffer-02970", + "text": " commandBuffer must not be a protected command buffer" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_shader_image_atomic_int64)": [ 
+ { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ } + ], + "(VK_NV_device_generated_commands)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-maxMultiviewInstanceIndex-02688", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-sampleLocationsEnable-02689", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-03417", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the VkPipelineViewportStateCreateInfo::scissorCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-scissorCount-03418", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, then vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the scissorCount parameter of vkCmdSetScissorWithCountEXT must match the VkPipelineViewportStateCreateInfo::viewportCount of the pipeline" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-03419", + "text": " If the bound graphics pipeline state was created with both the VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic states enabled then both vkCmdSetViewportWithCountEXT and vkCmdSetScissorWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must match the scissorCount parameter of vkCmdSetScissorWithCountEXT" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-primitiveTopology-03420", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT dynamic state enabled then vkCmdSetPrimitiveTopologyEXT must have been called in the current command buffer prior to this drawing command, and the primitiveTopology parameter of vkCmdSetPrimitiveTopologyEXT must be of the same topology class as the pipeline VkPipelineInputAssemblyStateCreateInfo::topology state" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-04137", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled, then the bound graphics pipeline must have been created with 
VkPipelineViewportWScalingStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-04138", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportWScalingNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+(VK_NV_shading_rate_image)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-04139", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled, then the bound graphics pipeline must have been created with VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-viewportCount-04140", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT and VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic states enabled then the viewportCount parameter in the last call to vkCmdSetViewportShadingRatePaletteNV must be greater than or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-VkPipelineVieportCreateInfo-04141", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportSwizzleStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportSwizzleStateCreateInfoNV::viewportCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+(VK_NV_scissor_exclusive)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-VkPipelineVieportCreateInfo-04142", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled and an instance of VkPipelineViewportExclusiveScissorStateCreateInfoNV chained from VkPipelineVieportCreateInfo, then the bound graphics pipeline must have been created with VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount greater or equal to the viewportCount parameter in the last call to vkCmdSetViewportWithCountEXT" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04875", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state enabled then vkCmdSetPatchControlPointsEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04876", + "text": " If the bound graphics pipeline state was 
created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT dynamic state enabled then vkCmdSetRasterizerDiscardEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04877", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT dynamic state enabled then vkCmdSetDepthBiasEnableEXT must have been called in the current command buffer prior to this drawing command" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-logicOp-04878", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_LOGIC_OP_EXT dynamic state enabled then vkCmdSetLogicOpEXT must have been called in the current command buffer prior to this drawing command and the logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04879", + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT dynamic state enabled then vkCmdSetPrimitiveRestartEnableEXT must have been called in the current command buffer prior to this drawing command" + } + ], + "(VK_NV_device_generated_commands)+(VK_KHR_fragment_shading_rate+VK_EXT_extended_dynamic_state)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-primitiveFragmentShadingRateWithMultipleViewports-04552", + "text": " If the primitiveFragmentShadingRateWithMultipleViewports limit is not supported, the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT dynamic state enabled, and any of the shader stages of the bound graphics pipeline write to the PrimitiveShadingRateKHR built-in, then vkCmdSetViewportWithCountEXT must have been called in the current command buffer prior to this drawing command, and the viewportCount parameter of vkCmdSetViewportWithCountEXT must be 1" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04912", + "text": " If the bound graphics pipeline was created with both the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT and VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic states enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pStrides-04913", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, but not the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this draw command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_extended_dynamic_state)+!(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pStrides-04884", + "text": " If the bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT dynamic state enabled, then vkCmdBindVertexBuffers2EXT must have been called in the current command buffer prior to this drawing command, and the pStrides parameter of vkCmdBindVertexBuffers2EXT must not be NULL" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04914", + "text": " If the 
bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, then vkCmdSetVertexInputEXT must have been called in the current command buffer prior to this draw command" + } + ], + "(VK_NV_device_generated_commands)+(VK_EXT_transform_feedback)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02910", + "text": " Transform feedback must not be active" + } ] }, "VkGeneratedCommandsInfoNV": { @@ -23416,7 +32088,7 @@ }, { "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-deviceGeneratedCommands-02928", - "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, { "vuid": "VUID-vkCmdPreprocessGeneratedCommandsNV-commandBuffer-parameter", @@ -23824,7 +32496,7 @@ }, { "vuid": "VUID-vkQueueBindSparse-pWaitSemaphores-01117", - "text": " All elements of the pWaitSemaphores member of all elements of pBindInfo member referring to a binary semaphore must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution" + "text": " All elements of the pWaitSemaphores member of all elements of the pBindInfo parameter referring to a binary semaphore must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution" }, { "vuid": "VUID-vkQueueBindSparse-queue-parameter", @@ -23870,15 +32542,15 @@ }, { "vuid": "VUID-VkBindSparseInfo-pSignalSemaphores-03249", - "text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value greater than the current value of the semaphore when the semaphore signal operation is executed" + "text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value greater than the current value of the semaphore when the semaphore signal operation is executed" }, { "vuid": "VUID-VkBindSparseInfo-pWaitSemaphores-03250", - "text": " For each element of pWaitSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pWaitSemaphoreValues must have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" + "text": " For each element of pWaitSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pWaitSemaphoreValues must have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" }, { "vuid": "VUID-VkBindSparseInfo-pSignalSemaphores-03251", - "text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" + 
"text": " For each element of pSignalSemaphores created with a VkSemaphoreType of VK_SEMAPHORE_TYPE_TIMELINE the corresponding element of VkTimelineSemaphoreSubmitInfo::pSignalSemaphoreValues must have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference" } ], "core": [ @@ -24152,6 +32824,50 @@ } ] }, + "vkCreateDirectFBSurfaceEXT": { + "(VK_KHR_surface)+(VK_EXT_directfb_surface)": [ + { + "vuid": "VUID-vkCreateDirectFBSurfaceEXT-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateDirectFBSurfaceEXT-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDirectFBSurfaceCreateInfoEXT structure" + }, + { + "vuid": "VUID-vkCreateDirectFBSurfaceEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDirectFBSurfaceEXT-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkDirectFBSurfaceCreateInfoEXT": { + "(VK_KHR_surface)+(VK_EXT_directfb_surface)": [ + { + "vuid": "VUID-VkDirectFBSurfaceCreateInfoEXT-dfb-04117", + "text": " dfb must point to a valid DirectFB IDirectFB" + }, + { + "vuid": "VUID-VkDirectFBSurfaceCreateInfoEXT-surface-04118", + "text": " surface must point to a valid DirectFB IDirectFBSurface" + }, + { + "vuid": "VUID-VkDirectFBSurfaceCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkDirectFBSurfaceCreateInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDirectFBSurfaceCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, "vkCreateImagePipeSurfaceFUCHSIA": { "(VK_KHR_surface)+(VK_FUCHSIA_imagepipe_surface)": [ { @@ -24175,7 +32891,7 @@ "VkImagePipeSurfaceCreateInfoFUCHSIA": { "(VK_KHR_surface)+(VK_FUCHSIA_imagepipe_surface)": [ { - "vuid": "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-imagePipeHandle-00000", + "vuid": "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-imagePipeHandle-04863", "text": " imagePipeHandle must be a valid zx_handle_t" }, { @@ -24254,9 +32970,13 @@ }, "VkIOSSurfaceCreateInfoMVK": { "(VK_KHR_surface)+(VK_MVK_ios_surface)": [ + { + "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-pView-04143", + "text": " If pView is a CAMetalLayer object, it must be a valid CAMetalLayer." + }, { "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-pView-01316", - "text": " pView must be a valid UIView and must be backed by a CALayer instance of type CAMetalLayer" + "text": " If pView is a UIView object, it must be a valid UIView, must be backed by a CALayer object of type CAMetalLayer, and vkCreateIOSSurfaceMVK must be called on the main thread." }, { "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-sType-sType", @@ -24294,9 +33014,13 @@ }, "VkMacOSSurfaceCreateInfoMVK": { "(VK_KHR_surface)+(VK_MVK_macos_surface)": [ + { + "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-pView-04144", + "text": " If pView is a CAMetalLayer object, it must be a valid CAMetalLayer." 
+ }, { "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-pView-01317", - "text": " pView must be a valid NSView and must be backed by a CALayer instance of type CAMetalLayer" + "text": " If pView is an NSView object, it must be a valid NSView, must be backed by a CALayer object of type CAMetalLayer, and vkCreateMacOSSurfaceMVK must be called on the main thread." }, { "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-sType-sType", @@ -24388,6 +33112,50 @@ } ] }, + "vkCreateScreenSurfaceQNX": { + "(VK_KHR_surface)+(VK_QNX_screen_surface)": [ + { + "vuid": "VUID-vkCreateScreenSurfaceQNX-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateScreenSurfaceQNX-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkScreenSurfaceCreateInfoQNX structure" + }, + { + "vuid": "VUID-vkCreateScreenSurfaceQNX-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateScreenSurfaceQNX-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkScreenSurfaceCreateInfoQNX": { + "(VK_KHR_surface)+(VK_QNX_screen_surface)": [ + { + "vuid": "VUID-VkScreenSurfaceCreateInfoQNX-context-04741", + "text": " context must point to a valid QNX Screen struct _screen_context" + }, + { + "vuid": "VUID-VkScreenSurfaceCreateInfoQNX-window-04742", + "text": " window must point to a valid QNX Screen struct _screen_window" + }, + { + "vuid": "VUID-VkScreenSurfaceCreateInfoQNX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX" + }, + { + "vuid": "VUID-VkScreenSurfaceCreateInfoQNX-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkScreenSurfaceCreateInfoQNX-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, "vkDestroySurfaceKHR": { "(VK_KHR_surface)": [ { @@ -24500,6 +33268,34 @@ } ] }, + "vkAcquireWinrtDisplayNV": { + "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)+(VK_NV_acquire_winrt_display)": [ + { + "vuid": "VUID-vkAcquireWinrtDisplayNV-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkAcquireWinrtDisplayNV-display-parameter", + "text": " display must be a valid VkDisplayKHR handle" + }, + { + "vuid": "VUID-vkAcquireWinrtDisplayNV-display-parent", + "text": " display must have been created, allocated, or retrieved from physicalDevice" + } + ] + }, + "vkGetWinrtDisplayNV": { + "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)+(VK_NV_acquire_winrt_display)": [ + { + "vuid": "VUID-vkGetWinrtDisplayNV-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetWinrtDisplayNV-pDisplay-parameter", + "text": " pDisplay must be a valid pointer to a VkDisplayKHR handle" + } + ] + }, "vkReleaseDisplayEXT": { "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)": [ { @@ -24836,11 +33632,11 @@ }, { "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01255", - "text": " alphaMode must be 0 or one of the bits present in the supportedAlpha member of VkDisplayPlaneCapabilitiesKHR returned by vkGetDisplayPlaneCapabilitiesKHR for the display plane corresponding to displayMode" + "text": " alphaMode must be one of the bits present in the supportedAlpha member of VkDisplayPlaneCapabilitiesKHR for the display plane corresponding to displayMode" }, { "vuid": 
"VUID-VkDisplaySurfaceCreateInfoKHR-width-01256", - "text": " The width and height members of imageExtent must be less than the maxImageDimensions2D member of VkPhysicalDeviceLimits" + "text": " The width and height members of imageExtent must be less than or equal to VkPhysicalDeviceLimits::maxImageDimension2D" }, { "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-sType-sType", @@ -24988,6 +33784,38 @@ } ] }, + "vkGetPhysicalDeviceDirectFBPresentationSupportEXT": { + "(VK_KHR_surface)+(VK_EXT_directfb_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceDirectFBPresentationSupportEXT-queueFamilyIndex-04119", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceDirectFBPresentationSupportEXT-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceDirectFBPresentationSupportEXT-dfb-parameter", + "text": " dfb must be a valid pointer to an IDirectFB value" + } + ] + }, + "vkGetPhysicalDeviceScreenPresentationSupportQNX": { + "(VK_KHR_surface)+(VK_QNX_screen_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceScreenPresentationSupportQNX-queueFamilyIndex-04743", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceScreenPresentationSupportQNX-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceScreenPresentationSupportQNX-window-parameter", + "text": " window must be a valid pointer to a _screen_window value" + } + ] + }, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR": { "(VK_KHR_surface)": [ { @@ -25152,7 +33980,7 @@ "(VK_KHR_surface)+(VK_EXT_display_surface_counter)": [ { "vuid": "VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246", - "text": " supportedSurfaceCounters must not include VK_SURFACE_COUNTER_VBLANK_EXT unless the surface queried is a display surface" + "text": " supportedSurfaceCounters must not include VK_SURFACE_COUNTER_VBLANK_BIT_EXT unless the surface queried is a display surface" }, { "vuid": "VUID-VkSurfaceCapabilities2EXT-sType-sType", @@ -25652,7 +34480,7 @@ }, { "vuid": "VUID-VkSwapchainCreateInfoKHR-flags-04100", - "text": " If flags dose not contain VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR and the pNext chain include a VkImageFormatListCreateInfo structure then VkImageFormatListCreateInfo::viewFormatCount must be 0 or 1" + "text": " If flags does not contain VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR and the pNext chain include a VkImageFormatListCreateInfo structure then VkImageFormatListCreateInfo::viewFormatCount must be 0 or 1" } ], "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_surface_protected_capabilities)": [ @@ -25691,7 +34519,7 @@ "text": " sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD" }, { - "vuid": "VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-localDimmingEnable-XXXXX", + "vuid": "VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-localDimmingEnable-04449", "text": " It is only valid to set localDimmingEnable to VK_TRUE if VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport is supported" } ] @@ -25711,8 +34539,8 @@ "text": " Both of device, and swapChain must have been created, allocated, or retrieved from the same VkInstance" }, { - "vuid": 
"VUID-vkSetLocalDimmingAMD-XXXXX", - "text": " It is only valid to call vkSetLocalDimmingAMD if VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport is supported" + "vuid": "VUID-vkSetLocalDimmingAMD-localDimmingSupport-04618", + "text": " VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport must be supported" } ] }, @@ -26041,12 +34869,6 @@ "text": " Each element of pImageIndices must be the index of a presentable image acquired from the swapchain specified by the corresponding element of the pSwapchains array, and the presented image subresource must be in the VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR layout at the time the operation is executed on a VkDevice" } ], - "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_2,VK_KHR_timeline_semaphore)": [ - { - "vuid": "VUID-VkPresentInfoKHR-pWaitSemaphores-03269", - "text": " All elements of the pWaitSemaphores must have a VkSemaphoreType of VK_SEMAPHORE_TYPE_BINARY" - } - ], "(VK_KHR_surface)+(VK_KHR_swapchain)": [ { "vuid": "VUID-VkPresentInfoKHR-sType-sType", @@ -26117,8 +34939,8 @@ "VkRectLayerKHR": { "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_incremental_present)": [ { - "vuid": "VUID-VkRectLayerKHR-offset-01261", - "text": " The sum of offset and extent must be no greater than the imageExtent member of the VkSwapchainCreateInfoKHR structure passed to vkCreateSwapchainKHR" + "vuid": "VUID-VkRectLayerKHR-offset-04864", + "text": " The sum of offset and extent, after being transformed according to the preTransform member of the VkSwapchainCreateInfoKHR structure, must be no greater than the imageExtent member of the VkSwapchainCreateInfoKHR structure passed to vkCreateSwapchainKHR." }, { "vuid": "VUID-VkRectLayerKHR-layer-01262", @@ -26138,7 +34960,7 @@ }, { "vuid": "VUID-VkDisplayPresentInfoKHR-persistentContent-01259", - "text": " If the persistentContent member of the VkDisplayPropertiesKHR structure returned by vkGetPhysicalDeviceDisplayPropertiesKHR for the display the present operation targets then persistent must be VK_FALSE" + "text": " If the persistentContent member of the VkDisplayPropertiesKHR structure returned by vkGetPhysicalDeviceDisplayPropertiesKHR for the display the present operation targets is VK_FALSE, then persistent must be VK_FALSE" }, { "vuid": "VUID-VkDisplayPresentInfoKHR-sType-sType", @@ -26258,18 +35080,6 @@ } ] }, - "VkDeferredOperationInfoKHR": { - "(VK_KHR_deferred_host_operations)": [ - { - "vuid": "VUID-VkDeferredOperationInfoKHR-operationHandle-03433", - "text": " Any previous deferred operation that was associated with operationHandle must be complete" - }, - { - "vuid": "VUID-VkDeferredOperationInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_DEFERRED_OPERATION_INFO_KHR" - } - ] - }, "vkCreateDeferredOperationKHR": { "(VK_KHR_deferred_host_operations)": [ { @@ -26368,6 +35178,10 @@ }, "vkCreatePrivateDataSlotEXT": { "(VK_EXT_private_data)": [ + { + "vuid": "VUID-vkCreatePrivateDataSlotEXT-privateData-04564", + "text": " The privateData feature must be enabled" + }, { "vuid": "VUID-vkCreatePrivateDataSlotEXT-device-parameter", "text": " device must be a valid VkDevice handle" @@ -26462,7 +35276,7 @@ "(VK_EXT_private_data)": [ { "vuid": "VUID-vkGetPrivateDataEXT-objectType-04018", - "text": " objectType must be VkDevice or an object type whose parent is VkDevice" + "text": " objectType must be VK_OBJECT_TYPE_DEVICE, or an object type whose parent is VkDevice" }, { "vuid": "VUID-vkGetPrivateDataEXT-device-parameter", @@ -26486,11 +35300,1833 
@@ } ] }, - "vkCmdTraceRaysNV": { - "core": [ + "vkCmdBuildAccelerationStructureNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ { - "vuid": "VUID-vkCmdTraceRaysNV-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241", + "text": " geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-02488", + "text": " dst must have been created with compatible VkAccelerationStructureInfoNV where VkAccelerationStructureInfoNV::type and VkAccelerationStructureInfoNV::flags are identical, VkAccelerationStructureInfoNV::instanceCount and VkAccelerationStructureInfoNV::geometryCount for dst are greater than or equal to the build size and each geometry in VkAccelerationStructureInfoNV::pGeometries for dst has greater than or equal to the number of vertices, indices, and AABBs" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02489", + "text": " If update is VK_TRUE, src must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02490", + "text": " If update is VK_TRUE, src must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in VkAccelerationStructureInfoNV::flags" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02491", + "text": " If update is VK_FALSE, the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02492", + "text": " If update is VK_TRUE, the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-03522", + "text": " scratch must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-03523", + "text": " If instanceData is not VK_NULL_HANDLE, instanceData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-accelerationStructureReference-03786", + "text": " Each VkAccelerationStructureInstanceKHR::accelerationStructureReference value in instanceData must be a valid device address containing a value obtained from vkGetAccelerationStructureHandleNV" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03524", + "text": " If update is VK_TRUE, then objects that were previously active must not be made inactive as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03525", + 
"text": " If update is VK_TRUE, then objects that were previously inactive must not be made active as per Inactive Primitives and Instances" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03526", + "text": " If update is VK_TRUE, the src and dst objects must either be the same object or not have any memory aliasing" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureInfoNV structure" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", + "text": " If instanceData is not VK_NULL_HANDLE, instanceData must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureNV handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-src-parameter", + "text": " If src is not VK_NULL_HANDLE, src must be a valid VkAccelerationStructureNV handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", + "text": " scratch must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commonparent", + "text": " Each of commandBuffer, dst, instanceData, scratch, and src that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdBuildAccelerationStructuresKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-mode-04628", + "text": " The mode member of each element of pInfos must be a valid VkBuildAccelerationStructureModeKHR value" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-srcAccelerationStructure-04629", + "text": " If the srcAccelerationStructure member of any element of pInfos is not VK_NULL_HANDLE, the srcAccelerationStructure member must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-04630", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure member must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03403", + "text": " The srcAccelerationStructure member of any element of pInfos must not be the same acceleration structure as the dstAccelerationStructure member of any other element of pInfos" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-dstAccelerationStructure-03698", + "text": " The dstAccelerationStructure member of any element of pInfos must not be the same acceleration structure as the dstAccelerationStructure member of any other element of pInfos" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-dstAccelerationStructure-03800", + "text": " The 
dstAccelerationStructure member of any element of pInfos must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03699", + "text": " For each element of pInfos, if its type member is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, its dstAccelerationStructure member must have been created with a value of VkAccelerationStructureCreateInfoKHR::type equal to either VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03700", + "text": " For each element of pInfos, if its type member is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, its dstAccelerationStructure member must have been created with a value of VkAccelerationStructureCreateInfoKHR::type equal to either VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03663", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, inactive primitives in its srcAccelerationStructure member must not be made active" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03664", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, active primitives in its srcAccelerationStructure member must not be made inactive" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-None-03407", + "text": " The dstAccelerationStructure member of any element of pInfos must not be referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR in any other element of pInfos" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-dstAccelerationStructure-03701", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the srcAccelerationStructure member of any other element of pInfos with a mode equal to VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-dstAccelerationStructure-03702", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the dstAccelerationStructure member of any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-dstAccelerationStructure-03703", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the scratchData member of any element of pInfos (including the same element), which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-scratchData-03704", + "text": " The range of memory backing the scratchData member of any element of pInfos that is accessed by this command must not overlap the memory backing the scratchData member of any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-scratchData-03705", + "text": " The range of memory backing the scratchData member of any element of pInfos that is accessed by this command must not overlap the memory 
backing the srcAccelerationStructure member of any element of pInfos with a mode equal to VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR (including the same element), which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-dstAccelerationStructure-03706", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing any acceleration structure referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR in any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03667", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure member must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR set in VkAccelerationStructureBuildGeometryInfoKHR::flags" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03668", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure and dstAccelerationStructure members must either be the same VkAccelerationStructureKHR, or not have any memory aliasing" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03758", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its geometryCount member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03759", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its flags member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03760", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its type member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03761", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, its geometryType member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03762", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, its flags member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03763", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.vertexFormat member must have the same 
value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03764", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.maxVertex member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03765", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.indexType member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03766", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, if its geometry.triangles.transformData address was NULL when srcAccelerationStructure was last built, then it must be NULL" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03767", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, if its geometry.triangles.transformData address was not NULL when srcAccelerationStructure was last built, then it must not be NULL" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03768", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, and geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, then the value of each index referenced must be the same as the corresponding index value when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-primitiveCount-03769", + "text": " For each VkAccelerationStructureBuildRangeInfoKHR referenced by this command, its primitiveCount member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-firstVertex-03770", + "text": " For each VkAccelerationStructureBuildRangeInfoKHR referenced by this command, if the corresponding geometry uses indices, its firstVertex member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03801", + "text": " For each element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, the corresponding ppBuildRangeInfos[i][j].primitiveCount must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxInstanceCount" + }, + { + "vuid": 
"VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03707", + "text": " For each element of pInfos, the buffer used to create its dstAccelerationStructure member must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03708", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR the buffer used to create its srcAccelerationStructure member must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03709", + "text": " For each element of pInfos, the buffer used to create each acceleration structure referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03671", + "text": " If pInfos[i].mode is VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR, all addresses between pInfos[i].scratchData.deviceAddress and pInfos[i].scratchData.deviceAddress + N - 1 must be in the buffer device address range of the same buffer, where N is given by the buildScratchSize member of the VkAccelerationStructureBuildSizesInfoKHR structure returned from a call to vkGetAccelerationStructureBuildSizesKHR with an identical VkAccelerationStructureBuildGeometryInfoKHR structure and primitive count" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03672", + "text": " If pInfos[i].mode is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, all addresses between pInfos[i].scratchData.deviceAddress and pInfos[i].scratchData.deviceAddress + N - 1 must be in the buffer device address range of the same buffer, where N is given by the updateScratchSize member of the VkAccelerationStructureBuildSizesInfoKHR structure returned from a call to vkGetAccelerationStructureBuildSizesKHR with an identical VkAccelerationStructureBuildGeometryInfoKHR structure and primitive count" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-geometry-03673", + "text": " The buffers from which the buffer device addresses for all of the geometry.triangles.vertexData, geometry.triangles.indexData, geometry.triangles.transformData, geometry.aabbs.data, and geometry.instances.data members of all pInfos[i].pGeometries and pInfos[i].ppGeometries are queried must have been created with the VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03674", + "text": " The buffer from which the buffer device address pInfos[i].scratchData.deviceAddress is queried must have been created with VK_BUFFER_USAGE_STORAGE_BUFFER_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03802", + "text": " For each element of pInfos, its scratchData.deviceAddress member must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03803", + "text": " For each element of pInfos, if scratchData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03710", + "text": " For each element of pInfos, its scratchData.deviceAddress member must be a multiple of VkPhysicalDeviceAccelerationStructurePropertiesKHR::minAccelerationStructureScratchOffsetAlignment" + }, + { + "vuid": 
"VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03804", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, geometry.triangles.vertexData.deviceAddress must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03805", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.vertexData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03711", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, geometry.triangles.vertexData.deviceAddress must be aligned to the size in bytes of the smallest component of the format in vertexFormat" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03806", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, geometry.triangles.indexData.deviceAddress must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03807", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, if geometry.triangles.indexData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03712", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, and with geometry.triangles.indexType not equal to VK_INDEX_TYPE_NONE_KHR, geometry.triangles.indexData.deviceAddress must be aligned to the size in bytes of the type in indexType" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03808", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.transformData.deviceAddress is not 0, it must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03809", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.transformData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03810", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.transformData.deviceAddress is not 0, it must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03811", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR, geometry.aabbs.data.deviceAddress must be a valid device address 
obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03812", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR, if geometry.aabbs.data.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03714", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR, geometry.aabbs.data.deviceAddress must be aligned to 8 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03715", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.arrayOfPointers is VK_FALSE, geometry.instances.data.deviceAddress must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03716", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.arrayOfPointers is VK_TRUE, geometry.instances.data.deviceAddress must be aligned to 8 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03717", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.arrayOfPointers is VK_TRUE, each element of geometry.instances.data.deviceAddress in device memory must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03813", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, geometry.instances.data.deviceAddress must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03814", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.instances.data.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03815", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, each VkAccelerationStructureInstanceKHR::accelerationStructureReference value in geometry.instances.data.deviceAddress must be a valid device address containing a value obtained from vkGetAccelerationStructureDeviceAddressKHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-03675", + "text": " For each pInfos[i], dstAccelerationStructure must have been created with a value of VkAccelerationStructureCreateInfoKHR::size greater than or equal to the memory size required by the build operation, as returned by vkGetAccelerationStructureBuildSizesKHR with pBuildInfo = pInfos[i] and with each element of the pMaxPrimitiveCounts array greater than or equal to the equivalent ppBuildRangeInfos[i][j].primitiveCount values for j in [0,pInfos[i].geometryCount)" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-ppBuildRangeInfos-03676", + "text": " Each element of ppBuildRangeInfos[i] must be a valid pointer to an array of pInfos[i].geometryCount 
VkAccelerationStructureBuildRangeInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-pInfos-parameter", + "text": " pInfos must be a valid pointer to an array of infoCount valid VkAccelerationStructureBuildGeometryInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-ppBuildRangeInfos-parameter", + "text": " ppBuildRangeInfos must be a valid pointer to an array of infoCount VkAccelerationStructureBuildRangeInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresKHR-infoCount-arraylength", + "text": " infoCount must be greater than 0" + } + ] + }, + "vkCmdBuildAccelerationStructuresIndirectKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-mode-04628", + "text": " The mode member of each element of pInfos must be a valid VkBuildAccelerationStructureModeKHR value" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-srcAccelerationStructure-04629", + "text": " If the srcAccelerationStructure member of any element of pInfos is not VK_NULL_HANDLE, the srcAccelerationStructure member must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-04630", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure member must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03403", + "text": " The srcAccelerationStructure member of any element of pInfos must not be the same acceleration structure as the dstAccelerationStructure member of any other element of pInfos" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-dstAccelerationStructure-03698", + "text": " The dstAccelerationStructure member of any element of pInfos must not be the same acceleration structure as the dstAccelerationStructure member of any other element of pInfos" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-dstAccelerationStructure-03800", + "text": " The dstAccelerationStructure member of any element of pInfos must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03699", + "text": " For each element of pInfos, if its type member is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, its dstAccelerationStructure member must have been created with a value of VkAccelerationStructureCreateInfoKHR::type equal to either VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03700", + "text": " For each element of pInfos, if its type member is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, its 
dstAccelerationStructure member must have been created with a value of VkAccelerationStructureCreateInfoKHR::type equal to either VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03663", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, inactive primitives in its srcAccelerationStructure member must not be made active" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03664", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, active primitives in its srcAccelerationStructure member must not be made inactive" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-None-03407", + "text": " The dstAccelerationStructure member of any element of pInfos must not be referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR in any other element of pInfos" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-dstAccelerationStructure-03701", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the srcAccelerationStructure member of any other element of pInfos with a mode equal to VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-dstAccelerationStructure-03702", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the dstAccelerationStructure member of any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-dstAccelerationStructure-03703", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the scratchData member of any element of pInfos (including the same element), which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-scratchData-03704", + "text": " The range of memory backing the scratchData member of any element of pInfos that is accessed by this command must not overlap the memory backing the scratchData member of any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-scratchData-03705", + "text": " The range of memory backing the scratchData member of any element of pInfos that is accessed by this command must not overlap the memory backing the srcAccelerationStructure member of any element of pInfos with a mode equal to VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR (including the same element), which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-dstAccelerationStructure-03706", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing any acceleration structure referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of 
VK_GEOMETRY_TYPE_INSTANCES_KHR in any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03667", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure member must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR set in VkAccelerationStructureBuildGeometryInfoKHR::flags" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03668", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure and dstAccelerationStructure members must either be the same VkAccelerationStructureKHR, or not have any memory aliasing" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03758", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its geometryCount member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03759", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its flags member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03760", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its type member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03761", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, its geometryType member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03762", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, its flags member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03763", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.vertexFormat member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03764", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.maxVertex member must have the same value which was specified when srcAccelerationStructure was 
last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03765", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.indexType member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03766", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, if its geometry.triangles.transformData address was NULL when srcAccelerationStructure was last built, then it must be NULL" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03767", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, if its geometry.triangles.transformData address was not NULL when srcAccelerationStructure was last built, then it must not be NULL" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03768", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, and geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, then the value of each index referenced must be the same as the corresponding index value when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-primitiveCount-03769", + "text": " For each VkAccelerationStructureBuildRangeInfoKHR referenced by this command, its primitiveCount member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-firstVertex-03770", + "text": " For each VkAccelerationStructureBuildRangeInfoKHR referenced by this command, if the corresponding geometry uses indices, its firstVertex member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03801", + "text": " For each element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, the corresponding {maxinstancecheck} must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxInstanceCount" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03707", + "text": " For each element of pInfos, the buffer used to create its dstAccelerationStructure member must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03708", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR the buffer used to create its srcAccelerationStructure member must be bound to device memory" + }, + { + 
"vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03709", + "text": " For each element of pInfos, the buffer used to create each acceleration structure referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03671", + "text": " If pInfos[i].mode is VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR, all addresses between pInfos[i].scratchData.deviceAddress and pInfos[i].scratchData.deviceAddress + N - 1 must be in the buffer device address range of the same buffer, where N is given by the buildScratchSize member of the VkAccelerationStructureBuildSizesInfoKHR structure returned from a call to vkGetAccelerationStructureBuildSizesKHR with an identical VkAccelerationStructureBuildGeometryInfoKHR structure and primitive count" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03672", + "text": " If pInfos[i].mode is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, all addresses between pInfos[i].scratchData.deviceAddress and pInfos[i].scratchData.deviceAddress + N - 1 must be in the buffer device address range of the same buffer, where N is given by the updateScratchSize member of the VkAccelerationStructureBuildSizesInfoKHR structure returned from a call to vkGetAccelerationStructureBuildSizesKHR with an identical VkAccelerationStructureBuildGeometryInfoKHR structure and primitive count" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-geometry-03673", + "text": " The buffers from which the buffer device addresses for all of the geometry.triangles.vertexData, geometry.triangles.indexData, geometry.triangles.transformData, geometry.aabbs.data, and geometry.instances.data members of all pInfos[i].pGeometries and pInfos[i].ppGeometries are queried must have been created with the VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03674", + "text": " The buffer from which the buffer device address pInfos[i].scratchData.deviceAddress is queried must have been created with VK_BUFFER_USAGE_STORAGE_BUFFER_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03802", + "text": " For each element of pInfos, its scratchData.deviceAddress member must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03803", + "text": " For each element of pInfos, if scratchData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03710", + "text": " For each element of pInfos, its scratchData.deviceAddress member must be a multiple of VkPhysicalDeviceAccelerationStructurePropertiesKHR::minAccelerationStructureScratchOffsetAlignment" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03804", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, geometry.triangles.vertexData.deviceAddress must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03805", + "text": " For any element of 
pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.vertexData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03711", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, geometry.triangles.vertexData.deviceAddress must be aligned to the size in bytes of the smallest component of the format in vertexFormat" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03806", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, geometry.triangles.indexData.deviceAddress must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03807", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, if geometry.triangles.indexData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03712", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, and with geometry.triangles.indexType not equal to VK_INDEX_TYPE_NONE_KHR, geometry.triangles.indexData.deviceAddress must be aligned to the size in bytes of the type in indexType" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03808", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.transformData.deviceAddress is not 0, it must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03809", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.transformData.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03810", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.transformData.deviceAddress is not 0, it must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03811", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR, geometry.aabbs.data.deviceAddress must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03812", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR, if geometry.aabbs.data.deviceAddress is the address of a non-sparse buffer then it must be 
bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03714", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR, geometry.aabbs.data.deviceAddress must be aligned to 8 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03715", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.arrayOfPointers is VK_FALSE, geometry.instances.data.deviceAddress must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03716", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.arrayOfPointers is VK_TRUE, geometry.instances.data.deviceAddress must be aligned to 8 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03717", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.arrayOfPointers is VK_TRUE, each element of geometry.instances.data.deviceAddress in device memory must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03813", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, geometry.instances.data.deviceAddress must be a valid device address obtained from vkGetBufferDeviceAddress" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03814", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, if geometry.instances.data.deviceAddress is the address of a non-sparse buffer then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03815", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, each VkAccelerationStructureInstanceKHR::accelerationStructureReference value in geometry.instances.data.deviceAddress must be a valid device address containing a value obtained from vkGetAccelerationStructureDeviceAddressKHR" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectDeviceAddresses-03645", + "text": " For any element of pIndirectDeviceAddresses, if the buffer from which it was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectDeviceAddresses-03646", + "text": " For any element of pIndirectDeviceAddresses[i], all device addresses between pIndirectDeviceAddresses[i] and pIndirectDeviceAddresses[i] + (pInfos[i].geometryCount × pIndirectStrides[i]) - 1 must be in the buffer device address range of the same buffer" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectDeviceAddresses-03647", + "text": " For any element of pIndirectDeviceAddresses, the buffer from which it was queried must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectDeviceAddresses-03648",
+ "text": " Each element of pIndirectDeviceAddresses must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectStrides-03787", + "text": " Each element of pIndirectStrides must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-commandBuffer-03649", + "text": " commandBuffer must not be a protected command buffer" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-accelerationStructureIndirectBuild-03650", + "text": " The VkPhysicalDeviceAccelerationStructureFeaturesKHR::accelerationStructureIndirectBuild feature must be enabled" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectDeviceAddresses-03651", + "text": " Each VkAccelerationStructureBuildRangeInfoKHR structure referenced by any element of pIndirectDeviceAddresses must be a valid VkAccelerationStructureBuildRangeInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-03652", + "text": " pInfos[i].dstAccelerationStructure must have been created with a value of VkAccelerationStructureCreateInfoKHR::size greater than or equal to the memory size required by the build operation, as returned by vkGetAccelerationStructureBuildSizesKHR with pBuildInfo = pInfos[i] and pMaxPrimitiveCounts = ppMaxPrimitiveCounts[i]" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-ppMaxPrimitiveCounts-03653", + "text": " Each ppMaxPrimitiveCounts[i][j] must be greater than or equal to the the primitiveCount value specified by the VkAccelerationStructureBuildRangeInfoKHR structure located at pIndirectDeviceAddresses[i] + (j {times} pIndirectStrides[i])" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pInfos-parameter", + "text": " pInfos must be a valid pointer to an array of infoCount valid VkAccelerationStructureBuildGeometryInfoKHR structures" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectDeviceAddresses-parameter", + "text": " pIndirectDeviceAddresses must be a valid pointer to an array of infoCount VkDeviceAddress values" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-pIndirectStrides-parameter", + "text": " pIndirectStrides must be a valid pointer to an array of infoCount uint32_t values" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-ppMaxPrimitiveCounts-parameter", + "text": " ppMaxPrimitiveCounts must be a valid pointer to an array of infoCount uint32_t values" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBuildAccelerationStructuresIndirectKHR-infoCount-arraylength", + "text": " infoCount must be greater than 0" + } + ] + }, + "VkAccelerationStructureBuildGeometryInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": 
"VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03654", + "text": " type must not be VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-pGeometries-03788", + "text": " Only one of pGeometries or ppGeometries can be a valid pointer, the other must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03789", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, the geometryType member of elements of either pGeometries or ppGeometries must be VK_GEOMETRY_TYPE_INSTANCES_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03790", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, geometryCount must be 1" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03791", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR the geometryType member of elements of either pGeometries or ppGeometries must not be VK_GEOMETRY_TYPE_INSTANCES_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03792", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR then the geometryType member of each geometry in either pGeometries or ppGeometries must be the same" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03793", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR then geometryCount must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxGeometryCount" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03794", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR and the geometryType member of either pGeometries or ppGeometries is VK_GEOMETRY_TYPE_AABBS_KHR, the total number of AABBs in all geometries must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxPrimitiveCount" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03795", + "text": " If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR and the geometryType member of either pGeometries or ppGeometries is VK_GEOMETRY_TYPE_TRIANGLES_KHR, the total number of triangles in all geometries must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxPrimitiveCount" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-flags-03796", + "text": " If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR bit set" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-parameter", + "text": " type must be a valid VkAccelerationStructureTypeKHR value" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsKHR values" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-pGeometries-parameter", + "text": " If geometryCount is not 0, and pGeometries is not NULL, pGeometries must be a valid pointer to an array of geometryCount valid VkAccelerationStructureGeometryKHR structures" + }, + { + 
"vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-ppGeometries-parameter", + "text": " If geometryCount is not 0, and ppGeometries is not NULL, ppGeometries must be a valid pointer to an array of geometryCount valid pointers to valid VkAccelerationStructureGeometryKHR structures" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-commonparent", + "text": " Both of dstAccelerationStructure, and srcAccelerationStructure that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkAccelerationStructureGeometryKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-parameter", + "text": " geometryType must be a valid VkGeometryTypeKHR value" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-triangles-parameter", + "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, the triangles member of geometry must be a valid VkAccelerationStructureGeometryTrianglesDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-aabbs-parameter", + "text": " If geometryType is VK_GEOMETRY_TYPE_AABBS_KHR, the aabbs member of geometry must be a valid VkAccelerationStructureGeometryAabbsDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-instances-parameter", + "text": " If geometryType is VK_GEOMETRY_TYPE_INSTANCES_KHR, the instances member of geometry must be a valid VkAccelerationStructureGeometryInstancesDataKHR structure" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryKHR-flags-parameter", + "text": " flags must be a valid combination of VkGeometryFlagBitsKHR values" + } + ] + }, + "VkAccelerationStructureGeometryTrianglesDataKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexStride-03735", + "text": " vertexStride must be a multiple of the size in bytes of the smallest component of vertexFormat" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexStride-03819", + "text": " vertexStride must be less than or equal to 232-1" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexFormat-03797", + "text": " vertexFormat must support the VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR in VkFormatProperties::bufferFeatures as returned by vkGetPhysicalDeviceFormatProperties2" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-indexType-03798", + "text": " indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexFormat-parameter", + "text": " vertexFormat must be a valid VkFormat value" + }, + { + "vuid": 
"VUID-VkAccelerationStructureGeometryTrianglesDataKHR-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" + } + ] + }, + "VkTransformMatrixKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkTransformMatrixKHR-matrix-03799", + "text": " The first three columns of matrix must define an invertible 3x3 matrix" + } + ] + }, + "VkAccelerationStructureGeometryAabbsDataKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-stride-03545", + "text": " stride must be a multiple of 8" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-stride-03820", + "text": " stride must be less than or equal to 232-1" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkAabbPositionsKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAabbPositionsKHR-minX-03546", + "text": " minX must be less than or equal to maxX" + }, + { + "vuid": "VUID-VkAabbPositionsKHR-minY-03547", + "text": " minY must be less than or equal to maxY" + }, + { + "vuid": "VUID-VkAabbPositionsKHR-minZ-03548", + "text": " minZ must be less than or equal to maxZ" + } + ] + }, + "VkAccelerationStructureGeometryInstancesDataKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkAccelerationStructureInstanceKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureInstanceKHR-flags-parameter", + "text": " flags must be a valid combination of VkGeometryInstanceFlagBitsKHR values" + } + ] + }, + "VkAccelerationStructureBuildRangeInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureBuildRangeInfoKHR-primitiveOffset-03656", + "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, if the geometry uses indices, the offset primitiveOffset from VkAccelerationStructureGeometryTrianglesDataKHR::indexData must be a multiple of the element size of VkAccelerationStructureGeometryTrianglesDataKHR::indexType" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildRangeInfoKHR-primitiveOffset-03657", + "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, if the geometry does not use indices, the offset primitiveOffset from VkAccelerationStructureGeometryTrianglesDataKHR::vertexData must be a multiple of the component size of VkAccelerationStructureGeometryTrianglesDataKHR::vertexFormat" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildRangeInfoKHR-transformOffset-03658", + "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, the offset transformOffset from VkAccelerationStructureGeometryTrianglesDataKHR::transformData must be a multiple of 16" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildRangeInfoKHR-primitiveOffset-03659", + "text": " For geometries of type 
VK_GEOMETRY_TYPE_AABBS_KHR, the offset primitiveOffset from VkAccelerationStructureGeometryAabbsDataKHR::data must be a multiple of 8" + }, + { + "vuid": "VUID-VkAccelerationStructureBuildRangeInfoKHR-primitiveOffset-03660", + "text": " For geometries of type VK_GEOMETRY_TYPE_INSTANCES_KHR, the offset primitiveOffset from VkAccelerationStructureGeometryInstancesDataKHR::data must be a multiple of 16" + } + ] + }, + "vkCmdWriteAccelerationStructuresPropertiesKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-02493", + "text": " queryPool must have been created with a queryType matching queryType" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-02494", + "text": " The queries identified by queryPool and firstQuery must be unavailable" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-buffer-03736", + "text": " The buffer used to create each acceleration structure in pAccelerationStructures must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-query-04880", + "text": " The sum of query and accelerationStructureCount must be less than or equal to the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431", + "text": " All acceleration structures in pAccelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryType-03432", + "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parameter", + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryType-parameter", + "text": " queryType must be a valid VkQueryType value" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-accelerationStructureCount-arraylength", + "text": " accelerationStructureCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commonparent", + "text": " Each of commandBuffer, queryPool, and the elements of pAccelerationStructures must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, +
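The vkCmdWriteAccelerationStructuresPropertiesKHR rules just listed (a pool created with the matching queryType, queries in the unavailable state, structures built with ALLOW_COMPACTION, and query + accelerationStructureCount within the pool) are easiest to see together. A minimal C sketch, under the assumptions that blas was built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, cmd is a recording command buffer from a compute-capable pool outside a render pass, and the KHR entry point has been loaded; the function and variable names are illustrative only:

    #include <vulkan/vulkan.h>

    /* Sketch: record a compacted-size query for one acceleration structure. */
    static VkQueryPool record_compacted_size_query(VkDevice device, VkCommandBuffer cmd,
                                                   VkAccelerationStructureKHR blas)
    {
        VkQueryPoolCreateInfo poolInfo = {
            .sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
            /* queryType must match the queryType passed to the write command (02493) */
            .queryType = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
            .queryCount = 1, /* query + accelerationStructureCount <= queryCount (04880) */
        };
        VkQueryPool pool = VK_NULL_HANDLE;
        vkCreateQueryPool(device, &poolInfo, NULL, &pool);

        /* Resetting puts the query in the unavailable state, as 02494 requires. */
        vkCmdResetQueryPool(cmd, pool, 0, 1);
        /* A barrier making the build's writes visible would normally precede this. */
        vkCmdWriteAccelerationStructuresPropertiesKHR(
            cmd, 1, &blas, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, pool, 0);
        return pool;
    }

    /* Once the command buffer has executed, the size can be read back on the host. */
    static VkDeviceSize read_compacted_size(VkDevice device, VkQueryPool pool)
    {
        VkDeviceSize size = 0;
        vkGetQueryPoolResults(device, pool, 0, 1, sizeof(size), &size, sizeof(size),
                              VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
        return size;
    }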
"vkCmdWriteAccelerationStructuresPropertiesNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-03755", + "text": " queryPool must have been created with a queryType matching queryType" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-03756", + "text": " The queries identified by queryPool and firstQuery must be unavailable" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructure-03757", + "text": " accelerationStructure must be bound completely and contiguously to a single VkDeviceMemory object via vkBindAccelerationStructureMemoryNV" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructures-03431", + "text": " All acceleration structures in pAccelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-03432", + "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-pAccelerationStructures-parameter", + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureNV handles" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter", + "text": " queryType must be a valid VkQueryType value" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructureCount-arraylength", + "text": " accelerationStructureCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent", + "text": " Each of commandBuffer, queryPool, and the elements of pAccelerationStructures must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdCopyAccelerationStructureNV": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-03410", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-03411", + "text": " If mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-buffer-03718", + "text": " The buffer used to create src must be bound to device memory" + 
}, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-buffer-03719", + "text": " The buffer used to create dst must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureNV handle" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-parameter", + "text": " src must be a valid VkAccelerationStructureNV handle" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commonparent", + "text": " Each of commandBuffer, dst, and src must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdCopyAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-buffer-03737", + "text": " The buffer used to create pInfo->src must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-buffer-03738", + "text": " The buffer used to create pInfo->dst must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkCopyAccelerationStructureInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-mode-03410", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-src-03411", + "text": " If mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-buffer-03718", + "text": " The buffer used to create src must be bound to device memory" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-buffer-03719", + "text": " The buffer used to create dst must be bound to device memory" + }, + { + "vuid": 
"VUID-VkCopyAccelerationStructureInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-src-parameter", + "text": " src must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-commonparent", + "text": " Both of dst, and src must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdCopyAccelerationStructureToMemoryKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pInfo-03739", + "text": " pInfo->dst.deviceAddress must be a valid device address for a buffer bound to device memory" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pInfo-03740", + "text": " pInfo->dst.deviceAddress must be aligned to 256 bytes" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pInfo-03741", + "text": " If the buffer pointed to by pInfo->dst.deviceAddress is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-None-03559", + "text": " The buffer used to create pInfo->src must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureToMemoryInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkCopyAccelerationStructureToMemoryInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-dst-03561", + "text": " The memory pointed to by dst must be at least as large as the serialization size of src, as reported by vkWriteAccelerationStructuresPropertiesKHR or vkCmdWriteAccelerationStructuresPropertiesKHR with a query type of VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-03412", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": 
"VUID-VkCopyAccelerationStructureToMemoryInfoKHR-src-parameter", + "text": " src must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + } + ] + }, + "vkCmdCopyMemoryToAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-03742", + "text": " pInfo->src.deviceAddress must be a valid device address for a buffer bound to device memory." + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-03743", + "text": " pInfo->src.deviceAddress must be aligned to 256 bytes" + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-03744", + "text": " If the buffer pointed to by pInfo->src.deviceAddress is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-buffer-03745", + "text": " The buffer used to create pInfo->dst must be bound to device memory" + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyMemoryToAccelerationStructureInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "VkCopyMemoryToAccelerationStructureInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-03413", + "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-pInfo-03414", + "text": " The data in src must have a format compatible with the destination physical device as returned by vkGetDeviceAccelerationStructureCompatibilityKHR" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-dst-03746", + "text": " dst must have been created with a size greater than or equal to that used to serialize the data in src" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-dst-parameter", + "text": " dst must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-parameter", + "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + } + ] + }, + "vkGetDeviceAccelerationStructureCompatibilityKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-rayTracingPipeline-03661", + "text": " The 
rayTracingPipeline or rayQuery feature must be enabled" + }, + { + "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-pVersionInfo-parameter", + "text": " pVersionInfo must be a valid pointer to a valid VkAccelerationStructureVersionInfoKHR structure" + }, + { + "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-pCompatibility-parameter", + "text": " pCompatibility must be a valid pointer to a VkAccelerationStructureCompatibilityKHR value" + } + ] + }, + "VkAccelerationStructureVersionInfoKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-VkAccelerationStructureVersionInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR" + }, + { + "vuid": "VUID-VkAccelerationStructureVersionInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAccelerationStructureVersionInfoKHR-pVersionData-parameter", + "text": " pVersionData must be a valid pointer to an array of 2*VK_UUID_SIZE uint8_t values" + } + ] + }, + "vkBuildAccelerationStructuresKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-mode-04628", + "text": " The mode member of each element of pInfos must be a valid VkBuildAccelerationStructureModeKHR value" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-srcAccelerationStructure-04629", + "text": " If the srcAccelerationStructure member of any element of pInfos is not VK_NULL_HANDLE, the srcAccelerationStructure member must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-04630", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure member must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03403", + "text": " The srcAccelerationStructure member of any element of pInfos must not be the same acceleration structure as the dstAccelerationStructure member of any other element of pInfos" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-dstAccelerationStructure-03698", + "text": " The dstAccelerationStructure member of any element of pInfos must not be the same acceleration structure as the dstAccelerationStructure member of any other element of pInfos" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-dstAccelerationStructure-03800", + "text": " The dstAccelerationStructure member of any element of pInfos must be a valid VkAccelerationStructureKHR handle" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03699", + "text": " For each element of pInfos, if its type member is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, its dstAccelerationStructure member must have been created with a value of VkAccelerationStructureCreateInfoKHR::type equal to either VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR or VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03700", + "text": " For each element of pInfos, if its type member is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, its dstAccelerationStructure member must have been created with a value of VkAccelerationStructureCreateInfoKHR::type equal to either VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR or 
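vkGetDeviceAccelerationStructureCompatibilityKHR pairs with the deserialize path above: the 2*VK_UUID_SIZE version bytes it consumes are taken here to be the leading bytes of a previously serialized blob. A hedged sketch; the helper name is hypothetical, and the rayTracingPipeline or rayQuery feature is assumed enabled:

    #include <stdint.h>
    #include <vulkan/vulkan.h>

    /* Hypothetical helper: check whether a saved blob can be deserialized on
       this device before attempting the copy. `blob` is assumed to start with
       the 2*VK_UUID_SIZE version bytes of serialized acceleration structure
       data. */
    static int blob_is_compatible(VkDevice device, const uint8_t *blob,
                                  PFN_vkGetDeviceAccelerationStructureCompatibilityKHR pfn)
    {
        VkAccelerationStructureVersionInfoKHR version = {
            .sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR,
            .pNext = NULL,
            .pVersionData = blob, /* must point to 2*VK_UUID_SIZE uint8_t values */
        };
        VkAccelerationStructureCompatibilityKHR compat;
        pfn(device, &version, &compat);
        return compat == VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR;
    }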
VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03663", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, inactive primitives in its srcAccelerationStructure member must not be made active" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03664", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, active primitives in its srcAccelerationStructure member must not be made inactive" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-None-03407", + "text": " The dstAccelerationStructure member of any element of pInfos must not be referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR in any other element of pInfos" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-dstAccelerationStructure-03701", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the srcAccelerationStructure member of any other element of pInfos with a mode equal to VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, which is accessed by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-dstAccelerationStructure-03702", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the dstAccelerationStructure member of any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-dstAccelerationStructure-03703", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing the scratchData member of any element of pInfos (including the same element), which is accessed by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-scratchData-03704", + "text": " The range of memory backing the scratchData member of any element of pInfos that is accessed by this command must not overlap the memory backing the scratchData member of any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-scratchData-03705", + "text": " The range of memory backing the scratchData member of any element of pInfos that is accessed by this command must not overlap the memory backing the srcAccelerationStructure member of any element of pInfos with a mode equal to VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR (including the same element), which is accessed by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-dstAccelerationStructure-03706", + "text": " The range of memory backing the dstAccelerationStructure member of any element of pInfos that is accessed by this command must not overlap the memory backing any acceleration structure referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR in any other element of pInfos, which is accessed by this command" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03667", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its 
srcAccelerationStructure member must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR set in VkAccelerationStructureBuildGeometryInfoKHR::flags" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03668", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its srcAccelerationStructure and dstAccelerationStructure members must either be the same VkAccelerationStructureKHR, or not have any memory aliasing" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03758", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its geometryCount member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03759", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its flags member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03760", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, its type member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03761", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, its geometryType member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03762", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, its flags member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03763", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.vertexFormat member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03764", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its geometry.triangles.maxVertex member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03765", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, its 
geometry.triangles.indexType member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03766", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, if its geometry.triangles.transformData address was NULL when srcAccelerationStructure was last built, then it must be NULL" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03767", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, if its geometry.triangles.transformData address was not NULL when srcAccelerationStructure was last built, then it must not be NULL" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03768", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, then for each VkAccelerationStructureGeometryKHR structure referred to by its pGeometries or ppGeometries members, if geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, and geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, then the value of each index referenced must be the same as the corresponding index value when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-primitiveCount-03769", + "text": " For each VkAccelerationStructureBuildRangeInfoKHR referenced by this command, its primitiveCount member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-firstVertex-03770", + "text": " For each VkAccelerationStructureBuildRangeInfoKHR referenced by this command, if the corresponding geometry uses indices, its firstVertex member must have the same value which was specified when srcAccelerationStructure was last built" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03801", + "text": " For each element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, the corresponding {maxinstancecheck} must be less than or equal to VkPhysicalDeviceAccelerationStructurePropertiesKHR::maxInstanceCount" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03675", + "text": " For each pInfos[i], dstAccelerationStructure must have been created with a value of VkAccelerationStructureCreateInfoKHR::size greater than or equal to the memory size required by the build operation, as returned by vkGetAccelerationStructureBuildSizesKHR with pBuildInfo = pInfos[i] and with each element of the pMaxPrimitiveCounts array greater than or equal to the equivalent ppBuildRangeInfos[i][j].primitiveCount values for j in [0,pInfos[i].geometryCount)" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-ppBuildRangeInfos-03676", + "text": " Each element of ppBuildRangeInfos[i] must be a valid pointer to an array of pInfos[i].geometryCount VkAccelerationStructureBuildRangeInfoKHR structures" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-deferredOperation-03677", + "text": " If deferredOperation is not VK_NULL_HANDLE, it must be a valid 
VkDeferredOperationKHR object" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-deferredOperation-03678", + "text": " Any previous deferred operation that was associated with deferredOperation must be complete" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03722", + "text": " For each element of pInfos, the buffer used to create its dstAccelerationStructure member must be bound to host-visible device memory" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03723", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR the buffer used to create its srcAccelerationStructure member must be bound to host-visible device memory" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03724", + "text": " For each element of pInfos, the buffer used to create each acceleration structure referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR must be bound to host-visible device memory" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-accelerationStructureHostCommands-03581", + "text": " The VkPhysicalDeviceAccelerationStructureFeaturesKHR::accelerationStructureHostCommands feature must be enabled" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03725", + "text": " If pInfos[i].mode is VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR, all addresses between pInfos[i].scratchData.hostAddress and pInfos[i].scratchData.hostAddress + N - 1 must be valid host memory, where N is given by the buildScratchSize member of the VkAccelerationStructureBuildSizesInfoKHR structure returned from a call to vkGetAccelerationStructureBuildSizesKHR with an identical VkAccelerationStructureBuildGeometryInfoKHR structure and primitive count" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03726", + "text": " If pInfos[i].mode is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR, all addresses between pInfos[i].scratchData.hostAddress and pInfos[i].scratchData.hostAddress + N - 1 must be valid host memory, where N is given by the updateScratchSize member of the VkAccelerationStructureBuildSizesInfoKHR structure returned from a call to vkGetAccelerationStructureBuildSizesKHR with an identical VkAccelerationStructureBuildGeometryInfoKHR structure and primitive count" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03771", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, geometry.triangles.vertexData.hostAddress must be a valid host address" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03772", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.indexType is not VK_INDEX_TYPE_NONE_KHR, geometry.triangles.indexData.hostAddress must be a valid host address" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03773", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR, if geometry.triangles.transformData.hostAddress is not 0, it must be a valid host address" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03774", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR, 
geometry.aabbs.data.hostAddress must be a valid host address" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03778", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, geometry.instances.data.hostAddress must be a valid host address" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03779", + "text": " For any element of pInfos[i].pGeometries or pInfos[i].ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR, each VkAccelerationStructureInstanceKHR::accelerationStructureReference value in geometry.instances.data.hostAddress must be a valid VkAccelerationStructureKHR object" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-deferredOperation-parameter", + "text": " If deferredOperation is not VK_NULL_HANDLE, deferredOperation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-parameter", + "text": " pInfos must be a valid pointer to an array of infoCount valid VkAccelerationStructureBuildGeometryInfoKHR structures" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-ppBuildRangeInfos-parameter", + "text": " ppBuildRangeInfos must be a valid pointer to an array of infoCount VkAccelerationStructureBuildRangeInfoKHR structures" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-infoCount-arraylength", + "text": " infoCount must be greater than 0" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-deferredOperation-parent", + "text": " If deferredOperation is a valid handle, it must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_device_group,VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03775", + "text": " For each element of pInfos, the buffer used to create its dstAccelerationStructure member must be bound to memory that was not allocated with multiple instances" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03776", + "text": " For each element of pInfos, if its mode member is VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR the buffer used to create its srcAccelerationStructure member must be bound to memory that was not allocated with multiple instances" + }, + { + "vuid": "VUID-vkBuildAccelerationStructuresKHR-pInfos-03777", + "text": " For each element of pInfos, the buffer used to create each acceleration structure referenced by the geometry.instances.data member of any element of pGeometries or ppGeometries with a geometryType of VK_GEOMETRY_TYPE_INSTANCES_KHR must be bound to memory that was not allocated with multiple instances" + } + ] + }, + "vkCopyAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-deferredOperation-03677", + "text": " If deferredOperation is not VK_NULL_HANDLE, it must be a valid VkDeferredOperationKHR object" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-deferredOperation-03678", + "text": " Any previous deferred operation that was associated with deferredOperation must be complete" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-buffer-03727", + "text": " The buffer used to create pInfo->src must be bound to host-visible device memory" + }, + { + "vuid": 
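The host-build rules above (VUIDs 03725/03726) bound the valid range of scratchData.hostAddress by the sizes vkGetAccelerationStructureBuildSizesKHR reports for an identical build info. A sketch of that handshake for a single-geometry build; the helper is hypothetical, buildInfo is assumed otherwise fully populated (type, geometry, and a dstAccelerationStructure whose buffer is bound to host-visible memory), the accelerationStructureHostCommands feature is assumed enabled, and malloc stands in for any allocator:

    #include <stdlib.h>
    #include <vulkan/vulkan.h>

    static VkResult host_build(VkDevice device,
                               VkAccelerationStructureBuildGeometryInfoKHR buildInfo,
                               const VkAccelerationStructureBuildRangeInfoKHR *range,
                               uint32_t primitiveCount, /* one geometry assumed */
                               PFN_vkGetAccelerationStructureBuildSizesKHR getSizes,
                               PFN_vkBuildAccelerationStructuresKHR build)
    {
        VkAccelerationStructureBuildSizesInfoKHR sizes = {
            .sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR,
        };
        getSizes(device, VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
                 &buildInfo, &primitiveCount, &sizes);

        /* VUID 03725: every address in scratchData.hostAddress ..
           scratchData.hostAddress + buildScratchSize - 1 must be valid host
           memory for a MODE_BUILD build. */
        void *scratch = malloc(sizes.buildScratchSize);
        buildInfo.mode = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR;
        buildInfo.scratchData.hostAddress = scratch;

        VkResult res = build(device, VK_NULL_HANDLE /* no deferred operation */,
                             1, &buildInfo, &range);
        free(scratch);
        return res;
    }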
"VUID-vkCopyAccelerationStructureKHR-buffer-03728", + "text": " The buffer used to create pInfo->dst must be bound to host-visible device memory" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-accelerationStructureHostCommands-03582", + "text": " The VkPhysicalDeviceAccelerationStructureFeaturesKHR::accelerationStructureHostCommands feature must be enabled" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-deferredOperation-parameter", + "text": " If deferredOperation is not VK_NULL_HANDLE, deferredOperation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureInfoKHR structure" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-deferredOperation-parent", + "text": " If deferredOperation is a valid handle, it must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_device_group,VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-buffer-03780", + "text": " The buffer used to create pInfo->src must be bound to memory that was not allocated with multiple instances" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureKHR-buffer-03781", + "text": " The buffer used to create pInfo->dst must be bound to memory that was not allocated with multiple instances" + } + ] + }, + "vkCopyMemoryToAccelerationStructureKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-deferredOperation-03677", + "text": " If deferredOperation is not VK_NULL_HANDLE, it must be a valid VkDeferredOperationKHR object" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-deferredOperation-03678", + "text": " Any previous deferred operation that was associated with deferredOperation must be complete" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-pInfo-03729", + "text": " pInfo->src.hostAddress must be a valid host pointer" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-pInfo-03750", + "text": " pInfo->src.hostAddress must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-buffer-03730", + "text": " The buffer used to create pInfo->dst must be bound to host-visible device memory" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-accelerationStructureHostCommands-03583", + "text": " The VkPhysicalDeviceAccelerationStructureFeaturesKHR::accelerationStructureHostCommands feature must be enabled" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-deferredOperation-parameter", + "text": " If deferredOperation is not VK_NULL_HANDLE, deferredOperation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyMemoryToAccelerationStructureInfoKHR structure" + }, + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-deferredOperation-parent", + "text": " If deferredOperation is a valid handle, it must have been created, allocated, or retrieved from device" + } + ], + 
"(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_device_group,VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-buffer-03782", + "text": " The buffer used to create pInfo->dst must be bound to memory that was not allocated with multiple instances" + } + ] + }, + "vkCopyAccelerationStructureToMemoryKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-deferredOperation-03677", + "text": " If deferredOperation is not VK_NULL_HANDLE, it must be a valid VkDeferredOperationKHR object" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-deferredOperation-03678", + "text": " Any previous deferred operation that was associated with deferredOperation must be complete" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-buffer-03731", + "text": " The buffer used to create pInfo->src must be bound to host-visible device memory" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-pInfo-03732", + "text": " pInfo->dst.hostAddress must be a valid host pointer" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-pInfo-03751", + "text": " pInfo->dst.hostAddress must be aligned to 16 bytes" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-accelerationStructureHostCommands-03584", + "text": " The VkPhysicalDeviceAccelerationStructureFeaturesKHR::accelerationStructureHostCommands feature must be enabled" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-deferredOperation-parameter", + "text": " If deferredOperation is not VK_NULL_HANDLE, deferredOperation must be a valid VkDeferredOperationKHR handle" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureToMemoryInfoKHR structure" + }, + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-deferredOperation-parent", + "text": " If deferredOperation is a valid handle, it must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_device_group,VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-buffer-03783", + "text": " The buffer used to create pInfo->src must be bound to memory that was not allocated with multiple instances" + } + ] + }, + "vkWriteAccelerationStructuresPropertiesKHR": { + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)": [ + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431", + "text": " All acceleration structures in pAccelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03432", + "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03448", + "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, then stride must be a multiple of the size of VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03449", + "text": " If queryType is 
VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, then data must point to a VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03450", + "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, then stride must be a multiple of the size of VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03451", + "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, then data must point to a VkDeviceSize" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-dataSize-03452", + "text": " dataSize must be greater than or equal to accelerationStructureCount*stride" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-buffer-03733", + "text": " The buffer used to create each acceleration structure in pAccelerationStructures must be bound to host-visible device memory" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructureHostCommands-03585", + "text": " The VkPhysicalDeviceAccelerationStructureFeaturesKHR::accelerationStructureHostCommands feature must be enabled" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parameter", + "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-parameter", + "text": " queryType must be a valid VkQueryType value" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructureCount-arraylength", + "text": " accelerationStructureCount must be greater than 0" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parent", + "text": " Each element of pAccelerationStructures must have been created, allocated, or retrieved from device" + } + ], + "(VK_NV_ray_tracing,VK_KHR_acceleration_structure)+(VK_KHR_device_group,VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-buffer-03784", + "text": " The buffer used to create each acceleration structure in pAccelerationStructures must be bound to memory that was not allocated with multiple instances" + } + ] + }, + "vkCmdTraceRaysNV": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { 
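For tightly packed results, the stride and dataSize constraints above collapse to stride = sizeof(VkDeviceSize) and dataSize = accelerationStructureCount * stride. A sketch of querying serialization sizes on the host; the helper is hypothetical, the structures must already have been built, and ALLOW_COMPACTION is only required for the compacted-size query:

    #include <vulkan/vulkan.h>

    static VkResult query_serialization_sizes(
        VkDevice device, uint32_t count,
        const VkAccelerationStructureKHR *structures,
        VkDeviceSize *sizes, /* out: count elements */
        PFN_vkWriteAccelerationStructuresPropertiesKHR pfn)
    {
        return pfn(device, count, structures,
                   VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
                   count * sizeof(VkDeviceSize), /* dataSize >= count*stride */
                   sizes,
                   sizeof(VkDeviceSize));        /* stride: multiple of sizeof(VkDeviceSize) */
    }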
"vuid": "VUID-vkCmdTraceRaysNV-None-02691", @@ -26514,7 +37150,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdTraceRaysNV-None-02859", @@ -26542,15 +37178,23 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." }, { "vuid": "VUID-vkCmdTraceRaysNV-None-03429", - "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing shader pipeline" + "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing pipeline" }, { - "vuid": "VUID-vkCmdTraceRaysNV-maxRecursionDepth-03430", - "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxRecursionDepth used to create the bound ray tracing pipeline" + "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-04624", + "text": " commandBuffer must not be a protected command buffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-maxRecursionDepth-03625", + "text": " This command must not cause a trace ray instruction to be executed from a shader invocation with a recursion depth greater than the value of maxRecursionDepth used to create the bound ray tracing pipeline" }, { "vuid": "VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-04042", @@ -26635,51 +37279,7 @@ { "vuid": "VUID-vkCmdTraceRaysNV-depth-02471", "text": " depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysNV-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysNV-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysNV-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that 
supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" }, - { - "vuid": "VUID-vkCmdTraceRaysNV-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdTraceRaysNV-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" - }, - { - "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02712", - "text": " If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" - }, - { - "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02713", - "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -26716,13 +37316,77 @@ "vuid": "VUID-vkCmdTraceRaysNV-commonparent", "text": " Each of callableShaderBindingTableBuffer, commandBuffer, hitShaderBindingTableBuffer, missShaderBindingTableBuffer, and raygenShaderBindingTableBuffer that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by 
VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_NV_ray_tracing)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
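Every KHR command in this block is a device-level extension entry point, which is why the sketches above take PFN_ pointers as parameters instead of calling the commands directly: the loader does not export them. They are assumed to be fetched once per device, e.g.:

    #include <vulkan/vulkan.h>

    /* Fetch one of the ray tracing entry points; the same pattern applies to
       all PFN_ parameters used in the sketches above. */
    static PFN_vkCmdTraceRaysKHR load_trace_rays(VkDevice device)
    {
        return (PFN_vkCmdTraceRaysKHR)vkGetDeviceProcAddr(device, "vkCmdTraceRaysKHR");
    }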
+ } ] }, "vkCmdTraceRaysKHR": { - "core": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ { - "vuid": "VUID-vkCmdTraceRaysKHR-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdTraceRaysKHR-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdTraceRaysKHR-None-02691", @@ -26746,7 +37410,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdTraceRaysKHR-None-02859", @@ -26774,127 +37438,111 @@ }, { "vuid": "VUID-vkCmdTraceRaysKHR-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." 
}, { "vuid": "VUID-vkCmdTraceRaysKHR-None-03429", - "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing shader pipeline" + "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing pipeline" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-maxRecursionDepth-03430", - "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxRecursionDepth used to create the bound ray tracing pipeline" + "vuid": "VUID-vkCmdTraceRaysKHR-maxPipelineRayRecursionDepth-03679", + "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxPipelineRayRecursionDepth used to create the bound ray tracing pipeline" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-04019", - "text": " If pRayGenShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-03680", + "text": " If the buffer from which pRayGenShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-offset-04020", - "text": " The offset member of pRayGenShaderBindingTable must be less than the size of the pRayGenShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-03681", + "text": " The buffer from which the pRayGenShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-04021", - "text": " pRayGenShaderBindingTable->offset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" - }, - { - "vuid": "VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-04022", - "text": " pRayGenShaderBindingTable->offset + pRayGenShaderBindingTable->size must be less than or equal to the size of pRayGenShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-03682", + "text": " pRayGenShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { "vuid": "VUID-vkCmdTraceRaysKHR-size-04023", "text": " The size member of pRayGenShaderBindingTable must be equal to its stride member" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-04024", - "text": " If pMissShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-03683", + "text": " If the buffer from which pMissShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-offset-04025", - "text": " The offset member of pMissShaderBindingTable must be less than the size of pMissShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-03684", + "text": " The buffer from which the pMissShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-offset-04026", - 
"text": " The offset member of pMissShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + "vuid": "VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-03685", + "text": " pMissShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-04027", - "text": " pMissShaderBindingTable->offset + pMissShaderBindingTable->size must be less than or equal to the size of pMissShaderBindingTable->buffer" - }, - { - "vuid": "VUID-vkCmdTraceRaysKHR-stride-04028", - "text": " The stride member of pMissShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + "vuid": "VUID-vkCmdTraceRaysKHR-stride-03686", + "text": " The stride member of pMissShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleAlignment" }, { "vuid": "VUID-vkCmdTraceRaysKHR-stride-04029", - "text": " The stride member of pMissShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + "text": " The stride member of pMissShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxShaderGroupStride" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04030", - "text": " If pHitShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03687", + "text": " If the buffer from which pHitShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-offset-04031", - "text": " The offset member of pHitShaderBindingTable must be less than the size of pHitShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03688", + "text": " The buffer from which the pHitShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-offset-04032", - "text": " The offset member of pHitShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03689", + "text": " pHitShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04033", - "text": " pHitShaderBindingTable->offset + pHitShaderBindingTable->size must be less than or equal to the size of pHitShaderBindingTable->buffer" - }, - { - "vuid": "VUID-vkCmdTraceRaysKHR-stride-04034", - "text": " The stride member of pHitShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + "vuid": "VUID-vkCmdTraceRaysKHR-stride-03690", + "text": " The stride member of pHitShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleAlignment" }, { "vuid": "VUID-vkCmdTraceRaysKHR-stride-04035", - "text": " The stride member of pHitShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + "text": " The stride member of pHitShaderBindingTable must be less than or equal to 
VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxShaderGroupStride" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-04036", - "text": " If pCallableShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-03691", + "text": " If the buffer from which pCallableShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-offset-04037", - "text": " The offset member of pCallableShaderBindingTable must be less than the size of pCallableShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-03692", + "text": " The buffer from which the pCallableShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-offset-04038", - "text": " The offset member of pCallableShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + "vuid": "VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-03693", + "text": " pCallableShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-04039", - "text": " pCallableShaderBindingTable->offset + pCallableShaderBindingTable->size must be less than or equal to the size of pCallableShaderBindingTable->buffer" - }, - { - "vuid": "VUID-vkCmdTraceRaysKHR-stride-04040", - "text": " The stride member of pCallableShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + "vuid": "VUID-vkCmdTraceRaysKHR-stride-03694", + "text": " The stride member of pCallableShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleAlignment" }, { "vuid": "VUID-vkCmdTraceRaysKHR-stride-04041", - "text": " The stride member of pCallableShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + "text": " The stride member of pCallableShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxShaderGroupStride" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-flags-03508", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, the buffer member of pHitShaderBindingTable must not be VK_NULL_HANDLE" + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03696", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, the deviceAddress member of pHitShaderBindingTable must not be zero" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-flags-03509", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, the buffer member of pHitShaderBindingTable must not be VK_NULL_HANDLE" - }, - { - "vuid": "VUID-vkCmdTraceRaysKHR-flags-03510", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, the buffer member of pHitShaderBindingTable must not 
be VK_NULL_HANDLE" + "vuid": "VUID-vkCmdTraceRaysKHR-flags-03697", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, the deviceAddress member of pHitShaderBindingTable must not be zero" }, { "vuid": "VUID-vkCmdTraceRaysKHR-flags-03511", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, the shader group handle identified by pMissShaderBindingTable must contain a valid miss shader" + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, the shader group handle identified by pMissShaderBindingTable must not be set to zero" }, { "vuid": "VUID-vkCmdTraceRaysKHR-flags-03512", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, entries in pHitShaderBindingTable accessed as a result of this command in order to execute an any hit shader must not be set to zero" + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, entries in pHitShaderBindingTable accessed as a result of this command in order to execute an any-hit shader must not be set to zero" }, { "vuid": "VUID-vkCmdTraceRaysKHR-flags-03513", @@ -26905,80 +37553,52 @@ "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, entries in pHitShaderBindingTable accessed as a result of this command in order to execute an intersection shader must not be set to zero" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-width-03505", - "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0]" + "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04735", + "text": " Any non-zero hit shader group entries in pHitShaderBindingTable accessed by this call from a geometry with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR must have been created with VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-height-03506", - "text": " height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04736", + "text": " Any non-zero hit shader group entries in pHitShaderBindingTable accessed by this call from a geometry with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR must have been created with VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-depth-03507", - "text": " depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysKHR-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysKHR-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, 
VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysKHR-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-04625", + "text": " commandBuffer must not be a protected command buffer" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdTraceRaysKHR-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + "vuid": "VUID-vkCmdTraceRaysKHR-width-03626", + "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] {times} VkPhysicalDeviceLimits::maxComputeWorkGroupSize[0]" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02712", - "text": " If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" + "vuid": "VUID-vkCmdTraceRaysKHR-height-03627", + "text": " height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] {times} VkPhysicalDeviceLimits::maxComputeWorkGroupSize[1]" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-02713", - "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + "vuid": "VUID-vkCmdTraceRaysKHR-depth-03628", + "text": " depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] {times} VkPhysicalDeviceLimits::maxComputeWorkGroupSize[2]" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-width-03629", + "text": " width {times} height {times} depth must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxRayDispatchInvocationCount" + }, { "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { "vuid": "VUID-vkCmdTraceRaysKHR-pRaygenShaderBindingTable-parameter", - "text": " pRaygenShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + "text": " pRaygenShaderBindingTable must be a 
valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-parameter", - "text": " pMissShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + "text": " pMissShaderBindingTable must be a valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-parameter", - "text": " pHitShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + "text": " pHitShaderBindingTable must be a valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-parameter", - "text": " pCallableShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + "text": " pCallableShaderBindingTable must be a valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-recording", @@ -26992,29 +37612,89 @@ "vuid": "VUID-vkCmdTraceRaysKHR-renderpass", "text": " This command must only be called outside of a render pass instance" } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_VERSION_1_1)": [ + { + "vuid": 
"VUID-vkCmdTraceRaysKHR-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ } ] }, - "VkStridedBufferRegionKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + "VkStridedDeviceAddressRegionKHR": { + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ { - "vuid": "VUID-VkStridedBufferRegionKHR-buffer-03515", - "text": " If buffer is not VK_NULL_HANDLE, size plus offset must be less than or equal to the size of buffer" + "vuid": "VUID-VkStridedDeviceAddressRegionKHR-size-04631", + "text": " If size is not zero, all addresses between deviceAddress and deviceAddress + size - 1 must be in the buffer device address range of the same buffer" }, { - "vuid": "VUID-VkStridedBufferRegionKHR-buffer-03516", - "text": " If buffer is not VK_NULL_HANDLE, stride must be less than the size of buffer" - }, - { - "vuid": "VUID-VkStridedBufferRegionKHR-buffer-parameter", - "text": " If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle" + "vuid": "VUID-VkStridedDeviceAddressRegionKHR-size-04632", + "text": " If size is not zero, stride must be less than or equal to the size of the buffer from which deviceAddress was queried" } ] }, "vkCmdTraceRaysIndirectKHR": { - "core": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02690", - "text": " If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02691", @@ -27038,7 +37718,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02701", - "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer, and done so after any previously bound pipeline with the corresponding state not specified as dynamic" + "text": " If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02859", @@ -27066,127 +37746,111 @@ }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
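(Illustrative aside, not part of the patch: the new VkStridedDeviceAddressRegionKHR VUIDs require every address in a region to fall inside the device-address range of one buffer, and the SBT VUIDs require that buffer to carry VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR. A hedged C sketch of creating such a buffer; create_sbt_buffer is a hypothetical name, and memory allocation/binding — which needs VkMemoryAllocateFlagsInfo with VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT — is elided.)

    #include <stddef.h>
    #include <vulkan/vulkan.h>

    /* Creates the single buffer backing all four SBT regions and returns its
     * device address (0 on failure). Backing all regions with one buffer is
     * what makes VUID-VkStridedDeviceAddressRegionKHR-size-04631 easy to meet. */
    static VkDeviceAddress create_sbt_buffer(VkDevice device, VkDeviceSize size,
                                             VkBuffer *outBuffer) {
        const VkBufferCreateInfo info = {
            .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
            .size  = size,
            .usage = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR | /* required for SBT reads */
                     VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT |    /* address will be queried */
                     VK_BUFFER_USAGE_TRANSFER_DST_BIT,              /* handles uploaded by copy */
            .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        };
        if (vkCreateBuffer(device, &info, NULL, outBuffer) != VK_SUCCESS)
            return 0;
        /* ...allocate and bind device memory here, before querying the address... */
        const VkBufferDeviceAddressInfo addrInfo = {
            .sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
            .buffer = *outBuffer,
        };
        return vkGetBufferDeviceAddress(device, &addrInfo);
    }

The shader group handles copied into this buffer must come from the ray tracing pipeline that will be bound when tracing (vkGetRayTracingShaderGroupHandlesKHR on that same pipeline), per the "queried from the currently bound ray tracing pipeline" VUID below. (End of aside.)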
+ }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format." }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-03429", - "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing shader pipeline" + "text": " Any shader group handle referenced by this call must have been queried from the currently bound ray tracing pipeline" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-maxRecursionDepth-03430", - "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxRecursionDepth used to create the bound ray tracing pipeline" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-maxPipelineRayRecursionDepth-03679", + "text": " This command must not cause a shader call instruction to be executed from a shader invocation with a recursion depth greater than the value of maxPipelineRayRecursionDepth used to create the bound ray tracing pipeline" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-04019", - "text": " If pRayGenShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-03680", + "text": " If the buffer from which pRayGenShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-04020", - "text": " The offset member of pRayGenShaderBindingTable must be less than the size of the pRayGenShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-03681", + "text": " The buffer from which the pRayGenShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-04021", - "text": " pRayGenShaderBindingTable->offset must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" - }, - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-04022", - "text": " pRayGenShaderBindingTable->offset + pRayGenShaderBindingTable->size must be less than or equal to the size of pRayGenShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-03682", + "text": " pRayGenShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-size-04023", "text": " The size member of pRayGenShaderBindingTable must be equal to its stride member" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-04024", - "text": " If pMissShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-03683", + "text": " If the buffer from which pMissShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-04025", - "text": " The 
offset member of pMissShaderBindingTable must be less than the size of pMissShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-03684", + "text": " The buffer from which the pMissShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-04026", - "text": " The offset member of pMissShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-03685", + "text": " pMissShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-04027", - "text": " pMissShaderBindingTable->offset + pMissShaderBindingTable->size must be less than or equal to the size of pMissShaderBindingTable->buffer" - }, - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-04028", - "text": " The stride member of pMissShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-03686", + "text": " The stride member of pMissShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleAlignment" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-04029", - "text": " The stride member of pMissShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + "text": " The stride member of pMissShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxShaderGroupStride" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04030", - "text": " If pHitShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03687", + "text": " If the buffer from which pHitShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-04031", - "text": " The offset member of pHitShaderBindingTable must be less than the size of pHitShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03688", + "text": " The buffer from which the pHitShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-04032", - "text": " The offset member of pHitShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03689", + "text": " pHitShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04033", - "text": " pHitShaderBindingTable->offset + pHitShaderBindingTable->size must be less than or equal to the size of pHitShaderBindingTable->buffer" - }, - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-04034", - "text": " The stride member of pHitShaderBindingTable must be a multiple of 
VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-03690", + "text": " The stride member of pHitShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleAlignment" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-04035", - "text": " The stride member of pHitShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + "text": " The stride member of pHitShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxShaderGroupStride" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-04036", - "text": " If pCallableShaderBindingTable->buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-03691", + "text": " If the buffer from which pCallableShaderBindingTable->deviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-04037", - "text": " The offset member of pCallableShaderBindingTable must be less than the size of pCallableShaderBindingTable->buffer" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-03692", + "text": " The buffer from which the pCallableShaderBindingTable->deviceAddress is queried must have been created with the VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR usage flag" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-04038", - "text": " The offset member of pCallableShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupBaseAlignment" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-03693", + "text": " pCallableShaderBindingTable->deviceAddress must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupBaseAlignment" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-04039", - "text": " pCallableShaderBindingTable->offset + pCallableShaderBindingTable->size must be less than or equal to the size of pCallableShaderBindingTable->buffer" - }, - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-04040", - "text": " The stride member of pCallableShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPropertiesKHR::shaderGroupHandleSize" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-03694", + "text": " The stride member of pCallableShaderBindingTable must be a multiple of VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleAlignment" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-stride-04041", - "text": " The stride member of pCallableShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPropertiesKHR::maxShaderGroupStride" + "text": " The stride member of pCallableShaderBindingTable must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxShaderGroupStride" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03508", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, the buffer member of pHitShaderBindingTable must not be VK_NULL_HANDLE" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03696", + "text": " If the currently bound ray tracing pipeline was created with flags that included 
VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, the deviceAddress member of pHitShaderBindingTable must not be zero" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03509", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, the buffer member of pHitShaderBindingTable must not be VK_NULL_HANDLE" - }, - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03510", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, the buffer member of pHitShaderBindingTable must not be VK_NULL_HANDLE" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03697", + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, the deviceAddress member of pHitShaderBindingTable must not be zero" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03511", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, the shader group handle identified by pMissShaderBindingTable must contain a valid miss shader" + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, the shader group handle identified by pMissShaderBindingTable must not be set to zero" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03512", - "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, entries in pHitShaderBindingTable accessed as a result of this command in order to execute an any hit shader must not be set to zero" + "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, entries in pHitShaderBindingTable accessed as a result of this command in order to execute an any-hit shader must not be set to zero" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-03513", @@ -27197,88 +37861,56 @@ "text": " If the currently bound ray tracing pipeline was created with flags that included VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, entries in pHitShaderBindingTable accessed as a result of this command in order to execute an intersection shader must not be set to zero" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-buffer-02708", - "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04735", + "text": " Any non-zero hit shader group entries in pHitShaderBindingTable accessed by this call from a geometry with a geometryType of VK_GEOMETRY_TYPE_TRIANGLES_KHR must have been created with VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-buffer-02709", - "text": " buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04736", + "text": " Any non-zero hit shader group entries in pHitShaderBindingTable accessed by this call from a geometry with a geometryType of VK_GEOMETRY_TYPE_AABBS_KHR must have been created with 
VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-02710", - "text": " offset must be a multiple of 4" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03632", + "text": " If the buffer from which indirectDeviceAddress was queried is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-offset-03517", - "text": " (offset + sizeof(VkTraceRaysIndirectCommandKHR)) must be less than or equal to the size of buffer" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03633", + "text": " The buffer from which indirectDeviceAddress was queried must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-rayTracingIndirectTraceRays-03518", - "text": " the VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingIndirectTraceRays feature must be enabled" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02692", - "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02693", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" - } - ], - "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-filterCubic-02694", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03634", + "text": " indirectDeviceAddress must be a multiple of 4" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-filterCubicMinmax-02695", - "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" - } - ], - "(VK_NV_corner_sampled_image)": [ - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-02696", - "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" - } - ], - "(VK_VERSION_1_1)": [ - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02707", - "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" - }, - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02711", + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-03635", "text": " commandBuffer must not be a 
protected command buffer" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03636", + "text": " All device addresses between indirectDeviceAddress and indirectDeviceAddress + sizeof(VkTraceRaysIndirectCommandKHR) - 1 must be in the buffer device address range of the same buffer" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-rayTracingPipelineTraceRaysIndirect-03637", + "text": " The VkPhysicalDeviceRayTracingPipelineFeaturesKHR::rayTracingPipelineTraceRaysIndirect feature must be enabled" + }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pRaygenShaderBindingTable-parameter", - "text": " pRaygenShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + "text": " pRaygenShaderBindingTable must be a valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-parameter", - "text": " pMissShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + "text": " pMissShaderBindingTable must be a valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-parameter", - "text": " pHitShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" + "text": " pHitShaderBindingTable must be a valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-parameter", - "text": " pCallableShaderBindingTable must be a valid pointer to a valid VkStridedBufferRegionKHR structure" - }, - { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-buffer-parameter", - "text": " buffer must be a valid VkBuffer handle" + "text": " pCallableShaderBindingTable must be a valid pointer to a valid VkStridedDeviceAddressRegionKHR structure" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-recording", @@ -27291,1068 +37923,1406 @@ { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-renderpass", "text": " This command must only be called outside of a render pass instance" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02693", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by 
VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commonparent", - "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_NV_corner_sampled_image)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" + } + ], + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)+(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
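(Illustrative aside, not part of the patch: the indirectDeviceAddress VUIDs above require the indirect buffer to be created with VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, the address to be a multiple of 4, and the rayTracingPipelineTraceRaysIndirect feature to be enabled; the VkTraceRaysIndirectCommandKHR VUIDs that follow bound each dimension by maxComputeWorkGroupCount[i] times maxComputeWorkGroupSize[i] and the product by maxRayDispatchInvocationCount. A hedged C sketch; record_trace_indirect is a hypothetical name, "mapped" is assumed to point at the buffer's host-visible memory, and vkCmdTraceRaysIndirectKHR is assumed to have been resolved via vkGetDeviceProcAddr.)

    #include <string.h>
    #include <vulkan/vulkan.h>

    /* Writes the dispatch size into the indirect buffer at offset 0 (so the
     * device address is trivially a multiple of 4) and records the trace.
     * width/height are assumed to already respect the dimension limits in
     * the VkTraceRaysIndirectCommandKHR VUIDs. */
    static void record_trace_indirect(VkCommandBuffer cmd, void *mapped,
                                      VkDeviceAddress indirectAddr,
                                      const VkStridedDeviceAddressRegionKHR *raygen,
                                      const VkStridedDeviceAddressRegionKHR *miss,
                                      const VkStridedDeviceAddressRegionKHR *hit,
                                      const VkStridedDeviceAddressRegionKHR *callable,
                                      uint32_t width, uint32_t height) {
        /* Unlike vkCmdTraceRaysKHR, the dimensions are read from the buffer
         * at execution time, not baked into the command buffer. */
        const VkTraceRaysIndirectCommandKHR cmdData = { width, height, 1 };
        memcpy(mapped, &cmdData, sizeof(cmdData));
        vkCmdTraceRaysIndirectKHR(cmd, raygen, miss, hit, callable, indirectAddr);
    }

(End of aside.)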
} ] }, "VkTraceRaysIndirectCommandKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + "(VK_NV_ray_tracing,VK_KHR_ray_tracing_pipeline)+(VK_KHR_ray_tracing_pipeline)": [ { - "vuid": "VUID-VkTraceRaysIndirectCommandKHR-width-03519", - "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0]" + "vuid": "VUID-VkTraceRaysIndirectCommandKHR-width-03638", + "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] {times} VkPhysicalDeviceLimits::maxComputeWorkGroupSize[0]" }, { - "vuid": "VUID-VkTraceRaysIndirectCommandKHR-height-03520", - "text": " height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + "vuid": "VUID-VkTraceRaysIndirectCommandKHR-height-03639", + "text": " height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] {times} VkPhysicalDeviceLimits::maxComputeWorkGroupSize[1]" }, { - "vuid": "VUID-VkTraceRaysIndirectCommandKHR-depth-03521", - "text": " depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" + "vuid": "VUID-VkTraceRaysIndirectCommandKHR-depth-03640", + "text": " depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] {times} VkPhysicalDeviceLimits::maxComputeWorkGroupSize[2]" + }, + { + "vuid": "VUID-VkTraceRaysIndirectCommandKHR-width-03641", + "text": " width {times} height {times} depth must be less than or equal to VkPhysicalDeviceRayTracingPipelinePropertiesKHR::maxRayDispatchInvocationCount" } ] }, - "vkCmdBuildAccelerationStructureNV": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ + "VkVideoQueueFamilyProperties2KHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241", - "text": " geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount" + "vuid": "VUID-VkVideoQueueFamilyProperties2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-02488", - "text": " dst must have been created with compatible VkAccelerationStructureInfoNV where VkAccelerationStructureInfoNV::type and VkAccelerationStructureInfoNV::flags are identical, VkAccelerationStructureInfoNV::instanceCount and VkAccelerationStructureInfoNV::geometryCount for dst are greater than or equal to the build size and each geometry in VkAccelerationStructureInfoNV::pGeometries for dst has greater than or equal to the number of vertices, indices, and AABBs" + "vuid": "VUID-VkVideoQueueFamilyProperties2KHR-videoCodecOperations-parameter", + "text": " videoCodecOperations must be a valid combination of VkVideoCodecOperationFlagBitsKHR values" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02489", - "text": " If update is VK_TRUE, src must not be VK_NULL_HANDLE" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02490", - "text": " If update is VK_TRUE, src must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in VkAccelerationStructureInfoNV::flags" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02491", - "text": " If update is VK_FALSE, the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and 
VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02492", - "text": " If update is VK_TRUE, the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-03522", - "text": " scratch must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-03523", - "text": " If instanceData is not VK_NULL_HANDLE, instanceData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03524", - "text": " If update is VK_TRUE, then objects that were previously active must not be made inactive as per Inactive Primitives and Instances" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03525", - "text": " If update is VK_TRUE, then objects that were previously inactive must not be made active as per Inactive Primitives and Instances" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-03526", - "text": " If update is VK_TRUE, the src and dst objects must either be the same object or not have any memory aliasing" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureInfoNV structure" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", - "text": " If instanceData is not VK_NULL_HANDLE, instanceData must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", - "text": " dst must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-src-parameter", - "text": " If src is not VK_NULL_HANDLE, src must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", - "text": " scratch must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-renderpass", - "text": " This command must only be called outside of a render pass instance" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commonparent", - "text": " Each of commandBuffer, dst, instanceData, scratch, and src that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-VkVideoQueueFamilyProperties2KHR-videoCodecOperations-requiredbitmask", + "text": " 
videoCodecOperations must not be 0" } ] }, - "vkCmdBuildAccelerationStructureKHR": { - "core": [ + "VkVideoProfileKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pOffsetInfos-03402", - "text": " Each element of ppOffsetInfos[i] must be a valid pointer to an array of pInfos[i].geometryCount VkAccelerationStructureBuildOffsetInfoKHR structures" + "vuid": "VUID-VkVideoProfileKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03403", - "text": " Each pInfos[i].srcAccelerationStructure must not refer to the same acceleration structure as any pInfos[i].dstAccelerationStructure that is provided to the same build command unless it is identical for an update" + "vuid": "VUID-VkVideoProfileKHR-videoCodecOperation-parameter", + "text": " videoCodecOperation must be a valid VkVideoCodecOperationFlagBitsKHR value" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03404", - "text": " For each pInfos[i], dstAccelerationStructure must have been created with compatible VkAccelerationStructureCreateInfoKHR where VkAccelerationStructureCreateInfoKHR::type and VkAccelerationStructureCreateInfoKHR::flags are identical to VkAccelerationStructureBuildGeometryInfoKHR::type and VkAccelerationStructureBuildGeometryInfoKHR::flags respectively, VkAccelerationStructureBuildGeometryInfoKHR::geometryCount for dstAccelerationStructure are greater than or equal to the build size, and each geometry in VkAccelerationStructureBuildGeometryInfoKHR::ppGeometries for dstAccelerationStructure has greater than or equal to the number of vertices, indices, and AABBs, VkAccelerationStructureGeometryTrianglesDataKHR::transformData is both 0 or both non-zero, and all other parameters are the same" + "vuid": "VUID-VkVideoProfileKHR-chromaSubsampling-parameter", + "text": " chromaSubsampling must be a valid combination of VkVideoChromaSubsamplingFlagBitsKHR values" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03405", - "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously active for that acceleration structure must not be made inactive as per Inactive Primitives and Instances" + "vuid": "VUID-VkVideoProfileKHR-chromaSubsampling-requiredbitmask", + "text": " chromaSubsampling must not be 0" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03406", - "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously inactive for that acceleration structure must not be made active as per Inactive Primitives and Instances" + "vuid": "VUID-VkVideoProfileKHR-lumaBitDepth-parameter", + "text": " lumaBitDepth must be a valid combination of VkVideoComponentBitDepthFlagBitsKHR values" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-03407", - "text": " Any acceleration structure instance in any top level build in this command must not reference any bottom level acceleration structure built by this command" + "vuid": "VUID-VkVideoProfileKHR-lumaBitDepth-requiredbitmask", + "text": " lumaBitDepth must not be 0" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03408", - "text": " There must not be any memory aliasing between the scratch memories that are provided in all the pInfos[i].scratchData memories for the acceleration structure builds" + "vuid": "VUID-VkVideoProfileKHR-chromaBitDepth-parameter", + "text": " chromaBitDepth must be a valid combination of VkVideoComponentBitDepthFlagBitsKHR 
values" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-03409", - "text": " There must not be any memory aliasing between memory bound to any top level, bottom level, or instance acceleration structure accessed by this command" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-update-03527", - "text": " If update is VK_FALSE, all addresses between pInfos[i].scratchData and pInfos[i].scratchData + N - 1 must be in the buffer device address range of the same buffer, where N is given by the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with VkAccelerationStructureMemoryRequirementsInfoKHR::accelerationStructure set to pInfos[i].dstAccelerationStructure and VkAccelerationStructureMemoryRequirementsInfoKHR::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-update-03528", - "text": " If update is VK_TRUE, all addresses between pInfos[i].scratchData and pInfos[i].scratchData + N - 1 must be in the buffer device address range of the same buffer, where N is given by the size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsKHR with VkAccelerationStructureMemoryRequirementsInfoKHR::accelerationStructure set to pInfos[i].dstAccelerationStructure and VkAccelerationStructureMemoryRequirementsInfoKHR::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-03529", - "text": " The buffer from which the buffer device address pInfos[i].scratchData is queried must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR usage flag" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-04046", - "text": " All VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR referenced by this command must contain valid device addresses for a buffer bound to device memory. 
If the buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-None-03531", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory" - } - ], - "(VK_KHR_deferred_host_operations)": [ - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pNext-03532", - "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of any of the provided VkAccelerationStructureBuildGeometryInfoKHR structures" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-pInfos-parameter", - "text": " pInfos must be a valid pointer to an array of infoCount valid VkAccelerationStructureBuildGeometryInfoKHR structures" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-ppOffsetInfos-parameter", - "text": " ppOffsetInfos must be a valid pointer to an array of infoCount VkAccelerationStructureBuildOffsetInfoKHR structures" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-renderpass", - "text": " This command must only be called outside of a render pass instance" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureKHR-infoCount-arraylength", - "text": " infoCount must be greater than 0" + "vuid": "VUID-VkVideoProfileKHR-chromaBitDepth-requiredbitmask", + "text": " chromaBitDepth must not be 0" } ] }, - "vkCmdBuildAccelerationStructureIndirectKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + "vkGetPhysicalDeviceVideoCapabilitiesKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-None-04047", - "text": " All VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR referenced by this command must contain valid device addresses for a buffer bound to device memory. 
If the buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + "vuid": "VUID-vkGetPhysicalDeviceVideoCapabilitiesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-None-03534", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory" + "vuid": "VUID-vkGetPhysicalDeviceVideoCapabilitiesKHR-pVideoProfile-parameter", + "text": " pVideoProfile must be a valid pointer to a valid VkVideoProfileKHR structure" }, { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-rayTracingIndirectAccelerationStructureBuild-03535", - "text": " The VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingIndirectAccelerationStructureBuild feature must be enabled" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkAccelerationStructureBuildGeometryInfoKHR structure" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-indirectBuffer-parameter", - "text": " indirectBuffer must be a valid VkBuffer handle" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-renderpass", - "text": " This command must only be called outside of a render pass instance" - }, - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-commonparent", - "text": " Both of commandBuffer, and indirectBuffer must have been created, allocated, or retrieved from the same VkDevice" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ - { - "vuid": "VUID-vkCmdBuildAccelerationStructureIndirectKHR-pNext-03536", - "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of any of the provided VkAccelerationStructureBuildGeometryInfoKHR structures" + "vuid": "VUID-vkGetPhysicalDeviceVideoCapabilitiesKHR-pCapabilities-parameter", + "text": " pCapabilities must be a valid pointer to a VkVideoCapabilitiesKHR structure" } ] }, - "VkAccelerationStructureBuildGeometryInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + "VkVideoCapabilitiesKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03537", - "text": " If update is VK_TRUE, srcAccelerationStructure must not be VK_NULL_HANDLE" + "vuid": "VUID-VkVideoCapabilitiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03538", - "text": " If update is VK_TRUE, srcAccelerationStructure must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR set in VkAccelerationStructureBuildGeometryInfoKHR::flags" + "vuid": "VUID-VkVideoCapabilitiesKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid 
instance of VkVideoDecodeH264CapabilitiesEXT, VkVideoDecodeH265CapabilitiesEXT, or VkVideoEncodeH264CapabilitiesEXT" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-scratchData-03539", - "text": " scratchData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR usage flag" + "vuid": "VUID-VkVideoCapabilitiesKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + } + ] + }, + "vkGetPhysicalDeviceVideoFormatPropertiesKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceVideoFormatPropertiesKHR-imageUsage-04844", + "text": " The imageUsage enum of VkPhysicalDeviceVideoFormatInfoKHR must contain at least one of the following video image usage bit(s): VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, or VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR." }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-update-03540", - "text": " If update is VK_TRUE, the srcAccelerationStructure and dstAccelerationStructure objects must either be the same object or not have any memory aliasing" + "vuid": "VUID-vkGetPhysicalDeviceVideoFormatPropertiesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR" + "vuid": "VUID-vkGetPhysicalDeviceVideoFormatPropertiesKHR-pVideoFormatInfo-parameter", + "text": " pVideoFormatInfo must be a valid pointer to a valid VkPhysicalDeviceVideoFormatInfoKHR structure" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" + "vuid": "VUID-vkGetPhysicalDeviceVideoFormatPropertiesKHR-pVideoFormatPropertyCount-parameter", + "text": " pVideoFormatPropertyCount must be a valid pointer to a uint32_t value" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-sType-unique", + "vuid": "VUID-vkGetPhysicalDeviceVideoFormatPropertiesKHR-pVideoFormatProperties-parameter", + "text": " If the value referenced by pVideoFormatPropertyCount is not 0, and pVideoFormatProperties is not NULL, pVideoFormatProperties must be a valid pointer to an array of pVideoFormatPropertyCount VkVideoFormatPropertiesKHR structures" + } + ] + }, + "VkPhysicalDeviceVideoFormatInfoKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-VkPhysicalDeviceVideoFormatInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR" + }, + { + "vuid": "VUID-VkPhysicalDeviceVideoFormatInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkVideoProfilesKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-VkVideoProfilesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR" + }, + { + "vuid": "VUID-VkVideoProfilesKHR-pProfiles-parameter", + "text": " pProfiles must be a valid pointer to a valid VkVideoProfileKHR structure" + } + ] + }, + "VkVideoFormatPropertiesKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-VkVideoFormatPropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR" + }, + { + "vuid": "VUID-VkVideoFormatPropertiesKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkCreateVideoSessionKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": 
"VUID-vkCreateVideoSessionKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateVideoSessionKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkVideoSessionCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateVideoSessionKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateVideoSessionKHR-pVideoSession-parameter", + "text": " pVideoSession must be a valid pointer to a VkVideoSessionKHR handle" + } + ] + }, + "VkVideoSessionCreateInfoKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-pVideoProfile-04845", + "text": " pVideoProfile must be a pointer to a valid VkVideoProfileKHR structure whose pNext chain must include a valid codec-specific profile structure." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesSlotsCount-04846", + "text": " If Reference Pictures are required for use with the created video session, the maxReferencePicturesSlotsCount must be set to a value bigger than 0." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesSlotsCount-04847", + "text": " maxReferencePicturesSlotsCount cannot exceed the implementation reported VkVideoCapabilitiesKHR::maxReferencePicturesSlotsCount." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesActiveCount-04848", + "text": " If Reference Pictures are required for use with the created video session, the maxReferencePicturesActiveCount must be set to a value bigger than 0." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesActiveCount-04849", + "text": " maxReferencePicturesActiveCount cannot exceed the implementation reported VkVideoCapabilitiesKHR::maxReferencePicturesActiveCount." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-maxReferencePicturesActiveCount-04850", + "text": " maxReferencePicturesActiveCount cannot exceed the maxReferencePicturesSlotsCount." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-maxCodedExtent-04851", + "text": " maxCodedExtent cannot be smaller than VkVideoCapabilitiesKHR::minExtent and bigger than VkVideoCapabilitiesKHR::maxExtent." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-referencePicturesFormat-04852", + "text": " referencePicturesFormat must be one of the supported formats in VkVideoFormatPropertiesKHR format returned by the vkGetPhysicalDeviceVideoFormatPropertiesKHR when the VkPhysicalDeviceVideoFormatInfoKHR imageUsage contains VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR or VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR depending on the session codec operation." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-pictureFormat-04853", + "text": " pictureFormat for decode output must be one of the supported formats in VkVideoFormatPropertiesKHR format returned by the vkGetPhysicalDeviceVideoFormatPropertiesKHR when the VkPhysicalDeviceVideoFormatInfoKHR imageUsage contains VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR." + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-pictureFormat-04854", + "text": " pictureFormat targeting encode operations must be one of the supported formats in VkVideoFormatPropertiesKHR format returned by the vkGetPhysicalDeviceVideoFormatPropertiesKHR when the VkPhysicalDeviceVideoFormatInfoKHR imageUsage contains VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR." 
+ }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkVideoDecodeH264SessionCreateInfoEXT, VkVideoDecodeH265SessionCreateInfoEXT, or VkVideoEncodeH264SessionCreateInfoEXT" + }, + { + "vuid": "VUID-VkVideoSessionCreateInfoKHR-sType-unique", "text": " The sType value of each struct in the pNext chain must be unique" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-parameter", - "text": " type must be a valid VkAccelerationStructureTypeKHR value" + "vuid": "VUID-VkVideoSessionCreateInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkVideoSessionCreateFlagBitsKHR values" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-flags-parameter", - "text": " flags must be a valid combination of VkBuildAccelerationStructureFlagBitsKHR values" + "vuid": "VUID-VkVideoSessionCreateInfoKHR-pVideoProfile-parameter", + "text": " pVideoProfile must be a valid pointer to a valid VkVideoProfileKHR structure" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-srcAccelerationStructure-parameter", - "text": " If srcAccelerationStructure is not VK_NULL_HANDLE, srcAccelerationStructure must be a valid VkAccelerationStructureKHR handle" + "vuid": "VUID-VkVideoSessionCreateInfoKHR-pictureFormat-parameter", + "text": " pictureFormat must be a valid VkFormat value" }, { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-dstAccelerationStructure-parameter", - "text": " dstAccelerationStructure must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-scratchData-parameter", - "text": " scratchData must be a valid VkDeviceOrHostAddressKHR union" - }, - { - "vuid": "VUID-VkAccelerationStructureBuildGeometryInfoKHR-commonparent", - "text": " Both of dstAccelerationStructure, and srcAccelerationStructure that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-VkVideoSessionCreateInfoKHR-referencePicturesFormat-parameter", + "text": " referencePicturesFormat must be a valid VkFormat value" } ] }, - "VkAccelerationStructureGeometryKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + "vkDestroyVideoSessionKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-03541", - "text": " If geometryType is VK_GEOMETRY_TYPE_AABBS_KHR, the aabbs member of geometry must be a valid VkAccelerationStructureGeometryAabbsDataKHR structure" + "vuid": "VUID-vkDestroyVideoSessionKHR-device-parameter", + "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-03542", - "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, the triangles member of geometry must be a valid VkAccelerationStructureGeometryTrianglesDataKHR structure" + "vuid": "VUID-vkDestroyVideoSessionKHR-videoSession-parameter", + "text": " videoSession must be a valid VkVideoSessionKHR handle" }, { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-03543", - "text": " If geometryType is VK_GEOMETRY_TYPE_INSTANCES_KHR, the instances member of geometry must be a valid VkAccelerationStructureGeometryInstancesDataKHR 
structure" + "vuid": "VUID-vkDestroyVideoSessionKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" }, { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR" + "vuid": "VUID-vkDestroyVideoSessionKHR-videoSession-parent", + "text": " videoSession must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetVideoSessionMemoryRequirementsKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-vkGetVideoSessionMemoryRequirementsKHR-device-parameter", + "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-pNext-pNext", + "vuid": "VUID-vkGetVideoSessionMemoryRequirementsKHR-videoSession-parameter", + "text": " videoSession must be a valid VkVideoSessionKHR handle" + }, + { + "vuid": "VUID-vkGetVideoSessionMemoryRequirementsKHR-pVideoSessionMemoryRequirementsCount-parameter", + "text": " pVideoSessionMemoryRequirementsCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetVideoSessionMemoryRequirementsKHR-pVideoSessionMemoryRequirements-parameter", + "text": " If pVideoSessionMemoryRequirements is not NULL, pVideoSessionMemoryRequirements must be a valid pointer to an array of pVideoSessionMemoryRequirementsCount VkVideoGetMemoryPropertiesKHR structures" + }, + { + "vuid": "VUID-vkGetVideoSessionMemoryRequirementsKHR-pVideoSessionMemoryRequirementsCount-arraylength", + "text": " If pVideoSessionMemoryRequirements is not NULL, the value referenced by pVideoSessionMemoryRequirementsCount must be greater than 0" + }, + { + "vuid": "VUID-vkGetVideoSessionMemoryRequirementsKHR-videoSession-parent", + "text": " videoSession must have been created, allocated, or retrieved from device" + } + ] + }, + "VkVideoGetMemoryPropertiesKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-VkVideoGetMemoryPropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR" + }, + { + "vuid": "VUID-VkVideoGetMemoryPropertiesKHR-pNext-pNext", "text": " pNext must be NULL" }, { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-geometryType-parameter", - "text": " geometryType must be a valid VkGeometryTypeKHR value" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-triangles-parameter", - "text": " If geometryType is VK_GEOMETRY_TYPE_TRIANGLES_KHR, the triangles member of geometry must be a valid VkAccelerationStructureGeometryTrianglesDataKHR structure" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-aabbs-parameter", - "text": " If geometryType is VK_GEOMETRY_TYPE_AABBS_KHR, the aabbs member of geometry must be a valid VkAccelerationStructureGeometryAabbsDataKHR structure" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-instances-parameter", - "text": " If geometryType is VK_GEOMETRY_TYPE_INSTANCES_KHR, the instances member of geometry must be a valid VkAccelerationStructureGeometryInstancesDataKHR structure" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryKHR-flags-parameter", - "text": " flags must be a valid combination of VkGeometryFlagBitsKHR values" + "vuid": "VUID-VkVideoGetMemoryPropertiesKHR-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure" } ] }, - "VkAccelerationStructureGeometryTrianglesDataKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + 
"vkBindVideoSessionMemoryKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR" + "vuid": "VUID-vkBindVideoSessionMemoryKHR-device-parameter", + "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-pNext-pNext", + "vuid": "VUID-vkBindVideoSessionMemoryKHR-videoSession-parameter", + "text": " videoSession must be a valid VkVideoSessionKHR handle" + }, + { + "vuid": "VUID-vkBindVideoSessionMemoryKHR-pVideoSessionBindMemories-parameter", + "text": " pVideoSessionBindMemories must be a valid pointer to an array of videoSessionBindMemoryCount valid VkVideoBindMemoryKHR structures" + }, + { + "vuid": "VUID-vkBindVideoSessionMemoryKHR-videoSessionBindMemoryCount-arraylength", + "text": " videoSessionBindMemoryCount must be greater than 0" + }, + { + "vuid": "VUID-vkBindVideoSessionMemoryKHR-videoSession-parent", + "text": " videoSession must have been created, allocated, or retrieved from device" + } + ] + }, + "VkVideoBindMemoryKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-VkVideoBindMemoryKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR" + }, + { + "vuid": "VUID-VkVideoBindMemoryKHR-pNext-pNext", "text": " pNext must be NULL" }, { - "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexFormat-parameter", - "text": " vertexFormat must be a valid VkFormat value" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexData-parameter", - "text": " vertexData must be a valid VkDeviceOrHostAddressConstKHR union" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-indexType-parameter", - "text": " indexType must be a valid VkIndexType value" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-indexData-parameter", - "text": " If indexData is not 0, indexData must be a valid VkDeviceOrHostAddressConstKHR union" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-transformData-parameter", - "text": " If transformData is not 0, transformData must be a valid VkDeviceOrHostAddressConstKHR union" + "vuid": "VUID-VkVideoBindMemoryKHR-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" } ] }, - "VkAccelerationStructureGeometryAabbsDataKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ + "vkCreateVideoSessionParametersKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-data-03544", - "text": " data must be aligned to 8 bytes" + "vuid": "VUID-vkCreateVideoSessionParametersKHR-device-parameter", + "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-stride-03545", - "text": " stride must be a multiple of 8" + "vuid": "VUID-vkCreateVideoSessionParametersKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkVideoSessionParametersCreateInfoKHR structure" }, { - "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR" + "vuid": "VUID-vkCreateVideoSessionParametersKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" }, { - "vuid": 
"VUID-VkAccelerationStructureGeometryAabbsDataKHR-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryAabbsDataKHR-data-parameter", - "text": " data must be a valid VkDeviceOrHostAddressConstKHR union" + "vuid": "VUID-vkCreateVideoSessionParametersKHR-pVideoSessionParameters-parameter", + "text": " pVideoSessionParameters must be a valid pointer to a VkVideoSessionParametersKHR handle" } ] }, - "VkAabbPositionsKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "VkVideoSessionParametersCreateInfoKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-VkAabbPositionsKHR-minX-03546", - "text": " minX must be less than or equal to maxX" + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-videoSessionParametersTemplate-04855", + "text": " If videoSessionParametersTemplate represents a valid handle, it must have been created against videoSession." }, { - "vuid": "VUID-VkAabbPositionsKHR-minY-03547", - "text": " minY must be less than or equal to maxY" + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR" }, { - "vuid": "VUID-VkAabbPositionsKHR-minZ-03548", - "text": " minZ must be less than or equal to maxZ" - } - ] - }, - "VkAccelerationStructureGeometryInstancesDataKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-data-03549", - "text": " data must be aligned to 16 bytes" + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkVideoDecodeH264SessionParametersCreateInfoEXT, VkVideoDecodeH265SessionParametersCreateInfoEXT, or VkVideoEncodeH264SessionParametersCreateInfoEXT" }, { - "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-arrayOfPointers-03550", - "text": " If arrayOfPointers is true, each pointer must be aligned to 16 bytes" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkAccelerationStructureGeometryInstancesDataKHR-data-parameter", - "text": " data must be a valid VkDeviceOrHostAddressConstKHR union" - } - ] - }, - "VkAccelerationStructureInstanceKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureInstanceKHR-flags-parameter", - "text": " flags must be a valid combination of VkGeometryInstanceFlagBitsKHR values" - } - ] - }, - "VkAccelerationStructureBuildOffsetInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03551", - "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, if the geometry uses indices, the offset primitiveOffset from VkAccelerationStructureGeometryTrianglesDataKHR::indexData must be a multiple of the element size of VkAccelerationStructureGeometryTrianglesDataKHR::indexType" - }, - { - "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03552", - "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, if the geometry doesn’t use indices, the offset primitiveOffset from 
VkAccelerationStructureGeometryTrianglesDataKHR::vertexData must be a multiple of the component size of VkAccelerationStructureGeometryTrianglesDataKHR::vertexFormat" - }, - { - "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-transformOffset-03553", - "text": " For geometries of type VK_GEOMETRY_TYPE_TRIANGLES_KHR, the offset transformOffset from VkAccelerationStructureGeometryTrianglesDataKHR::transformData must be a multiple of 16" - }, - { - "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03554", - "text": " For geometries of type VK_GEOMETRY_TYPE_AABBS_KHR, the offset primitiveOffset from VkAccelerationStructureGeometryAabbsDataKHR::data must be a multiple of 8" - }, - { - "vuid": "VUID-VkAccelerationStructureBuildOffsetInfoKHR-primitiveOffset-03555", - "text": " For geometries of type VK_GEOMETRY_TYPE_INSTANCES_KHR, the offset primitiveOffset from VkAccelerationStructureGeometryInstancesDataKHR::data must be a multiple of 16 // TODO - Almost certainly should be more here" - } - ] - }, - "vkCmdWriteAccelerationStructuresPropertiesKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-02493", - "text": " queryPool must have been created with a queryType matching queryType" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-02494", - "text": " The queries identified by queryPool and firstQuery must be unavailable" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parameter", - "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryType-parameter", - "text": " queryType must be a valid VkQueryType value" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryPool-parameter", - "text": " queryPool must be a valid VkQueryPool handle" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-renderpass", - "text": " This command must only be called outside of a render pass instance" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-accelerationStructureCount-arraylength", - "text": " accelerationStructureCount must be greater than 0" - }, - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-commonparent", - "text": " Each of commandBuffer, queryPool, and the elements of pAccelerationStructures must have been created, allocated, or retrieved from the same VkDevice" - } - ], - "core": [ - { - "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431", - "text": " All acceleration structures in accelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR" - }, - { - "vuid": 
"VUID-vkCmdWriteAccelerationStructuresPropertiesKHR-queryType-03432", - "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" - } - ] - }, - "vkCmdCopyAccelerationStructureNV": { - "core": [ - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-03410", - "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-03411", - "text": " src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", - "text": " dst must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-parameter", - "text": " src must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-parameter", - "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-renderpass", - "text": " This command must only be called outside of a render pass instance" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commonparent", - "text": " Each of commandBuffer, dst, and src must have been created, allocated, or retrieved from the same VkDevice" - } - ] - }, - "vkCmdCopyAccelerationStructureKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-None-03556", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureInfoKHR structure" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-renderpass", - "text": " This command must only be called outside of a render pass instance" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ - { - "vuid": "VUID-vkCmdCopyAccelerationStructureKHR-pNext-03557", - "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of the VkCopyAccelerationStructureInfoKHR structure" - } - ] - }, - "VkCopyAccelerationStructureInfoKHR": { - "core": [ - { - "vuid": 
"VUID-VkCopyAccelerationStructureInfoKHR-mode-03410", - "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-src-03411", - "text": " src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-sType-unique", + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-sType-unique", "text": " The sType value of each struct in the pNext chain must be unique" }, { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-src-parameter", - "text": " src must be a valid VkAccelerationStructureKHR handle" + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-videoSessionParametersTemplate-parameter", + "text": " videoSessionParametersTemplate must be a valid VkVideoSessionParametersKHR handle" }, { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-dst-parameter", - "text": " dst must be a valid VkAccelerationStructureKHR handle" + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-videoSession-parameter", + "text": " videoSession must be a valid VkVideoSessionKHR handle" }, { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-mode-parameter", - "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-videoSessionParametersTemplate-parent", + "text": " videoSessionParametersTemplate must have been created, allocated, or retrieved from videoSession" }, { - "vuid": "VUID-VkCopyAccelerationStructureInfoKHR-commonparent", - "text": " Both of dst, and src must have been created, allocated, or retrieved from the same VkDevice" + "vuid": "VUID-VkVideoSessionParametersCreateInfoKHR-commonparent", + "text": " Both of videoSession, and videoSessionParametersTemplate must have been created, allocated, or retrieved from the same VkDevice" } ] }, - "vkCmdCopyAccelerationStructureToMemoryKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "vkUpdateVideoSessionParametersKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-None-04048", - "text": " All VkDeviceOrHostAddressConstKHR referenced by this command must contain valid device addresses for a buffer bound to device memory. 
If the buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-None-03559", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureToMemoryInfoKHR structure" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-renderpass", - "text": " This command must only be called outside of a render pass instance" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-pNext-03560", - "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of the VkCopyAccelerationStructureToMemoryInfoKHR structure" - } - ], - "core": [ - { - "vuid": "VUID-vkCmdCopyAccelerationStructureToMemoryKHR-mode-03412", - "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR" - } - ] - }, - "VkCopyAccelerationStructureToMemoryInfoKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-dst-03561", - "text": " The memory pointed to by dst must be at least as large as the serialization size of src, as reported by VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-sType-unique", - "text": " The sType value of each struct in the pNext chain must be unique" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-src-parameter", - "text": " src must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-dst-parameter", - "text": " dst must be a valid VkDeviceOrHostAddressKHR union" - }, - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-parameter", - "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" - } - ], - "core": [ - { - "vuid": "VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-03412", - "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR" - } - ] - }, - "vkCmdCopyMemoryToAccelerationStructureKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-None-04049", - "text": " All VkDeviceOrHostAddressKHR referenced by this command must contain valid device addresses for a buffer bound to device memory. 
If the buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" - }, - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-None-03563", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to device memory" - }, - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-parameter", - "text": " commandBuffer must be a valid VkCommandBuffer handle" - }, - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkCopyMemoryToAccelerationStructureInfoKHR structure" - }, - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-recording", - "text": " commandBuffer must be in the recording state" - }, - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" - }, - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-renderpass", - "text": " This command must only be called outside of a render pass instance" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_deferred_host_operations)": [ - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pNext-03564", - "text": " The VkDeferredOperationInfoKHR structure must not be included in the pNext chain of the VkCopyMemoryToAccelerationStructureInfoKHR structure" - } - ], - "core": [ - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-mode-03413", - "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR" - }, - { - "vuid": "VUID-vkCmdCopyMemoryToAccelerationStructureKHR-pInfo-03414", - "text": " The data in pInfo->src must have a format compatible with the destination physical device as returned by vkGetDeviceAccelerationStructureCompatibilityKHR" - } - ] - }, - "VkCopyMemoryToAccelerationStructureInfoKHR": { - "core": [ - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-03413", - "text": " mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR" - }, - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-pInfo-03414", - "text": " The data in pInfo->src must have a format compatible with the destination physical device as returned by vkGetDeviceAccelerationStructureCompatibilityKHR" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR" - }, - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDeferredOperationInfoKHR" - }, - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-sType-unique", - "text": " The sType value of each struct in the pNext chain must be unique" - }, - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-src-parameter", - "text": " src must be a valid VkDeviceOrHostAddressConstKHR union" - }, - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-dst-parameter", - "text": " dst must be a valid VkAccelerationStructureKHR handle" - }, - { - "vuid": "VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-parameter", - "text": " mode must be a valid VkCopyAccelerationStructureModeKHR value" - } - ] - }, - "vkGetDeviceAccelerationStructureCompatibilityKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": 
"VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-rayTracing-03565", - "text": " The rayTracing or rayQuery feature must be enabled" - }, - { - "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-device-parameter", + "vuid": "VUID-vkUpdateVideoSessionParametersKHR-device-parameter", "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-vkGetDeviceAccelerationStructureCompatibilityKHR-version-parameter", - "text": " version must be a valid pointer to a valid VkAccelerationStructureVersionKHR structure" + "vuid": "VUID-vkUpdateVideoSessionParametersKHR-videoSessionParameters-parameter", + "text": " videoSessionParameters must be a valid VkVideoSessionParametersKHR handle" + }, + { + "vuid": "VUID-vkUpdateVideoSessionParametersKHR-pUpdateInfo-parameter", + "text": " pUpdateInfo must be a valid pointer to a valid VkVideoSessionParametersUpdateInfoKHR structure" } ] }, - "VkAccelerationStructureVersionKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "VkVideoSessionParametersUpdateInfoKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-VkAccelerationStructureVersionKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_KHR" + "vuid": "VUID-VkVideoSessionParametersUpdateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR" }, { - "vuid": "VUID-VkAccelerationStructureVersionKHR-pNext-pNext", + "vuid": "VUID-VkVideoSessionParametersUpdateInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkVideoDecodeH264SessionParametersAddInfoEXT, VkVideoDecodeH265SessionParametersAddInfoEXT, or VkVideoEncodeH264SessionParametersAddInfoEXT" + }, + { + "vuid": "VUID-VkVideoSessionParametersUpdateInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + } + ] + }, + "vkDestroyVideoSessionParametersKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-vkDestroyVideoSessionParametersKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyVideoSessionParametersKHR-videoSessionParameters-parameter", + "text": " videoSessionParameters must be a valid VkVideoSessionParametersKHR handle" + }, + { + "vuid": "VUID-vkDestroyVideoSessionParametersKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + } + ] + }, + "vkCmdBeginVideoCodingKHR": { + "(VK_KHR_video_queue)": [ + { + "vuid": "VUID-vkCmdBeginVideoCodingKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBeginVideoCodingKHR-pBeginInfo-parameter", + "text": " pBeginInfo must be a valid pointer to a valid VkVideoBeginCodingInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdBeginVideoCodingKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBeginVideoCodingKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support decode, or encode operations" + }, + { + "vuid": "VUID-vkCmdBeginVideoCodingKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBeginVideoCodingKHR-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ] + }, + "VkVideoBeginCodingInfoKHR": { + 
"(VK_KHR_video_queue)": [ + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-referenceSlotCount-04856", + "text": " VkVideoBeginCodingInfoKHR::referenceSlotCount must not exceed the value specified in VkVideoSessionCreateInfoKHR::maxReferencePicturesSlotsCount when creating the video session object that is being provided in videoSession." + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-videoSessionParameters-04857", + "text": " If videoSessionParameters is not VK_NULL_HANDLE, it must have been created using videoSession as a parent object." + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-pNext-pNext", "text": " pNext must be NULL" }, { - "vuid": "VUID-VkAccelerationStructureVersionKHR-versionData-parameter", - "text": " versionData must be a valid pointer to an array of 2*VK_UUID_SIZE uint8_t values" + "vuid": "VUID-VkVideoBeginCodingInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-codecQualityPreset-parameter", + "text": " codecQualityPreset must be a valid combination of VkVideoCodingQualityPresetFlagBitsKHR values" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-codecQualityPreset-requiredbitmask", + "text": " codecQualityPreset must not be 0" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-videoSession-parameter", + "text": " videoSession must be a valid VkVideoSessionKHR handle" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-videoSessionParameters-parameter", + "text": " If videoSessionParameters is not VK_NULL_HANDLE, videoSessionParameters must be a valid VkVideoSessionParametersKHR handle" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-pReferenceSlots-parameter", + "text": " pReferenceSlots must be a valid pointer to an array of referenceSlotCount valid VkVideoReferenceSlotKHR structures" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-referenceSlotCount-arraylength", + "text": " referenceSlotCount must be greater than 0" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-videoSessionParameters-parent", + "text": " If videoSessionParameters is a valid handle, it must have been created, allocated, or retrieved from videoSession" + }, + { + "vuid": "VUID-VkVideoBeginCodingInfoKHR-commonparent", + "text": " Both of videoSession, and videoSessionParameters that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" } ] }, - "vkBuildAccelerationStructureKHR": { - "core": [ + "VkVideoReferenceSlotKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkBuildAccelerationStructureKHR-pOffsetInfos-03402", - "text": " Each element of ppOffsetInfos[i] must be a valid pointer to an array of pInfos[i].geometryCount VkAccelerationStructureBuildOffsetInfoKHR structures" + "vuid": "VUID-VkVideoReferenceSlotKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR" }, { - "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03403", - "text": " Each pInfos[i].srcAccelerationStructure must not refer to the same acceleration structure as any pInfos[i].dstAccelerationStructure that is provided to the same build command unless it is identical for an update" + "vuid": "VUID-VkVideoReferenceSlotKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkVideoDecodeH264DpbSlotInfoEXT or 
VkVideoDecodeH265DpbSlotInfoEXT" }, { - "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03404", - "text": " For each pInfos[i], dstAccelerationStructure must have been created with compatible VkAccelerationStructureCreateInfoKHR where VkAccelerationStructureCreateInfoKHR::type and VkAccelerationStructureCreateInfoKHR::flags are identical to VkAccelerationStructureBuildGeometryInfoKHR::type and VkAccelerationStructureBuildGeometryInfoKHR::flags respectively, VkAccelerationStructureBuildGeometryInfoKHR::geometryCount for dstAccelerationStructure are greater than or equal to the build size, and each geometry in VkAccelerationStructureBuildGeometryInfoKHR::ppGeometries for dstAccelerationStructure has greater than or equal to the number of vertices, indices, and AABBs, VkAccelerationStructureGeometryTrianglesDataKHR::transformData is both 0 or both non-zero, and all other parameters are the same" + "vuid": "VUID-VkVideoReferenceSlotKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" }, { - "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03405", - "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously active for that acceleration structure must not be made inactive as per Inactive Primitives and Instances" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03406", - "text": " For each pInfos[i], if update is VK_TRUE, then objects that were previously inactive for that acceleration structure must not be made active as per Inactive Primitives and Instances" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03407", - "text": " Any acceleration structure instance in any top level build in this command must not reference any bottom level acceleration structure built by this command" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-03408", - "text": " There must not be any memory aliasing between the scratch memories that are provided in all the pInfos[i].scratchData memories for the acceleration structure builds" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03409", - "text": " There must not be any memory aliasing between memory bound to any top level, bottom level, or instance acceleration structure accessed by this command" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03437", - "text": " All VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR referenced by this command must contain valid host addresses" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-None-03438", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to host-visible memory" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-rayTracingHostAccelerationStructureCommands-03439", - "text": " The VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingHostAccelerationStructureCommands feature must be enabled" - } - ], - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-pInfos-parameter", - "text": " pInfos must be a valid pointer to an array of infoCount valid VkAccelerationStructureBuildGeometryInfoKHR structures" - }, - { - "vuid": "VUID-vkBuildAccelerationStructureKHR-ppOffsetInfos-parameter", - "text": " ppOffsetInfos must be a valid pointer to an array of infoCount VkAccelerationStructureBuildOffsetInfoKHR structures" - }, - { - "vuid": 
"VUID-vkBuildAccelerationStructureKHR-infoCount-arraylength", - "text": " infoCount must be greater than 0" + "vuid": "VUID-VkVideoReferenceSlotKHR-pPictureResource-parameter", + "text": " pPictureResource must be a valid pointer to a valid VkVideoPictureResourceKHR structure" } ] }, - "vkCopyAccelerationStructureKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "VkVideoPictureResourceKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkCopyAccelerationStructureKHR-None-03440", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to host-visible memory" + "vuid": "VUID-VkVideoPictureResourceKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR" }, { - "vuid": "VUID-vkCopyAccelerationStructureKHR-rayTracingHostAccelerationStructureCommands-03441", - "text": " the VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingHostAccelerationStructureCommands feature must be enabled" + "vuid": "VUID-VkVideoPictureResourceKHR-pNext-pNext", + "text": " pNext must be NULL" }, { - "vuid": "VUID-vkCopyAccelerationStructureKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCopyAccelerationStructureKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureInfoKHR structure" + "vuid": "VUID-VkVideoPictureResourceKHR-imageViewBinding-parameter", + "text": " imageViewBinding must be a valid VkImageView handle" } ] }, - "vkCopyMemoryToAccelerationStructureKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "vkCmdEndVideoCodingKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-None-03442", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to host-visible memory" + "vuid": "VUID-vkCmdEndVideoCodingKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-None-03443", - "text": " All VkDeviceOrHostAddressConstKHR referenced by this command must contain valid host pointers" + "vuid": "VUID-vkCmdEndVideoCodingKHR-pEndCodingInfo-parameter", + "text": " pEndCodingInfo must be a valid pointer to a valid VkVideoEndCodingInfoKHR structure" }, { - "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-rayTracingHostAccelerationStructureCommands-03444", - "text": " the VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingHostAccelerationStructureCommands feature must be enabled" + "vuid": "VUID-vkCmdEndVideoCodingKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-device-parameter", - "text": " device must be a valid VkDevice handle" + "vuid": "VUID-vkCmdEndVideoCodingKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support decode, or encode operations" }, { - "vuid": "VUID-vkCopyMemoryToAccelerationStructureKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkCopyMemoryToAccelerationStructureInfoKHR structure" + "vuid": "VUID-vkCmdEndVideoCodingKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdEndVideoCodingKHR-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" } ] }, - "vkCopyAccelerationStructureToMemoryKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "VkVideoEndCodingInfoKHR": { + 
"(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-None-03445", - "text": " All VkAccelerationStructureKHR objects referenced by this command must be bound to host-visible memory" + "vuid": "VUID-VkVideoEndCodingInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR" }, { - "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-None-03446", - "text": " All VkDeviceOrHostAddressKHR referenced by this command must contain valid host pointers" + "vuid": "VUID-VkVideoEndCodingInfoKHR-pNext-pNext", + "text": " pNext must be NULL" }, { - "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-rayTracingHostAccelerationStructureCommands-03447", - "text": " the VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingHostAccelerationStructureCommands feature must be enabled" - }, - { - "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-device-parameter", - "text": " device must be a valid VkDevice handle" - }, - { - "vuid": "VUID-vkCopyAccelerationStructureToMemoryKHR-pInfo-parameter", - "text": " pInfo must be a valid pointer to a valid VkCopyAccelerationStructureToMemoryInfoKHR structure" + "vuid": "VUID-VkVideoEndCodingInfoKHR-flags-zerobitmask", + "text": " flags must be 0" } ] }, - "vkWriteAccelerationStructuresPropertiesKHR": { - "(VK_NV_ray_tracing,VK_KHR_ray_tracing)": [ + "vkCmdControlVideoCodingKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03448", - "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, then stride must be a multiple of the size of VkDeviceSize" + "vuid": "VUID-vkCmdControlVideoCodingKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03449", - "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, then data must point to a VkDeviceSize" + "vuid": "VUID-vkCmdControlVideoCodingKHR-pCodingControlInfo-parameter", + "text": " pCodingControlInfo must be a valid pointer to a valid VkVideoCodingControlInfoKHR structure" }, { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03450", - "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, then stride must be a multiple of the size of VkDeviceSize" + "vuid": "VUID-vkCmdControlVideoCodingKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03451", - "text": " If queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, then data must point to a VkDeviceSize" + "vuid": "VUID-vkCmdControlVideoCodingKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support decode, or encode operations" }, { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-dataSize-03452", - "text": " dataSize must be greater than or equal to accelerationStructureCount*stride" + "vuid": "VUID-vkCmdControlVideoCodingKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" }, { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-03453", - "text": " The acceleration structures referenced by pAccelerationStructures must be bound to host-visible memory" - }, - { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-device-parameter", - "text": " device must be a valid VkDevice 
handle" - }, - { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parameter", - "text": " pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureKHR handles" - }, - { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-parameter", - "text": " queryType must be a valid VkQueryType value" - }, - { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pData-parameter", - "text": " pData must be a valid pointer to an array of dataSize bytes" - }, - { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructureCount-arraylength", - "text": " accelerationStructureCount must be greater than 0" - }, - { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-dataSize-arraylength", - "text": " dataSize must be greater than 0" - }, - { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-pAccelerationStructures-parent", - "text": " Each element of pAccelerationStructures must have been created, allocated, or retrieved from device" + "vuid": "VUID-vkCmdControlVideoCodingKHR-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" } - ], - "core": [ + ] + }, + "VkVideoCodingControlInfoKHR": { + "(VK_KHR_video_queue)": [ { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-accelerationStructures-03431", - "text": " All acceleration structures in accelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR" + "vuid": "VUID-VkVideoCodingControlInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR" }, { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-queryType-03432", - "text": " queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR or VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR" + "vuid": "VUID-VkVideoCodingControlInfoKHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkVideoEncodeRateControlInfoKHR" }, { - "vuid": "VUID-vkWriteAccelerationStructuresPropertiesKHR-rayTracingHostAccelerationStructureCommands-03454", - "text": " the VkPhysicalDeviceRayTracingFeaturesKHR::rayTracingHostAccelerationStructureCommands feature must be enabled" + "vuid": "VUID-VkVideoCodingControlInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkVideoCodingControlInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkVideoCodingControlFlagBitsKHR values" + } + ] + }, + "vkCmdDecodeVideoKHR": { + "(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-vkCmdDecodeVideoKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDecodeVideoKHR-pFrameInfo-parameter", + "text": " pFrameInfo must be a valid pointer to a valid VkVideoDecodeInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdDecodeVideoKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdDecodeVideoKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support decode operations" + }, + { + "vuid": "VUID-vkCmdDecodeVideoKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDecodeVideoKHR-bufferlevel", + "text": " commandBuffer 
must be a primary VkCommandBuffer" + } + ] + }, + "VkVideoDecodeInfoKHR": { + "(VK_KHR_video_decode_queue)": [ + { + "vuid": "VUID-VkVideoDecodeInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkVideoDecodeH264PictureInfoEXT or VkVideoDecodeH265PictureInfoEXT" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkVideoDecodeFlagBitsKHR values" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-srcBuffer-parameter", + "text": " srcBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-dstPictureResource-parameter", + "text": " dstPictureResource must be a valid VkVideoPictureResourceKHR structure" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-pSetupReferenceSlot-parameter", + "text": " pSetupReferenceSlot must be a valid pointer to a valid VkVideoReferenceSlotKHR structure" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-pReferenceSlots-parameter", + "text": " pReferenceSlots must be a valid pointer to an array of referenceSlotCount valid VkVideoReferenceSlotKHR structures" + }, + { + "vuid": "VUID-VkVideoDecodeInfoKHR-referenceSlotCount-arraylength", + "text": " referenceSlotCount must be greater than 0" + } + ] + }, + "VkVideoDecodeH264ProfileEXT": { + "(VK_EXT_video_decode_h264)": [ + { + "vuid": "VUID-VkVideoDecodeH264ProfileEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH264ProfileEXT-fieldLayout-parameter", + "text": " fieldLayout must be a valid combination of VkVideoDecodeH264FieldLayoutFlagBitsEXT values" + }, + { + "vuid": "VUID-VkVideoDecodeH264ProfileEXT-fieldLayout-requiredbitmask", + "text": " fieldLayout must not be 0" + } + ] + }, + "VkVideoDecodeH264CapabilitiesEXT": { + "(VK_EXT_video_decode_h264)": [ + { + "vuid": "VUID-VkVideoDecodeH264CapabilitiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT" + } + ] + }, + "VkVideoDecodeH264SessionCreateInfoEXT": { + "(VK_EXT_video_decode_h264)": [ + { + "vuid": "VUID-VkVideoDecodeH264SessionCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionCreateInfoEXT-pStdExtensionVersion-parameter", + "text": " pStdExtensionVersion must be a valid pointer to a valid VkExtensionProperties structure" + } + ] + }, + "VkVideoDecodeH264SessionParametersCreateInfoEXT": { + "(VK_EXT_video_decode_h264)": [ + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersCreateInfoEXT-pParametersAddInfo-parameter", + "text": " If pParametersAddInfo is not NULL, pParametersAddInfo must be a valid pointer to a valid VkVideoDecodeH264SessionParametersAddInfoEXT structure" + } + ] + }, + "VkVideoDecodeH264SessionParametersAddInfoEXT": { + "(VK_EXT_video_decode_h264)": [ + { + 
"vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-spsStdCount-04822", + "text": " The values of spsStdCount and ppsStdCount must be less than or equal to the values of maxSpsStdCount and maxPpsStdCount, respectively." + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-maxSpsStdCount-04823", + "text": " When the maxSpsStdCount number of parameters of type StdVideoH264SequenceParameterSet in the Video Session Parameters object is reached, no additional parameters of that type can be added to this object. VK_ERROR_TOO_MANY_OBJECTS will be returned if an attempt is made to add additional data to this object at this point." + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-maxPpsStdCount-04824", + "text": " When the maxPpsStdCount number of parameters of type StdVideoH264PictureParameterSet in the Video Session Parameters object is reached, no additional parameters of that type can be added to this object. VK_ERROR_TOO_MANY_OBJECTS will be returned if an attempt is made to add additional data to this object at this point." + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-None-04825", + "text": " Each entry to be added must have a unique, to the rest of the parameter array entries and the existing parameters in the Video Session Parameters Object that is being updated, SPS-PPS IDs." + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-None-04826", + "text": " Parameter entries that already exist in Video Session Parameters object with a particular SPS-PPS IDs cannot be replaced nor updated." + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-None-04827", + "text": " When creating a new object using a Video Session Parameters as a template, the array’s parameters with the same SPS-PPS IDs as the ones from the template take precedence." + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-None-04828", + "text": " SPS/PPS parameters must comply with the limits specified in VkVideoSessionCreateInfoKHR during Video Session creation." 
+ }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-pSpsStd-parameter", + "text": " If pSpsStd is not NULL, pSpsStd must be a valid pointer to an array of spsStdCount StdVideoH264SequenceParameterSet values" + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-pPpsStd-parameter", + "text": " If pPpsStd is not NULL, pPpsStd must be a valid pointer to an array of ppsStdCount StdVideoH264PictureParameterSet values" + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-spsStdCount-arraylength", + "text": " spsStdCount must be greater than 0" + }, + { + "vuid": "VUID-VkVideoDecodeH264SessionParametersAddInfoEXT-ppsStdCount-arraylength", + "text": " ppsStdCount must be greater than 0" + } + ] + }, + "VkVideoDecodeH264PictureInfoEXT": { + "(VK_EXT_video_decode_h264)": [ + { + "vuid": "VUID-VkVideoDecodeH264PictureInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH264PictureInfoEXT-pStdPictureInfo-parameter", + "text": " pStdPictureInfo must be a valid pointer to a valid StdVideoDecodeH264PictureInfo value" + }, + { + "vuid": "VUID-VkVideoDecodeH264PictureInfoEXT-pSlicesDataOffsets-parameter", + "text": " pSlicesDataOffsets must be a valid pointer to an array of slicesCount uint32_t values" + }, + { + "vuid": "VUID-VkVideoDecodeH264PictureInfoEXT-slicesCount-arraylength", + "text": " slicesCount must be greater than 0" + } + ] + }, + "VkVideoDecodeH264DpbSlotInfoEXT": { + "(VK_EXT_video_decode_h264)": [ + { + "vuid": "VUID-VkVideoDecodeH264DpbSlotInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH264DpbSlotInfoEXT-pStdReferenceInfo-parameter", + "text": " pStdReferenceInfo must be a valid pointer to a valid StdVideoDecodeH264ReferenceInfo value" + } + ] + }, + "VkVideoDecodeH264MvcEXT": { + "(VK_EXT_video_decode_h264)": [ + { + "vuid": "VUID-VkVideoDecodeH264MvcEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH264MvcEXT-pStdMvc-parameter", + "text": " pStdMvc must be a valid pointer to a valid StdVideoDecodeH264Mvc value" + } + ] + }, + "VkVideoDecodeH265ProfileEXT": { + "(VK_EXT_video_decode_h265)": [ + { + "vuid": "VUID-VkVideoDecodeH265ProfileEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT" + } + ] + }, + "VkVideoDecodeH265CapabilitiesEXT": { + "(VK_EXT_video_decode_h265)": [ + { + "vuid": "VUID-VkVideoDecodeH265CapabilitiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT" + } + ] + }, + "VkVideoDecodeH265SessionCreateInfoEXT": { + "(VK_EXT_video_decode_h265)": [ + { + "vuid": "VUID-VkVideoDecodeH265SessionCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionCreateInfoEXT-pStdExtensionVersion-parameter", + "text": " pStdExtensionVersion must be a valid pointer to a valid VkExtensionProperties structure" + } + ] + }, + "VkVideoDecodeH265SessionParametersCreateInfoEXT": { + "(VK_EXT_video_decode_h265)": [ + 
{ + "vuid": "VUID-VkVideoDecodeH265SessionParametersCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersCreateInfoEXT-pParametersAddInfo-parameter", + "text": " If pParametersAddInfo is not NULL, pParametersAddInfo must be a valid pointer to a valid VkVideoDecodeH265SessionParametersAddInfoEXT structure" + } + ] + }, + "VkVideoDecodeH265SessionParametersAddInfoEXT": { + "(VK_EXT_video_decode_h265)": [ + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-vpsStdCount-04829", + "text": " The values of vpsStdCount, spsStdCount, and ppsStdCount must be less than or equal to the values of maxVpsStdCount, maxSpsStdCount, and maxPpsStdCount, respectively." + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-maxVpsStdCount-04830", + "text": " When the maxVpsStdCount number of parameters of type StdVideoH265VideoParameterSet in the Video Session Parameters object is reached, no additional parameters of that type can be added to the object. VK_ERROR_TOO_MANY_OBJECTS will be returned if an attempt is made to add additional data to this object at this point." + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-maxSpsStdCount-04831", + "text": " When the maxSpsStdCount number of parameters of type StdVideoH265SequenceParameterSet in the Video Session Parameters object is reached, no additional parameters of that type can be added to the object. VK_ERROR_TOO_MANY_OBJECTS will be returned if an attempt is made to add additional data to this object at this point." + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-maxPpsStdCount-04832", + "text": " When the maxPpsStdCount number of parameters of type StdVideoH265PictureParameterSet in the Video Session Parameters object is reached, no additional parameters of that type can be added to the object. VK_ERROR_TOO_MANY_OBJECTS will be returned if an attempt is made to add additional data to this object at this point." + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-None-04833", + "text": " Each entry to be added must have VPS-SPS-PPS IDs that are unique with respect to the rest of the parameter array entries and the existing parameters in the Video Session Parameters object that is being updated." + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-None-04834", + "text": " Parameter entries that already exist in the Video Session Parameters object with particular VPS-SPS-PPS IDs cannot be replaced or updated." + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-None-04835", + "text": " When creating a new object using a Video Session Parameters object as a template, the array’s parameters with the same VPS-SPS-PPS IDs as those from the template take precedence." + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-None-04836", + "text": " VPS/SPS/PPS parameters must comply with the limits specified in VkVideoSessionCreateInfoKHR during Video Session creation."
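A second illustration (again outside the diff): VUIDs 04830-04832 surface to applications as VK_ERROR_TOO_MANY_OBJECTS when a session-parameters update overflows the capacities fixed at creation. A hedged C sketch against the provisional VK_KHR_video_queue API in this patch; device, params, and addInfo are hypothetical:

const VkVideoSessionParametersUpdateInfoKHR updateInfo = {
    .sType = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR,
    .pNext = &addInfo, /* e.g. a VkVideoDecodeH265SessionParametersAddInfoEXT */
    .updateSequenceCount = 1, /* incremented once per update of 'params' */
};
VkResult res = vkUpdateVideoSessionParametersKHR(device, params, &updateInfo);
if (res == VK_ERROR_TOO_MANY_OBJECTS) {
    /* A maxVpsStdCount/maxSpsStdCount/maxPpsStdCount limit was reached
     * (04830-04832). Existing entries cannot be replaced or updated (04834),
     * so create a new parameters object, optionally using 'params' as a
     * template (04835). */
}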
+ }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-pSpsStd-parameter", + "text": " If pSpsStd is not NULL, pSpsStd must be a valid pointer to an array of spsStdCount StdVideoH265SequenceParameterSet values" + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-pPpsStd-parameter", + "text": " If pPpsStd is not NULL, pPpsStd must be a valid pointer to an array of ppsStdCount StdVideoH265PictureParameterSet values" + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-spsStdCount-arraylength", + "text": " spsStdCount must be greater than 0" + }, + { + "vuid": "VUID-VkVideoDecodeH265SessionParametersAddInfoEXT-ppsStdCount-arraylength", + "text": " ppsStdCount must be greater than 0" + } + ] + }, + "VkVideoDecodeH265PictureInfoEXT": { + "(VK_EXT_video_decode_h265)": [ + { + "vuid": "VUID-VkVideoDecodeH265PictureInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH265PictureInfoEXT-pStdPictureInfo-parameter", + "text": " pStdPictureInfo must be a valid pointer to a StdVideoDecodeH265PictureInfo value" + }, + { + "vuid": "VUID-VkVideoDecodeH265PictureInfoEXT-pSlicesDataOffsets-parameter", + "text": " pSlicesDataOffsets must be a valid pointer to an array of slicesCount uint32_t values" + }, + { + "vuid": "VUID-VkVideoDecodeH265PictureInfoEXT-slicesCount-arraylength", + "text": " slicesCount must be greater than 0" + } + ] + }, + "VkVideoDecodeH265DpbSlotInfoEXT": { + "(VK_EXT_video_decode_h265)": [ + { + "vuid": "VUID-VkVideoDecodeH265DpbSlotInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoDecodeH265DpbSlotInfoEXT-pStdReferenceInfo-parameter", + "text": " pStdReferenceInfo must be a valid pointer to a valid StdVideoDecodeH265ReferenceInfo value" + } + ] + }, + "vkCmdEncodeVideoKHR": { + "(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-vkCmdEncodeVideoKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdEncodeVideoKHR-pEncodeInfo-parameter", + "text": " pEncodeInfo must be a valid pointer to a valid VkVideoEncodeInfoKHR structure" + }, + { + "vuid": "VUID-vkCmdEncodeVideoKHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdEncodeVideoKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support encode operations" + }, + { + "vuid": "VUID-vkCmdEncodeVideoKHR-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdEncodeVideoKHR-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ] + }, + "VkVideoEncodeInfoKHR": { + "(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkVideoEncodeInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkVideoEncodeH264EmitPictureParametersEXT or VkVideoEncodeH264VclFrameInfoEXT" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-sType-unique", + "text": " The 
sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkVideoEncodeFlagBitsKHR values" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-dstBitstreamBuffer-parameter", + "text": " dstBitstreamBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-srcPictureResource-parameter", + "text": " srcPictureResource must be a valid VkVideoPictureResourceKHR structure" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-pSetupReferenceSlot-parameter", + "text": " pSetupReferenceSlot must be a valid pointer to a valid VkVideoReferenceSlotKHR structure" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-pReferenceSlots-parameter", + "text": " pReferenceSlots must be a valid pointer to an array of referenceSlotCount valid VkVideoReferenceSlotKHR structures" + }, + { + "vuid": "VUID-VkVideoEncodeInfoKHR-referenceSlotCount-arraylength", + "text": " referenceSlotCount must be greater than 0" + } + ] + }, + "VkVideoEncodeRateControlInfoKHR": { + "(VK_KHR_video_encode_queue)": [ + { + "vuid": "VUID-VkVideoEncodeRateControlInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR" + }, + { + "vuid": "VUID-VkVideoEncodeRateControlInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkVideoEncodeRateControlFlagBitsKHR values" + }, + { + "vuid": "VUID-VkVideoEncodeRateControlInfoKHR-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkVideoEncodeRateControlInfoKHR-rateControlMode-parameter", + "text": " rateControlMode must be a valid VkVideoEncodeRateControlModeFlagBitsKHR value" + } + ] + }, + "VkVideoEncodeH264ProfileEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264ProfileEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT" + } + ] + }, + "VkVideoEncodeH264CapabilitiesEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT" + }, + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-flags-parameter", + "text": " flags must be a valid combination of VkVideoEncodeH264CapabilitiesFlagBitsEXT values" + }, + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-inputModeFlags-parameter", + "text": " inputModeFlags must be a valid combination of VkVideoEncodeH264InputModeFlagBitsEXT values" + }, + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-inputModeFlags-requiredbitmask", + "text": " inputModeFlags must not be 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-outputModeFlags-parameter", + "text": " outputModeFlags must be a valid combination of VkVideoEncodeH264OutputModeFlagBitsEXT values" + }, + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-outputModeFlags-requiredbitmask", + "text": " outputModeFlags must not be 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264CapabilitiesEXT-stdExtensionVersion-parameter", + "text": " stdExtensionVersion must be a valid VkExtensionProperties structure" + } + ] + }, + "VkVideoEncodeH264SessionCreateInfoEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264SessionCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT" + }, + { + "vuid": 
"VUID-VkVideoEncodeH264SessionCreateInfoEXT-flags-parameter", + "text": " flags must be a valid combination of VkVideoEncodeH264CreateFlagBitsEXT values" + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionCreateInfoEXT-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionCreateInfoEXT-pStdExtensionVersion-parameter", + "text": " pStdExtensionVersion must be a valid pointer to a valid VkExtensionProperties structure" + } + ] + }, + "VkVideoEncodeH264SessionParametersCreateInfoEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersCreateInfoEXT-pParametersAddInfo-parameter", + "text": " If pParametersAddInfo is not NULL, pParametersAddInfo must be a valid pointer to a valid VkVideoEncodeH264SessionParametersAddInfoEXT structure" + } + ] + }, + "VkVideoEncodeH264SessionParametersAddInfoEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-spsStdCount-04837", + "text": " The values of spsStdCount and ppsStdCount must be less than or equal to the values of maxSpsStdCount and maxPpsStdCount, respectively." + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-maxSpsStdCount-04838", + "text": " When the maxSpsStdCount number of parameters of type StdVideoH264SequenceParameterSet in the Video Session Parameters object is reached, no additional parameters of that type can be added to the object. VK_ERROR_TOO_MANY_OBJECTS will be returned if an attempt is made to add additional data to this object at this point." + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-maxPpsStdCount-04839", + "text": " When the maxPpsStdCount number of parameters of type StdVideoH264PictureParameterSet in the Video Session Parameters object is reached, no additional parameters of that type can be added to the object. VK_ERROR_TOO_MANY_OBJECTS will be returned if an attempt is made to add additional data to this object at this point." + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-None-04840", + "text": " Each entry to be added must have a unique, to the rest of the parameter array entries and the existing parameters in the Video Session Parameters Object that is being updated, SPS-PPS IDs." + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-None-04841", + "text": " Parameter entries that already exist in Video Session Parameters object with a particular SPS-PPS IDs cannot be replaced nor updated." + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-None-04842", + "text": " When creating a new object using a Video Session Parameters as a template, the array’s parameters with the same SPS-PPS IDs as the ones from the template take precedence." + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-None-04843", + "text": " SPS/PPS parameters must comply with the limits specified in VkVideoSessionCreateInfoKHR during Video Session creation." 
+ }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-pSpsStd-parameter", + "text": " If pSpsStd is not NULL, pSpsStd must be a valid pointer to an array of spsStdCount StdVideoH264SequenceParameterSet values" + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-pPpsStd-parameter", + "text": " If pPpsStd is not NULL, pPpsStd must be a valid pointer to an array of ppsStdCount StdVideoH264PictureParameterSet values" + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-spsStdCount-arraylength", + "text": " spsStdCount must be greater than 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264SessionParametersAddInfoEXT-ppsStdCount-arraylength", + "text": " ppsStdCount must be greater than 0" + } + ] + }, + "VkVideoEncodeH264VclFrameInfoEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-pRefDefaultFinalList0Entries-parameter", + "text": " pRefDefaultFinalList0Entries must be a valid pointer to an array of refDefaultFinalList0EntryCount valid VkVideoEncodeH264DpbSlotInfoEXT structures" + }, + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-pRefDefaultFinalList1Entries-parameter", + "text": " pRefDefaultFinalList1Entries must be a valid pointer to an array of refDefaultFinalList1EntryCount valid VkVideoEncodeH264DpbSlotInfoEXT structures" + }, + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-pNaluSliceEntries-parameter", + "text": " pNaluSliceEntries must be a valid pointer to an array of naluSliceEntryCount valid VkVideoEncodeH264NaluSliceEXT structures" + }, + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-pCurrentPictureInfo-parameter", + "text": " pCurrentPictureInfo must be a valid pointer to a valid VkVideoEncodeH264DpbSlotInfoEXT structure" + }, + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-refDefaultFinalList0EntryCount-arraylength", + "text": " refDefaultFinalList0EntryCount must be greater than 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-refDefaultFinalList1EntryCount-arraylength", + "text": " refDefaultFinalList1EntryCount must be greater than 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264VclFrameInfoEXT-naluSliceEntryCount-arraylength", + "text": " naluSliceEntryCount must be greater than 0" + } + ] + }, + "VkVideoEncodeH264DpbSlotInfoEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264DpbSlotInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT" + }, + { + "vuid": "VUID-VkVideoEncodeH264DpbSlotInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkVideoEncodeH264DpbSlotInfoEXT-pStdPictureInfo-parameter", + "text": " pStdPictureInfo must be a valid pointer to a valid StdVideoEncodeH264PictureInfo value" + } + ] + }, + "VkVideoEncodeH264NaluSliceEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264NaluSliceEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT" + }, + { + "vuid": "VUID-VkVideoEncodeH264NaluSliceEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkVideoEncodeH264NaluSliceEXT-pSliceHeaderStd-parameter", + "text": " pSliceHeaderStd must be 
a valid pointer to a valid StdVideoEncodeH264SliceHeader value" + }, + { + "vuid": "VUID-VkVideoEncodeH264NaluSliceEXT-pRefFinalList0Entries-parameter", + "text": " pRefFinalList0Entries must be a valid pointer to an array of refFinalList0EntryCount valid VkVideoEncodeH264DpbSlotInfoEXT structures" + }, + { + "vuid": "VUID-VkVideoEncodeH264NaluSliceEXT-pRefFinalList1Entries-parameter", + "text": " pRefFinalList1Entries must be a valid pointer to an array of refFinalList1EntryCount valid VkVideoEncodeH264DpbSlotInfoEXT structures" + }, + { + "vuid": "VUID-VkVideoEncodeH264NaluSliceEXT-refFinalList0EntryCount-arraylength", + "text": " refFinalList0EntryCount must be greater than 0" + }, + { + "vuid": "VUID-VkVideoEncodeH264NaluSliceEXT-refFinalList1EntryCount-arraylength", + "text": " refFinalList1EntryCount must be greater than 0" + } + ] + }, + "VkVideoEncodeH264EmitPictureParametersEXT": { + "(VK_EXT_video_encode_h264)": [ + { + "vuid": "VUID-VkVideoEncodeH264EmitPictureParametersEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT" + }, + { + "vuid": "VUID-VkVideoEncodeH264EmitPictureParametersEXT-ppsIdEntries-parameter", + "text": " ppsIdEntries must be a valid pointer to an array of ppsIdEntryCount uint8_t values" + }, + { + "vuid": "VUID-VkVideoEncodeH264EmitPictureParametersEXT-ppsIdEntryCount-arraylength", + "text": " ppsIdEntryCount must be greater than 0" } ] }, @@ -28496,6 +39466,14 @@ } ] }, + "VkPhysicalDeviceShaderAtomicFloatFeaturesEXT": { + "(VK_EXT_shader_atomic_float)": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderAtomicFloatFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT" + } + ] + }, "VkPhysicalDeviceShaderAtomicInt64Features": { "(VK_VERSION_1_2,VK_KHR_shader_atomic_int64)": [ { @@ -28504,6 +39482,14 @@ } ] }, + "VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT": { + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT" + } + ] + }, "VkPhysicalDevice8BitStorageFeatures": { "(VK_VERSION_1_2,VK_KHR_8bit_storage)": [ { @@ -28696,6 +39682,14 @@ } ] }, + "VkPhysicalDeviceFragmentDensityMap2FeaturesEXT": { + "(VK_EXT_fragment_density_map2)": [ + { + "vuid": "VUID-VkPhysicalDeviceFragmentDensityMap2FeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT" + } + ] + }, "VkPhysicalDeviceScalarBlockLayoutFeatures": { "(VK_VERSION_1_2,VK_EXT_scalar_block_layout)": [ { @@ -28904,15 +39898,31 @@ } ] }, - "VkPhysicalDeviceRayTracingFeaturesKHR": { - "(VK_KHR_ray_tracing)": [ + "VkPhysicalDeviceAccelerationStructureFeaturesKHR": { + "(VK_KHR_acceleration_structure)": [ { - "vuid": "VUID-VkPhysicalDeviceRayTracingFeaturesKHR-rayTracingShaderGroupHandleCaptureReplayMixed-03348", - "text": " If rayTracingShaderGroupHandleCaptureReplayMixed is VK_TRUE, rayTracingShaderGroupHandleCaptureReplay must also be VK_TRUE" + "vuid": "VUID-VkPhysicalDeviceAccelerationStructureFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR" + } + ] + }, + "VkPhysicalDeviceRayTracingPipelineFeaturesKHR": { + "(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-VkPhysicalDeviceRayTracingPipelineFeaturesKHR-rayTracingPipelineShaderGroupHandleCaptureReplayMixed-03575", + "text": " If 
rayTracingPipelineShaderGroupHandleCaptureReplayMixed is VK_TRUE, rayTracingPipelineShaderGroupHandleCaptureReplay must also be VK_TRUE" }, { - "vuid": "VUID-VkPhysicalDeviceRayTracingFeaturesKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_FEATURES_KHR" + "vuid": "VUID-VkPhysicalDeviceRayTracingPipelineFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR" + } + ] + }, + "VkPhysicalDeviceRayQueryFeaturesKHR": { + "(VK_KHR_ray_query)": [ + { + "vuid": "VUID-VkPhysicalDeviceRayQueryFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR" } ] }, @@ -28924,6 +39934,14 @@ } ] }, + "VkPhysicalDeviceExtendedDynamicState2FeaturesEXT": { + "(VK_EXT_extended_dynamic_state2)": [ + { + "vuid": "VUID-VkPhysicalDeviceExtendedDynamicState2FeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT" + } + ] + }, "VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV": { "(VK_NV_device_generated_commands)": [ { @@ -28940,6 +39958,22 @@ } ] }, + "VkPhysicalDeviceDeviceMemoryReportFeaturesEXT": { + "(VK_EXT_device_memory_report)": [ + { + "vuid": "VUID-VkPhysicalDeviceDeviceMemoryReportFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT" + } + ] + }, + "VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT": { + "(VK_EXT_global_priority_query)": [ + { + "vuid": "VUID-VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT" + } + ] + }, "VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT": { "(VK_EXT_pipeline_creation_cache_control)": [ { @@ -28948,6 +39982,14 @@ } ] }, + "VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR": { + "(VK_KHR_zero_initialize_workgroup_memory)": [ + { + "vuid": "VUID-VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR" + } + ] + }, "VkPhysicalDevicePrivateDataFeaturesEXT": { "(VK_EXT_private_data)": [ { @@ -28956,6 +39998,14 @@ } ] }, + "VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR": { + "(VK_KHR_shader_subgroup_uniform_control_flow)": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR" + } + ] + }, "VkPhysicalDeviceRobustness2FeaturesEXT": { "(VK_EXT_robustness2)": [ { @@ -28968,6 +40018,22 @@ } ] }, + "VkPhysicalDeviceImageRobustnessFeaturesEXT": { + "(VK_EXT_image_robustness)": [ + { + "vuid": "VUID-VkPhysicalDeviceImageRobustnessFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT" + } + ] + }, + "VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR": { + "(VK_KHR_shader_terminate_invocation)": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR" + } + ] + }, "VkPhysicalDeviceCustomBorderColorFeaturesEXT": { "(VK_EXT_custom_border_color)": [ { @@ -28976,6 +40042,110 @@ } ] }, + "VkPhysicalDevicePortabilitySubsetFeaturesKHR": { + "(VK_KHR_portability_subset)": [ + { + "vuid": 
"VUID-VkPhysicalDevicePortabilitySubsetFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR" + } + ] + }, + "VkPhysicalDevicePerformanceQueryFeaturesKHR": { + "(VK_KHR_performance_query)": [ + { + "vuid": "VUID-VkPhysicalDevicePerformanceQueryFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR" + } + ] + }, + "VkPhysicalDevice4444FormatsFeaturesEXT": { + "(VK_EXT_4444_formats)": [ + { + "vuid": "VUID-VkPhysicalDevice4444FormatsFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT" + } + ] + }, + "VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE": { + "(VK_VALVE_mutable_descriptor_type)": [ + { + "vuid": "VUID-VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE" + } + ] + }, + "VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR": { + "(VK_KHR_workgroup_memory_explicit_layout)": [ + { + "vuid": "VUID-VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR" + } + ] + }, + "VkPhysicalDeviceSynchronization2FeaturesKHR": { + "(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkPhysicalDeviceSynchronization2FeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR" + } + ] + }, + "VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT": { + "(VK_EXT_vertex_input_dynamic_state)": [ + { + "vuid": "VUID-VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT" + } + ] + }, + "VkPhysicalDeviceFragmentShadingRateFeaturesKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkPhysicalDeviceFragmentShadingRateFeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR" + } + ] + }, + "VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV": { + "(VK_NV_fragment_shading_rate_enums)": [ + { + "vuid": "VUID-VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV" + } + ] + }, + "VkPhysicalDeviceInheritedViewportScissorFeaturesNV": { + "(VK_NV_inherited_viewport_scissor)": [ + { + "vuid": "VUID-VkPhysicalDeviceInheritedViewportScissorFeaturesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV" + } + ] + }, + "VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT": { + "(VK_EXT_ycbcr_2plane_444_formats)": [ + { + "vuid": "VUID-VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT" + } + ] + }, + "VkPhysicalDeviceColorWriteEnableFeaturesEXT": { + "(VK_EXT_color_write_enable)": [ + { + "vuid": "VUID-VkPhysicalDeviceColorWriteEnableFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT" + } + ] + }, + "VkPhysicalDeviceProvokingVertexFeaturesEXT": { + "(VK_EXT_provoking_vertex)": [ + { + "vuid": "VUID-VkPhysicalDeviceProvokingVertexFeaturesEXT-sType-sType", + "text": " sType must be 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT" + } + ] + }, "VkPhysicalDevicePushDescriptorPropertiesKHR": { "(VK_KHR_push_descriptor)": [ { @@ -29136,6 +40306,14 @@ } ] }, + "VkPhysicalDeviceFragmentDensityMap2PropertiesEXT": { + "(VK_EXT_fragment_density_map2)": [ + { + "vuid": "VUID-VkPhysicalDeviceFragmentDensityMap2PropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT" + } + ] + }, "VkPhysicalDeviceShaderCorePropertiesAMD": { "(VK_AMD_shader_core_properties)": [ { @@ -29160,14 +40338,6 @@ } ] }, - "VkPhysicalDevicePerformanceQueryFeaturesKHR": { - "(VK_KHR_performance_query)": [ - { - "vuid": "VUID-VkPhysicalDevicePerformanceQueryFeaturesKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR" - } - ] - }, "VkPhysicalDevicePerformanceQueryPropertiesKHR": { "(VK_KHR_performance_query)": [ { @@ -29200,11 +40370,19 @@ } ] }, - "VkPhysicalDeviceRayTracingPropertiesKHR": { - "(VK_KHR_ray_tracing)": [ + "VkPhysicalDeviceAccelerationStructurePropertiesKHR": { + "(VK_KHR_acceleration_structure)": [ { - "vuid": "VUID-VkPhysicalDeviceRayTracingPropertiesKHR-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_KHR" + "vuid": "VUID-VkPhysicalDeviceAccelerationStructurePropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR" + } + ] + }, + "VkPhysicalDeviceRayTracingPipelinePropertiesKHR": { + "(VK_KHR_ray_tracing_pipeline)": [ + { + "vuid": "VUID-VkPhysicalDeviceRayTracingPipelinePropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR" } ] }, @@ -29264,6 +40442,50 @@ } ] }, + "VkPhysicalDevicePortabilitySubsetPropertiesKHR": { + "(VK_KHR_portability_subset)": [ + { + "vuid": "VUID-VkPhysicalDevicePortabilitySubsetPropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR" + } + ] + }, + "VkPhysicalDeviceFragmentShadingRatePropertiesKHR": { + "(VK_KHR_fragment_shading_rate)": [ + { + "vuid": "VUID-VkPhysicalDeviceFragmentShadingRatePropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR" + } + ] + }, + "VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV": { + "(VK_NV_fragment_shading_rate_enums)": [ + { + "vuid": "VUID-VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV" + }, + { + "vuid": "VUID-VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV-maxFragmentShadingRateInvocationCount-parameter", + "text": " maxFragmentShadingRateInvocationCount must be a valid VkSampleCountFlagBits value" + } + ] + }, + "VkPhysicalDeviceCustomBorderColorPropertiesEXT": { + "(VK_EXT_custom_border_color)": [ + { + "vuid": "VUID-VkPhysicalDeviceCustomBorderColorPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceProvokingVertexPropertiesEXT": { + "(VK_EXT_provoking_vertex)": [ + { + "vuid": "VUID-VkPhysicalDeviceProvokingVertexPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT" + } + ] + }, "vkGetPhysicalDeviceMultisamplePropertiesEXT": { "(VK_EXT_sample_locations)": [ { @@ 
-29292,14 +40514,6 @@ } ] }, - "VkPhysicalDeviceCustomBorderColorPropertiesEXT": { - "(VK_EXT_custom_border_color)": [ - { - "vuid": "VUID-VkPhysicalDeviceCustomBorderColorPropertiesEXT-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT" - } - ] - }, "vkGetPhysicalDeviceFormatProperties": { "core": [ { @@ -29340,7 +40554,7 @@ }, { "vuid": "VUID-VkFormatProperties2-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDrmFormatModifierPropertiesListEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDrmFormatModifierPropertiesListEXT, VkVideoProfileKHR, or VkVideoProfilesKHR" }, { "vuid": "VUID-VkFormatProperties2-sType-unique", @@ -29810,7 +41024,7 @@ }, { "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590", - "text": " If objectType is not VK_OBJECT_TYPE_UNKNOWN, objectHandle must be VK_NULL_HANDLE or a valid Vulkan handle of the type associated with objectType as defined in the VkObjectType and Vulkan Handle Relationship table" + "text": " If objectType is not VK_OBJECT_TYPE_UNKNOWN, objectHandle must be VK_NULL_HANDLE or a valid Vulkan handle of the type associated with objectType as defined in the VkObjectType and Vulkan Handle Relationship table" }, { "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType", @@ -29850,7 +41064,7 @@ }, { "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectHandle-01910", - "text": " objectHandle must be a valid Vulkan handle of the type associated with objectType as defined in the VkObjectType and Vulkan Handle Relationship table" + "text": " objectHandle must be a valid Vulkan handle of the type associated with objectType as defined in the VkObjectType and Vulkan Handle Relationship table" }, { "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-sType-sType", @@ -30046,6 +41260,14 @@ } ] }, + "PFN_vkDebugUtilsMessengerCallbackEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-PFN_vkDebugUtilsMessengerCallbackEXT-None-04769", + "text": " The callback must not make calls to any Vulkan commands." 
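An aside on the new PFN_vkDebugUtilsMessengerCallbackEXT VUID just above (illustration, not part of the diff): a minimal conforming callback in C. The signature is the real VK_EXT_debug_utils one; the logging is hypothetical:

#include <stdio.h>
#include <vulkan/vulkan.h>

static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(
    VkDebugUtilsMessageSeverityFlagBitsEXT severity,
    VkDebugUtilsMessageTypeFlagsEXT types,
    const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
    void *pUserData) {
    (void)severity; (void)types; (void)pUserData;
    /* Only inspect pCallbackData here; per VUID 04769 the callback must not
     * call any Vulkan command. */
    fprintf(stderr, "[debug_utils] %s\n", pCallbackData->pMessage);
    return VK_FALSE; /* VK_TRUE asks the layers to abort the triggering call */
}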
+ } + ] + }, "VkDebugUtilsMessengerCallbackDataEXT": { "(VK_EXT_debug_utils)": [ { @@ -30162,7 +41384,7 @@ }, { "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-object-01492", - "text": " object must be a Vulkan object of the type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship" + "text": " object must be a Vulkan object of the type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship" }, { "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-sType-sType", @@ -30206,7 +41428,7 @@ }, { "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-object-01495", - "text": " object must be a Vulkan object of the type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship" + "text": " object must be a Vulkan object of the type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship" }, { "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType", @@ -30354,7 +41576,7 @@ }, { "vuid": "VUID-vkDebugReportMessageEXT-objectType-01498", - "text": " If objectType is not VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT and object is not VK_NULL_HANDLE, object must be a Vulkan object of the corresponding type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship" + "text": " If objectType is not VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT and object is not VK_NULL_HANDLE, object must be a Vulkan object of the corresponding type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship" }, { "vuid": "VUID-vkDebugReportMessageEXT-instance-parameter", @@ -30426,6 +41648,38 @@ } ] }, + "vkGetQueueCheckpointData2NV": { + "(VK_NV_device_diagnostic_checkpoints)+(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-vkGetQueueCheckpointData2NV-queue-03892", + "text": " The device that queue belongs to must be in the lost state" + }, + { + "vuid": "VUID-vkGetQueueCheckpointData2NV-queue-parameter", + "text": " queue must be a valid VkQueue handle" + }, + { + "vuid": "VUID-vkGetQueueCheckpointData2NV-pCheckpointDataCount-parameter", + "text": " pCheckpointDataCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetQueueCheckpointData2NV-pCheckpointData-parameter", + "text": " If the value referenced by pCheckpointDataCount is not 0, and pCheckpointData is not NULL, pCheckpointData must be a valid pointer to an array of pCheckpointDataCount VkCheckpointData2NV structures" + } + ] + }, + "VkCheckpointData2NV": { + "(VK_NV_device_diagnostic_checkpoints)+(VK_KHR_synchronization2)": [ + { + "vuid": "VUID-VkCheckpointData2NV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV" + }, + { + "vuid": "VUID-VkCheckpointData2NV-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, "vkGetQueueCheckpointDataNV": { "(VK_NV_device_diagnostic_checkpoints)": [ { @@ -30485,6 +41739,390 @@ "text": " pNext must be NULL" } ] + }, + "StandaloneSpirv": { + "core": [ + { + "vuid": "VUID-StandaloneSpirv-None-04633", + "text": " Every entry point must have no return value and accept no arguments" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04634", + "text": " The static function-call graph for an entry point must not contain cycles; that is, static recursion is not allowed" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04635", + "text": " The Logical or PhysicalStorageBuffer64 addressing model must be selected" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04636", + 
"text": " Scope for execution must be limited to Workgroup or Subgroup" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04637", + "text": " If the Scope for execution is Workgroup, then it must only be used in the task, mesh, tessellation control, or compute execution models" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04638", + "text": " Scope for memory must be limited to Device, QueueFamily, Workgroup, ShaderCallKHR, Subgroup, or Invocation" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04639", + "text": " If the Scope for memory is Workgroup, then it must only be used in the task, mesh, or compute execution models" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04640", + "text": " If the Scope for memory is ShaderCallKHR, then it must only be used in ray generation, intersection, closest hit, any-hit, miss, and callable execution models" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04641", + "text": " If the Scope for memory is Invocation, then memory semantics must be None" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04642", + "text": " Scope for Non Uniform Group Operations must be limited to Subgroup" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04643", + "text": " Storage Class must be limited to UniformConstant, Input, Uniform, Output, Workgroup, Private, Function, PushConstant, Image, StorageBuffer, RayPayloadKHR, IncomingRayPayloadKHR, HitAttributeKHR, CallableDataKHR, IncomingCallableDataKHR, ShaderRecordBufferKHR, or PhysicalStorageBuffer" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04644", + "text": " If the Storage Class is Output, then it must not be used in the GlCompute, RayGenerationKHR, IntersectionKHR, AnyHitKHR, ClosestHitKHR, MissKHR, or CallableKHR execution models" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04645", + "text": " If the Storage Class is Workgroup, then it must only be used in the task, mesh, or compute execution models" + }, + { + "vuid": "VUID-StandaloneSpirv-OpAtomicStore-04730", + "text": " OpAtomicStore must not use Acquire, AcquireRelease, or SequentiallyConsistent memory semantics" + }, + { + "vuid": "VUID-StandaloneSpirv-OpAtomicLoad-04731", + "text": " OpAtomicLoad must not use Release, AcquireRelease, or SequentiallyConsistent memory semantics" + }, + { + "vuid": "VUID-StandaloneSpirv-OpMemoryBarrier-04732", + "text": " OpMemoryBarrier must use one of Acquire, Release, AcquireRelease, or SequentiallyConsistent memory semantics" + }, + { + "vuid": "VUID-StandaloneSpirv-OpMemoryBarrier-04733", + "text": " OpMemoryBarrier must include at least one storage class" + }, + { + "vuid": "VUID-StandaloneSpirv-OpControlBarrier-04650", + "text": " If the semantics for OpControlBarrier includes one of Acquire, Release, AcquireRelease, or SequentiallyConsistent memory semantics, then it must include at least one storage class" + }, + { + "vuid": "VUID-StandaloneSpirv-OpVariable-04651", + "text": " Any OpVariable with an Initializer operand must have Output, Private, Function, or Workgroup as its Storage Class operand" + }, + { + "vuid": "VUID-StandaloneSpirv-OpVariable-04734", + "text": " Any OpVariable with an Initializer operand and Workgroup as its Storage Class operand must use OpConstantNull as the initializer." 
+ }, + { + "vuid": "VUID-StandaloneSpirv-OpReadClockKHR-04652", + "text": " Scope for OpReadClockKHR must be limited to Subgroup or Device" + }, + { + "vuid": "VUID-StandaloneSpirv-OriginLowerLeft-04653", + "text": " The OriginLowerLeft execution mode must not be used; fragment entry points must declare OriginUpperLeft" + }, + { + "vuid": "VUID-StandaloneSpirv-PixelCenterInteger-04654", + "text": " The PixelCenterInteger execution mode must not be used (pixels are always centered at half-integer coordinates)" + }, + { + "vuid": "VUID-StandaloneSpirv-UniformConstant-04655", + "text": " Any variable in the UniformConstant storage class must be typed as either OpTypeImage, OpTypeSampler, OpTypeSampledImage, OpTypeAccelerationStructureKHR, or an array of one of these types" + }, + { + "vuid": "VUID-StandaloneSpirv-OpTypeImage-04656", + "text": " OpTypeImage must declare a scalar 32-bit float, 64-bit integer, or 32-bit integer type for the “Sampled Type” (RelaxedPrecision can be applied to a sampling instruction and to the variable holding the result of a sampling instruction)" + }, + { + "vuid": "VUID-StandaloneSpirv-OpTypeImage-04657", + "text": " OpTypeImage must have a “Sampled” operand of 1 (sampled image) or 2 (storage image)" + }, + { + "vuid": "VUID-StandaloneSpirv-OpImageTexelPointer-04658", + "text": " If an OpImageTexelPointer is used in an atomic operation, the image type of the image parameter to OpImageTexelPointer must have an image format of R64i, R64ui, R32f, R32i, or R32ui" + }, + { + "vuid": "VUID-StandaloneSpirv-OpImageQuerySizeLod-04659", + "text": " OpImageQuerySizeLod, OpImageQueryLod, and OpImageQueryLevels must only consume an “Image” operand whose type has its “Sampled” operand set to 1" + }, + { + "vuid": "VUID-StandaloneSpirv-SubpassData-04660", + "text": " The (u,v) coordinates used for a SubpassData must be the <id> of a constant vector (0,0), or if a layer coordinate is used, must be a vector that was formed with constant 0 for the u and v components" + }, + { + "vuid": "VUID-StandaloneSpirv-OpTypeImage-04661", + "text": " Objects of types OpTypeImage, OpTypeSampler, OpTypeSampledImage, and arrays of these types must not be stored to or modified" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04662", + "text": " Any image operation must use at most one of the Offset, ConstOffset, and ConstOffsets image operands" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04663", + "text": " Image operand Offset must only be used with OpImage*Gather instructions" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04865", + "text": " Any image instruction which uses an Offset, ConstOffset, or ConstOffsets image operand must only consume a “Sampled Image” operand whose type has its “Sampled” operand set to 1" + }, + { + "vuid": "VUID-StandaloneSpirv-OpImageGather-04664", + "text": " The “Component” operand of OpImageGather and OpImageSparseGather must be the <id> of a constant instruction" + }, + { + "vuid": "VUID-StandaloneSpirv-OpImage-04777", + "text": " OpImage*Dref must not consume an image whose Dim is 3D."
+ }, + { + "vuid": "VUID-StandaloneSpirv-OpTypeAccelerationStructureKHR-04665", + "text": " Objects of types OpTypeAccelerationStructureKHR and arrays of this type must not be stored to or modified" + }, + { + "vuid": "VUID-StandaloneSpirv-OpReportIntersectionKHR-04666", + "text": " The value of the “Hit Kind” operand of OpReportIntersectionKHR must be in the range [0,127]" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04667", + "text": " Structure types must not contain opaque types" + }, + { + "vuid": "VUID-StandaloneSpirv-BuiltIn-04668", + "text": " Any BuiltIn decoration not listed in Built-In Variables must not be used" + }, + { + "vuid": "VUID-StandaloneSpirv-Location-04915", + "text": " The Location or Component decorations must not be used with BuiltIn" + }, + { + "vuid": "VUID-StandaloneSpirv-Location-04916", + "text": " The Location decorations must be used on user-defined variables" + }, + { + "vuid": "VUID-StandaloneSpirv-Location-04917", + "text": " The Location decorations must be used on an OpVariable with a structure type that is not a block" + }, + { + "vuid": "VUID-StandaloneSpirv-Location-04918", + "text": " The Location decorations must not be used on the members of OpVariable with a structure type that is a block decorated with Location" + }, + { + "vuid": "VUID-StandaloneSpirv-Location-04919", + "text": " The Location decorations must be used on each member of OpVariable with a structure type that is a block not decorated with Location" + }, + { + "vuid": "VUID-StandaloneSpirv-Component-04920", + "text": " The Component decoration value must not be greater than 3" + }, + { + "vuid": "VUID-StandaloneSpirv-Component-04921", + "text": " If the Component decoration is used on an OpVariable that has an OpTypeVector type with a Component Type with a Width that is less than or equal to 32, the sum of its Component Count and the Component decoration value must be less than 4" + }, + { + "vuid": "VUID-StandaloneSpirv-Component-04922", + "text": " If the Component decoration is used on an OpVariable that has an OpTypeVector type with a Component Type with a Width that is equal to 64, the sum of two times its Component Count and the Component decoration value must be less than 4" + }, + { + "vuid": "VUID-StandaloneSpirv-Component-04923", + "text": " The Component decoration value must not be 1 or 3 for scalar or two-component 64-bit data types" + }, + { + "vuid": "VUID-StandaloneSpirv-Component-04924", + "text": " The Component decorations must not be used with any type that is not a scalar or vector" + }, + { + "vuid": "VUID-StandaloneSpirv-GLSLShared-04669", + "text": " The GLSLShared and GLSLPacked decorations must not be used" + }, + { + "vuid": "VUID-StandaloneSpirv-Flat-04670", + "text": " The Flat, NoPerspective, Sample, and Centroid decorations must not be used on variables with storage class other than Input or on variables used in the interface of non-fragment shader entry points" + }, + { + "vuid": "VUID-StandaloneSpirv-Flat-04744", + "text": " The Flat decorations must be used on variables with storage class of Input in a fragment shader stage that are a scalar integer, vector of integer, or any double-precision floating-point type" + }, + { + "vuid": "VUID-StandaloneSpirv-ViewportRelativeNV-04672", + "text": " The ViewportRelativeNV decoration must only be used on a variable decorated with Layer in the vertex, tessellation evaluation, or geometry shader stages" + }, + { + "vuid": "VUID-StandaloneSpirv-ViewportRelativeNV-04673", + "text": " The ViewportRelativeNV decoration must not be used unless a variable decorated with one of ViewportIndex or ViewportMaskNV is also statically used by the same OpEntryPoint" + }, + { + "vuid": "VUID-StandaloneSpirv-ViewportMaskNV-04674", + "text": " The ViewportMaskNV and ViewportIndex decorations must not both be statically used by one or more OpEntryPoints that form the vertex processing stages of a graphics pipeline" + }, + { + "vuid": "VUID-StandaloneSpirv-FPRoundingMode-04675", + "text": " Rounding modes other than round-to-nearest-even and round-towards-zero must not be used for the FPRoundingMode decoration" + }, + { + "vuid": "VUID-StandaloneSpirv-FPRoundingMode-04676", + "text": " The FPRoundingMode decoration must only be used for a width-only conversion instruction whose only uses are Object operands of OpStore instructions storing through a pointer to a 16-bit floating-point object in the StorageBuffer, PhysicalStorageBuffer, Uniform, or Output storage class" + }, + { + "vuid": "VUID-StandaloneSpirv-Invariant-04677", + "text": " Variables decorated with Invariant and variables with structure types that have any members decorated with Invariant must be in the Output or Input storage class; Invariant used on an Input storage class variable or structure member has no effect" + }, + { + "vuid": "VUID-StandaloneSpirv-VulkanMemoryModel-04678", + "text": " If the VulkanMemoryModel capability is not declared, the Volatile decoration must be used on any variable declaration that includes one of the SMIDNV, WarpIDNV, SubgroupSize, SubgroupLocalInvocationId, SubgroupEqMask, SubgroupGeMask, SubgroupGtMask, SubgroupLeMask, or SubgroupLtMask BuiltIn decorations when used in the ray generation, closest hit, miss, intersection, or callable shaders, or with the RayTmaxKHR Builtin decoration when used in an intersection shader" + }, + { + "vuid": "VUID-StandaloneSpirv-VulkanMemoryModel-04679", + "text": " If the VulkanMemoryModel capability is declared, the OpLoad instruction must use the Volatile memory semantics when it accesses into any variable that includes one of the SMIDNV, WarpIDNV, SubgroupSize, SubgroupLocalInvocationId, SubgroupEqMask, SubgroupGeMask, SubgroupGtMask, SubgroupLeMask, or SubgroupLtMask BuiltIn decorations when used in the ray generation, closest hit, miss, intersection, or callable shaders, or with the RayTmaxKHR Builtin decoration when used in an intersection shader" + }, + { + "vuid": "VUID-StandaloneSpirv-OpTypeRuntimeArray-04680", + "text": " OpTypeRuntimeArray must only be used for the last member of an OpTypeStruct that is in the StorageBuffer or PhysicalStorageBuffer storage class decorated as Block, or that is in the Uniform storage class decorated as BufferBlock" + }, + { + "vuid": "VUID-StandaloneSpirv-Function-04681", + "text": " A type T that is an array sized with a specialization constant must neither be, nor be contained in, the type T2 of a variable V, unless either: a) T is equal to T2, b) V is declared in the Function or Private storage classes, c) V is a non-Block variable in the Workgroup storage class, or d) V is an interface variable with an additional level of arrayness, as described in interface matching, and T is the member type of the array type T2."
+ }, + { + "vuid": "VUID-StandaloneSpirv-OpControlBarrier-04682", + "text": " If OpControlBarrier is used in ray generation, intersection, any-hit, closest hit, miss, fragment, vertex, tessellation evaluation, or geometry shaders, the execution Scope must be Subgroup" + }, + { + "vuid": "VUID-StandaloneSpirv-LocalSize-04683", + "text": " For each compute shader entry point, either a LocalSize execution mode or an object decorated with the WorkgroupSize decoration must be specified" + }, + { + "vuid": "VUID-StandaloneSpirv-DerivativeGroupQuadsNV-04684", + "text": " For compute shaders using the DerivativeGroupQuadsNV execution mode, the first two dimensions of the local workgroup size must be a multiple of two" + }, + { + "vuid": "VUID-StandaloneSpirv-DerivativeGroupLinearNV-04778", + "text": " For compute shaders using the DerivativeGroupLinearNV execution mode, the product of the dimensions of the local workgroup size must be a multiple of four" + }, + { + "vuid": "VUID-StandaloneSpirv-OpGroupNonUniformBallotBitCount-04685", + "text": " If OpGroupNonUniformBallotBitCount is used, the group operation must be limited to Reduce, InclusiveScan, or ExclusiveScan" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04686", + "text": " The Pointer operand of all atomic instructions must have a Storage Class limited to Uniform, Workgroup, Image, StorageBuffer, or PhysicalStorageBuffer" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04687", + "text": " Output variables or block members decorated with Offset that have a 64-bit type, or a composite type containing a 64-bit type, must specify an Offset value aligned to an 8 byte boundary" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04689", + "text": " The size of any output block containing any member decorated with Offset that is a 64-bit type must be a multiple of 8" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04690", + "text": " The first member of an output block that specifies an Offset decoration must specify an Offset value that is aligned to an 8 byte boundary if that block contains any member decorated with Offset and is a 64-bit type" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04691", + "text": " Output variables or block members decorated with Offset that have a 32-bit type, or a composite type containing a 32-bit type, must specify an Offset value aligned to a 4 byte boundary" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04692", + "text": " Output variables, blocks, or block members decorated with Offset must only contain base types that have components that are either 32-bit or 64-bit in size" + }, + { + "vuid": "VUID-StandaloneSpirv-Offset-04716", + "text": " Only variables or block members in the output interface decorated with Offset can be captured for transform feedback, and those variables or block members must also be decorated with XfbBuffer and XfbStride, or inherit XfbBuffer and XfbStride decorations from a block containing them" + }, + { + "vuid": "VUID-StandaloneSpirv-XfbBuffer-04693", + "text": " All variables or block members in the output interface of the entry point being compiled decorated with a specific XfbBuffer value must all be decorated with identical XfbStride values" + }, + { + "vuid": "VUID-StandaloneSpirv-Stream-04694", + "text": " If any variables or block members in the output interface of the entry point being compiled are decorated with Stream, then all variables belonging to the same XfbBuffer must specify the same Stream value" + }, + { + "vuid": "VUID-StandaloneSpirv-XfbBuffer-04696", + "text": " 
For any two variables or block members in the output interface of the entry point being compiled with the same XfbBuffer value, the ranges determined by the Offset decoration and the size of the type must not overlap" + }, + { + "vuid": "VUID-StandaloneSpirv-XfbBuffer-04697", + "text": " All block members in the output interface of the entry point being compiled that are in the same block and have a declared or inherited XfbBuffer decoration must specify the same XfbBuffer value" + }, + { + "vuid": "VUID-StandaloneSpirv-RayPayloadKHR-04698", + "text": " RayPayloadKHR storage class must only be used in ray generation, closest hit or miss shaders" + }, + { + "vuid": "VUID-StandaloneSpirv-IncomingRayPayloadKHR-04699", + "text": " IncomingRayPayloadKHR storage class must only be used in closest hit, any-hit, or miss shaders" + }, + { + "vuid": "VUID-StandaloneSpirv-IncomingRayPayloadKHR-04700", + "text": " There must be at most one variable with the IncomingRayPayloadKHR storage class in the input interface of an entry point" + }, + { + "vuid": "VUID-StandaloneSpirv-HitAttributeKHR-04701", + "text": " HitAttributeKHR storage class must only be used in intersection, any-hit, or closest hit shaders" + }, + { + "vuid": "VUID-StandaloneSpirv-HitAttributeKHR-04702", + "text": " There must be at most one variable with the HitAttributeKHR storage class in the input interface of an entry point" + }, + { + "vuid": "VUID-StandaloneSpirv-HitAttributeKHR-04703", + "text": " A variable with HitAttributeKHR storage class must only be written to in an intersection shader" + }, + { + "vuid": "VUID-StandaloneSpirv-CallableDataKHR-04704", + "text": " CallableDataKHR storage class must only be used in ray generation, closest hit, miss, and callable shaders" + }, + { + "vuid": "VUID-StandaloneSpirv-IncomingCallableDataKHR-04705", + "text": " IncomingCallableDataKHR storage class must only be used in callable shaders" + }, + { + "vuid": "VUID-StandaloneSpirv-IncomingCallableDataKHR-04706", + "text": " There must be at most one variable with the IncomingCallableDataKHR storage class in the input interface of an entry point" + }, + { + "vuid": "VUID-StandaloneSpirv-Base-04707", + "text": " The Base operand of OpPtrAccessChain must point to one of the following: Workgroup, if VariablePointers is enabled; StorageBuffer, if VariablePointers or VariablePointersStorageBuffer is enabled; PhysicalStorageBuffer, if the PhysicalStorageBuffer64 addressing model is enabled" + }, + { + "vuid": "VUID-StandaloneSpirv-PhysicalStorageBuffer64-04708", + "text": " If the PhysicalStorageBuffer64 addressing model is enabled, all instructions that support memory access operands and that use a physical pointer must include the Aligned operand" + }, + { + "vuid": "VUID-StandaloneSpirv-PhysicalStorageBuffer64-04709", + "text": " If the PhysicalStorageBuffer64 addressing model is enabled, any access chain instruction that accesses into a RowMajor matrix must only be used as the Pointer operand to OpLoad or OpStore" + }, + { + "vuid": "VUID-StandaloneSpirv-PhysicalStorageBuffer64-04710", + "text": " If the PhysicalStorageBuffer64 addressing model is enabled, OpConvertUToPtr and OpConvertPtrToU must use an integer type whose Width is 64" + }, + { + "vuid": "VUID-StandaloneSpirv-OpTypeForwardPointer-04711", + "text": " OpTypeForwardPointer must have a storage class of PhysicalStorageBuffer" + }, + { + "vuid": "VUID-StandaloneSpirv-None-04745", + "text": " All variables with a storage class of PushConstant declared as an array must only be 
accessed by dynamically uniform indices" + }, + { + "vuid": "VUID-StandaloneSpirv-Result-04780", + "text": " The Result Type operand of any OpImageRead or OpImageSparseRead instruction must be a vector of four components" + }, + { + "vuid": "VUID-StandaloneSpirv-Base-04781", + "text": " The Base operand of any OpBitCount, OpBitReverse, OpBitFieldInsert, OpBitFieldSExtract, or OpBitFieldUExtract instruction must be a 32-bit integer scalar or a vector of 32-bit integers" + } + ] } } } \ No newline at end of file diff --git a/externals/Vulkan-Headers/registry/vk.xml b/externals/Vulkan-Headers/registry/vk.xml index dcbd24d2c..98dc2c098 100755 --- a/externals/Vulkan-Headers/registry/vk.xml +++ b/externals/Vulkan-Headers/registry/vk.xml @@ -1,7 +1,7 @@ -Copyright (c) 2015-2020 The Khronos Group Inc. +Copyright 2015-2021 The Khronos Group Inc. SPDX-License-Identifier: Apache-2.0 OR MIT @@ -13,9 +13,9 @@ machine-readable definition of the API, parameter and member validation language incorporated into the Specification and reference pages, and other material which is registered by Khronos, such as tags used by extension and layer authors. The authoritative public version of vk.xml is maintained in -the master branch of the Khronos Vulkan GitHub project. The authoritative -private version is maintained in the master branch of the member gitlab -server. +the default branch (currently named main) of the Khronos Vulkan GitHub +project. The authoritative private version is maintained in the default +branch of the member gitlab server. @@ -23,6 +23,7 @@ server. + @@ -32,6 +33,7 @@ server. + @@ -49,10 +51,10 @@ server. - + - + @@ -65,6 +67,11 @@ server. + + + + + @@ -77,8 +84,10 @@ server. + + In the current header structure, each platform's interfaces are confined to a platform-specific header (vulkan_xlib.h, @@ -112,43 +121,78 @@ server. + + + + - #define VK_MAKE_VERSION(major, minor, patch) \ + // DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. +#define VK_MAKE_VERSION(major, minor, patch) \ ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) - #define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) - #define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) - #define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) + // DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) + // DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) + // DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. +#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) + + #define VK_MAKE_API_VERSION(variant, major, minor, patch) \ + ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + #define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29) + #define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22) & 0x7FU) + #define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) + #define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) // DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. 
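Note on the hunk above: VK_MAKE_API_VERSION packs a 3-bit variant into the top bits of the old version word, leaving 7 bits for major, 10 for minor and 12 for patch, which is why the new decode macros mask with 0x7FU, 0x3FFU and 0xFFFU where the deprecated VK_VERSION_* macros did not. A minimal round-trip sketch, compiled as C (the macro bodies are copied verbatim from the hunk; main() and the printout are illustrative only):

#include <stdint.h>
#include <stdio.h>

#define VK_MAKE_API_VERSION(variant, major, minor, patch) \
    ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29)
#define VK_API_VERSION_MAJOR(version)   (((uint32_t)(version) >> 22) & 0x7FU)
#define VK_API_VERSION_MINOR(version)   (((uint32_t)(version) >> 12) & 0x3FFU)
#define VK_API_VERSION_PATCH(version)   ((uint32_t)(version) & 0xFFFU)

int main(void)
{
    /* variant 0 = core Vulkan API; 180 matches VK_HEADER_VERSION in this diff */
    uint32_t v = VK_MAKE_API_VERSION(0, 1, 2, 180);
    printf("%u.%u.%u (variant %u)\n",
           VK_API_VERSION_MAJOR(v), VK_API_VERSION_MINOR(v),
           VK_API_VERSION_PATCH(v), VK_API_VERSION_VARIANT(v));
    /* prints: 1.2.180 (variant 0) */
    return 0;
}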
//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 - // Vulkan 1.0 version number -#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 - // Vulkan 1.1 version number -#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0 - // Vulkan 1.2 version number -#define VK_API_VERSION_1_2 VK_MAKE_VERSION(1, 2, 0)// Patch version should always be set to 0 + // Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 + // Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 + // Vulkan 1.2 version number +#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 145 +#define VK_HEADER_VERSION 180 // Complete version of this file -#define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION) +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 2, VK_HEADER_VERSION) #define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; - -#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE) -#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; -#else - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; -#endif + +#ifndef VK_USE_64_BIT_PTR_DEFINES + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VK_USE_64_BIT_PTR_DEFINES 1 + #else + #define VK_USE_64_BIT_PTR_DEFINES 0 + #endif +#endif + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L)) + #define VK_NULL_HANDLE nullptr + #else + #define VK_NULL_HANDLE ((void*)0) + #endif + #else + #define VK_NULL_HANDLE 0ULL + #endif +#endif +#ifndef VK_NULL_HANDLE + #define VK_NULL_HANDLE 0 +#endif + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; + #else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; + #endif #endif - - -#define VK_NULL_HANDLE 0 struct ANativeWindow; struct AHardwareBuffer; @@ -162,6 +206,7 @@ typedef void CAMetalLayer; typedef uint32_t VkSampleMask; typedef uint32_t VkBool32; typedef uint32_t VkFlags; + typedef uint64_t VkFlags64; typedef uint64_t VkDeviceSize; typedef uint64_t VkDeviceAddress; @@ -170,7 +215,9 @@ typedef void CAMetalLayer; + + @@ -214,12 +261,13 @@ typedef void CAMetalLayer; typedef VkFlags VkPipelineCreateFlags; typedef VkFlags VkColorComponentFlags; typedef VkFlags VkFenceCreateFlags; + When VkSemaphoreCreateFlagBits is first extended, need to add a requires= attribute for it to VkSemaphoreCreateFlags typedef VkFlags VkSemaphoreCreateFlags; typedef VkFlags VkFormatFeatureFlags; typedef VkFlags VkQueryControlFlags; typedef VkFlags VkQueryResultFlags; typedef VkFlags VkShaderModuleCreateFlags; - typedef VkFlags VkEventCreateFlags; + typedef VkFlags VkEventCreateFlags; 
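Aside on the handle machinery introduced above: the new VK_USE_64_BIT_PTR_DEFINES switch makes the long-standing pointer-vs-uint64_t choice for non-dispatchable handles overridable, and VK_NULL_HANDLE now follows the same switch. A hedged sketch of the two expansions of VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer), compiled as C (the 64-bit platform test is abbreviated to two predicates, and the demo main() is not part of the header):

#include <stdint.h>
#include <stdio.h>

#if defined(__LP64__) || defined(_WIN64)   /* 64-bit path: VK_USE_64_BIT_PTR_DEFINES == 1 */
typedef struct VkBuffer_T *VkBuffer;       /* distinct opaque pointer type per handle */
#define VK_NULL_HANDLE ((void*)0)          /* becomes nullptr when compiled as C++11+ */
#else                                      /* 32-bit path: VK_USE_64_BIT_PTR_DEFINES == 0 */
typedef uint64_t VkBuffer;                 /* all non-dispatchable handles alias uint64_t */
#define VK_NULL_HANDLE 0ULL
#endif

int main(void)
{
    VkBuffer buf = VK_NULL_HANDLE;         /* valid initializer on either path */
    printf("buffer handle is %snull\n", buf == VK_NULL_HANDLE ? "" : "non-");
    return 0;
}

The practical consequence: on 64-bit builds, passing a VkImage where a VkBuffer is expected is a compile-time type error, while on 32-bit builds both are plain integers and the mix-up only surfaces at runtime.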
typedef VkFlags VkCommandPoolCreateFlags; typedef VkFlags VkCommandPoolResetFlags; typedef VkFlags VkCommandBufferResetFlags; @@ -248,6 +296,7 @@ typedef void CAMetalLayer; typedef VkFlags VkBuildAccelerationStructureFlagsKHR; typedef VkFlags VkPrivateDataSlotCreateFlagsEXT; + typedef VkFlags VkAccelerationStructureCreateFlagsKHR; typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; typedef VkFlags VkPipelineCreationFeedbackFlagsEXT; @@ -258,6 +307,8 @@ typedef void CAMetalLayer; typedef VkFlags VkPipelineCompilerControlFlagsAMD; typedef VkFlags VkShaderCorePropertiesFlagsAMD; typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; + typedef VkFlags64 VkAccessFlags2KHR; + typedef VkFlags64 VkPipelineStageFlags2KHR; WSI extensions typedef VkFlags VkCompositeAlphaFlagsKHR; @@ -272,12 +323,14 @@ typedef void CAMetalLayer; typedef VkFlags VkWin32SurfaceCreateFlagsKHR; typedef VkFlags VkXlibSurfaceCreateFlagsKHR; typedef VkFlags VkXcbSurfaceCreateFlagsKHR; + typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; typedef VkFlags VkIOSSurfaceCreateFlagsMVK; typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; typedef VkFlags VkMetalSurfaceCreateFlagsEXT; typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA; typedef VkFlags VkStreamDescriptorSurfaceCreateFlagsGGP; typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; + typedef VkFlags VkScreenSurfaceCreateFlagsQNX; typedef VkFlags VkPeerMemoryFeatureFlags; typedef VkFlags VkMemoryAllocateFlags; @@ -316,6 +369,7 @@ typedef void CAMetalLayer; typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; + typedef VkFlags VkDeviceMemoryReportFlagsEXT; typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; typedef VkFlags VkDescriptorBindingFlags; @@ -326,52 +380,91 @@ typedef void CAMetalLayer; typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; typedef VkFlags VkSwapchainImageUsageFlagsANDROID; typedef VkFlags VkToolPurposeFlagsEXT; + typedef VkFlags VkSubmitFlagsKHR; + + Video Core extension + typedef VkFlags VkVideoCodecOperationFlagsKHR; + typedef VkFlags VkVideoCapabilitiesFlagsKHR; + typedef VkFlags VkVideoSessionCreateFlagsKHR; + typedef VkFlags VkVideoBeginCodingFlagsKHR; + typedef VkFlags VkVideoEndCodingFlagsKHR; + typedef VkFlags VkVideoCodingQualityPresetFlagsKHR; + typedef VkFlags VkVideoCodingControlFlagsKHR; + + Video Decode Core extension + typedef VkFlags VkVideoDecodeFlagsKHR; + + Video Decode H.264 extension + typedef VkFlags VkVideoDecodeH264FieldLayoutFlagsEXT; + typedef VkFlags VkVideoDecodeH264CreateFlagsEXT; + + Video Decode H.265 extension + typedef VkFlags VkVideoDecodeH265CreateFlagsEXT; + + Video Encode Core extension + typedef VkFlags VkVideoEncodeFlagsKHR; + typedef VkFlags VkVideoEncodeRateControlFlagsKHR; + typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; + typedef VkFlags VkVideoChromaSubsamplingFlagsKHR; + typedef VkFlags VkVideoComponentBitDepthFlagsKHR; + + Video Encode H.264 extension + typedef VkFlags VkVideoEncodeH264CapabilitiesFlagsEXT; + typedef VkFlags VkVideoEncodeH264InputModeFlagsEXT; + typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT; + typedef VkFlags VkVideoEncodeH264CreateFlagsEXT; Types which can be void pointers or class pointers, selected at compile time - VK_DEFINE_HANDLE(VkInstance) - VK_DEFINE_HANDLE(VkPhysicalDevice) - VK_DEFINE_HANDLE(VkDevice) - VK_DEFINE_HANDLE(VkQueue) - VK_DEFINE_HANDLE(VkCommandBuffer) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) - 
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) + VK_DEFINE_HANDLE(VkInstance) + VK_DEFINE_HANDLE(VkPhysicalDevice) + VK_DEFINE_HANDLE(VkDevice) + VK_DEFINE_HANDLE(VkQueue) + VK_DEFINE_HANDLE(VkCommandBuffer) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) - - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlotEXT) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlotEXT) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) WSI extensions - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) - 
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) - VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) + + Video extensions + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) Types generated from corresponding enums tags below @@ -451,6 +544,8 @@ typedef void CAMetalLayer; + + When VkSemaphoreCreateFlagBits is first extended, need to add a type enum tag for it here Extensions @@ -484,6 +579,8 @@ typedef void CAMetalLayer; + + @@ -492,9 +589,10 @@ typedef void CAMetalLayer; - - + + + @@ -517,6 +615,11 @@ typedef void CAMetalLayer; + + + + + WSI extensions @@ -526,6 +629,7 @@ typedef void CAMetalLayer; + @@ -576,6 +680,8 @@ typedef void CAMetalLayer; + + Enumerated types in the header, but not used by the API @@ -585,6 +691,35 @@ typedef void CAMetalLayer; + Video Core extensions + + + + + + + + + + Video Decode extensions + + + Video H.264 Decode extensions + + + Video H.265 Decode extensions + + Video Encode extensions + + + + + Video H.264 Encode extensions + + + + + The PFN_vk*Function types are used by VkAllocationCallbacks below typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( void* pUserData, @@ -632,14 +767,19 @@ typedef void CAMetalLayer; const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData); + The PFN_vkDeviceMemoryReportCallbackEXT type is used by the VK_EXT_device_memory_report extension + typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); + Struct types VkStructureType sType - struct VkBaseOutStructure* pNext + struct VkBaseOutStructure* pNext VkStructureType sType - const struct VkBaseInStructure* pNext + const struct VkBaseInStructure* pNext int32_t x @@ -683,15 +823,15 @@ typedef void CAMetalLayer; VkComponentSwizzle a - uint32_t apiVersion - uint32_t driverVersion - uint32_t vendorID - uint32_t deviceID - VkPhysicalDeviceType deviceType - char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] - uint8_t pipelineCacheUUID[VK_UUID_SIZE] - VkPhysicalDeviceLimits limits - VkPhysicalDeviceSparseProperties sparseProperties + uint32_t apiVersion + uint32_t driverVersion + uint32_t vendorID + uint32_t deviceID + VkPhysicalDeviceType deviceType + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] + uint8_t pipelineCacheUUID[VK_UUID_SIZE] + VkPhysicalDeviceLimits limits + VkPhysicalDeviceSparseProperties sparseProperties char extensionName[VK_MAX_EXTENSION_NAME_SIZE]extension name @@ -705,7 +845,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext const char* pApplicationName uint32_t applicationVersion const char* pEngineName @@ -722,7 +862,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDeviceQueueCreateFlags flags uint32_t queueFamilyIndex uint32_t queueCount @@ -730,7 +870,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDeviceCreateFlags flags uint32_t queueCreateInfoCount const VkDeviceQueueCreateInfo* pQueueCreateInfos @@ -742,7 +882,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const 
void* pNext VkInstanceCreateFlags flags const VkApplicationInfo* pApplicationInfo uint32_t enabledLayerCount @@ -764,7 +904,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDeviceSize allocationSizeSize of memory allocation uint32_t memoryTypeIndexIndex of the of the memory type to allocate from @@ -795,7 +935,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDeviceMemory memoryMapped memory object VkDeviceSize offsetOffset within the memory object where the range starts VkDeviceSize sizeSize of the range within the memory object @@ -824,7 +964,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDescriptorSet dstSetDestination descriptor set uint32_t dstBindingBinding within the destination descriptor set to write uint32_t dstArrayElementArray element within the destination binding to write @@ -836,7 +976,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDescriptorSet srcSetSource descriptor set uint32_t srcBindingBinding within the source descriptor set to copy from uint32_t srcArrayElementArray element within the source binding to copy from @@ -847,7 +987,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBufferCreateFlags flagsBuffer creation flags VkDeviceSize sizeSpecified in bytes VkBufferUsageFlags usageBuffer usage flags @@ -857,7 +997,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBufferViewCreateFlagsflags VkBuffer buffer VkFormat formatOptionally specifies format of elements @@ -884,13 +1024,13 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize VkStructureType sType - const void* pNext + const void* pNext VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize uint32_t srcQueueFamilyIndexQueue family to transition ownership from @@ -901,7 +1041,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize VkImageLayout oldLayoutCurrent layout of the image @@ -913,7 +1053,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkImageCreateFlags flagsImage creation flags VkImageType imageType VkFormat format @@ -937,7 +1077,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkImageViewCreateFlags flags VkImage image VkImageViewType viewType @@ -982,7 +1122,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t waitSemaphoreCount const VkSemaphore* pWaitSemaphores uint32_t bufferBindCount @@ -1024,7 +1164,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkShaderModuleCreateFlags flags size_t codeSizeSpecified in bytes const uint32_t* pCodeBinary code of size codeSize @@ -1038,7 +1178,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const 
void* pNext VkDescriptorSetLayoutCreateFlags flags uint32_t bindingCountNumber of bindings in the descriptor set layout const VkDescriptorSetLayoutBinding* pBindingsArray of descriptor set layout bindings @@ -1049,7 +1189,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDescriptorPoolCreateFlags flags uint32_t maxSets uint32_t poolSizeCount @@ -1057,7 +1197,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDescriptorPool descriptorPool uint32_t descriptorSetCount const VkDescriptorSetLayout* pSetLayouts @@ -1075,7 +1215,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineShaderStageCreateFlags flags VkShaderStageFlagBits stageShader stage VkShaderModule moduleModule containing entry point @@ -1084,7 +1224,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineCreateFlags flagsPipeline creation flags VkPipelineShaderStageCreateInfo stage VkPipelineLayout layoutInterface layout of the pipeline @@ -1104,7 +1244,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineVertexInputStateCreateFlags flags uint32_t vertexBindingDescriptionCountnumber of bindings const VkVertexInputBindingDescription* pVertexBindingDescriptions @@ -1113,29 +1253,29 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineInputAssemblyStateCreateFlags flags VkPrimitiveTopology topology VkBool32 primitiveRestartEnable VkStructureType sType - const void* pNext + const void* pNext VkPipelineTessellationStateCreateFlags flags uint32_t patchControlPoints VkStructureType sType - const void* pNext + const void* pNext VkPipelineViewportStateCreateFlags flags - uint32_t viewportCount + uint32_t viewportCount const VkViewport* pViewports - uint32_t scissorCount + uint32_t scissorCount const VkRect2D* pScissors VkStructureType sType - const void* pNext + const void* pNext VkPipelineRasterizationStateCreateFlags flags VkBool32 depthClampEnable VkBool32 rasterizerDiscardEnable @@ -1150,7 +1290,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineMultisampleStateCreateFlags flags VkSampleCountFlagBits rasterizationSamplesNumber of samples used for rasterization VkBool32 sampleShadingEnableoptional (GL45) @@ -1171,7 +1311,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineColorBlendStateCreateFlags flags VkBool32 logicOpEnable VkLogicOp logicOp @@ -1181,7 +1321,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineDynamicStateCreateFlags flags uint32_t dynamicStateCount const VkDynamicState* pDynamicStates @@ -1197,7 +1337,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineDepthStencilStateCreateFlags flags VkBool32 depthTestEnable VkBool32 depthWriteEnable @@ -1211,7 +1351,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineCreateFlags flagsPipeline creation flags uint32_t stageCount const VkPipelineShaderStageCreateInfo* pStagesOne entry for each active shader stage @@ -1232,7 +1372,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineCacheCreateFlags flags size_t initialDataSizeSize of initial data to populate cache, in bytes const void* 
pInitialDataInitial data to populate cache @@ -1244,7 +1384,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineLayoutCreateFlags flags uint32_t setLayoutCountNumber of descriptor sets interfaced by the pipeline const VkDescriptorSetLayout* pSetLayoutsArray of setCount number of descriptor set layout objects defining the layout of the @@ -1253,7 +1393,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkSamplerCreateFlags flags VkFilter magFilterFilter mode for magnification VkFilter minFilterFilter mode for minifiation @@ -1273,20 +1413,20 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkCommandPoolCreateFlags flagsCommand pool creation flags uint32_t queueFamilyIndex VkStructureType sType - const void* pNext + const void* pNext VkCommandPool commandPool VkCommandBufferLevel level uint32_t commandBufferCount VkStructureType sType - const void* pNext + const void* pNext VkRenderPass renderPassRender pass for secondary command buffers uint32_t subpass VkFramebuffer framebufferFramebuffer for secondary command buffers @@ -1296,18 +1436,18 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkCommandBufferUsageFlags flagsCommand buffer usage flags const VkCommandBufferInheritanceInfo* pInheritanceInfoPointer to inheritance info for secondary command buffers VkStructureType sType - const void* pNext + const void* pNext VkRenderPass renderPass VkFramebuffer framebuffer VkRect2D renderArea uint32_t clearValueCount - const VkClearValue* pClearValues + const VkClearValue* pClearValues float float32[4] @@ -1319,13 +1459,13 @@ typedef void CAMetalLayer; uint32_t stencil - VkClearColorValue color + VkClearColorValue color VkClearDepthStencilValue depthStencil VkImageAspectFlags aspectMask uint32_t colorAttachment - VkClearValue clearValue + VkClearValue clearValue VkAttachmentDescriptionFlags flags @@ -1357,15 +1497,15 @@ typedef void CAMetalLayer; uint32_t srcSubpass uint32_t dstSubpass - VkPipelineStageFlags srcStageMask - VkPipelineStageFlags dstStageMask + VkPipelineStageFlags srcStageMask + VkPipelineStageFlags dstStageMask VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize VkDependencyFlags dependencyFlags VkStructureType sType - const void* pNext + const void* pNext VkRenderPassCreateFlags flags uint32_t attachmentCount const VkAttachmentDescription* pAttachments @@ -1376,12 +1516,12 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkEventCreateFlags flagsEvent creation flags VkStructureType sType - const void* pNext + const void* pNext VkFenceCreateFlags flagsFence creation flags @@ -1442,137 +1582,137 @@ typedef void CAMetalLayer; VkBool32 inheritedQueriesQueries may be inherited from primary to secondary command buffers - VkBool32 residencyStandard2DBlockShapeSparse resources support: GPU will access all 2D (single sample) sparse resources using the standard sparse image block shapes (based on pixel format) - VkBool32 residencyStandard2DMultisampleBlockShapeSparse resources support: GPU will access all 2D (multisample) sparse resources using the standard sparse image block shapes (based on pixel format) - VkBool32 residencyStandard3DBlockShapeSparse resources support: GPU will access all 3D sparse resources using the standard sparse 
image block shapes (based on pixel format) - VkBool32 residencyAlignedMipSizeSparse resources support: Images with mip level dimensions that are NOT a multiple of the sparse image block dimensions will be placed in the mip tail - VkBool32 residencyNonResidentStrictSparse resources support: GPU can consistently access non-resident regions of a resource, all reads return as if data is 0, writes are discarded + VkBool32 residencyStandard2DBlockShapeSparse resources support: GPU will access all 2D (single sample) sparse resources using the standard sparse image block shapes (based on pixel format) + VkBool32 residencyStandard2DMultisampleBlockShapeSparse resources support: GPU will access all 2D (multisample) sparse resources using the standard sparse image block shapes (based on pixel format) + VkBool32 residencyStandard3DBlockShapeSparse resources support: GPU will access all 3D sparse resources using the standard sparse image block shapes (based on pixel format) + VkBool32 residencyAlignedMipSizeSparse resources support: Images with mip level dimensions that are NOT a multiple of the sparse image block dimensions will be placed in the mip tail + VkBool32 residencyNonResidentStrictSparse resources support: GPU can consistently access non-resident regions of a resource, all reads return as if data is 0, writes are discarded resource maximum sizes - uint32_t maxImageDimension1Dmax 1D image dimension - uint32_t maxImageDimension2Dmax 2D image dimension - uint32_t maxImageDimension3Dmax 3D image dimension - uint32_t maxImageDimensionCubemax cubemap image dimension - uint32_t maxImageArrayLayersmax layers for image arrays - uint32_t maxTexelBufferElementsmax texel buffer size (fstexels) - uint32_t maxUniformBufferRangemax uniform buffer range (bytes) - uint32_t maxStorageBufferRangemax storage buffer range (bytes) - uint32_t maxPushConstantsSizemax size of the push constants pool (bytes) + uint32_t maxImageDimension1Dmax 1D image dimension + uint32_t maxImageDimension2Dmax 2D image dimension + uint32_t maxImageDimension3Dmax 3D image dimension + uint32_t maxImageDimensionCubemax cubemap image dimension + uint32_t maxImageArrayLayersmax layers for image arrays + uint32_t maxTexelBufferElementsmax texel buffer size (fstexels) + uint32_t maxUniformBufferRangemax uniform buffer range (bytes) + uint32_t maxStorageBufferRangemax storage buffer range (bytes) + uint32_t maxPushConstantsSizemax size of the push constants pool (bytes) memory limits - uint32_t maxMemoryAllocationCountmax number of device memory allocations supported - uint32_t maxSamplerAllocationCountmax number of samplers that can be allocated on a device - VkDeviceSize bufferImageGranularityGranularity (in bytes) at which buffers and images can be bound to adjacent memory for simultaneous usage - VkDeviceSize sparseAddressSpaceSizeTotal address space available for sparse allocations (bytes) + uint32_t maxMemoryAllocationCountmax number of device memory allocations supported + uint32_t maxSamplerAllocationCountmax number of samplers that can be allocated on a device + VkDeviceSize bufferImageGranularityGranularity (in bytes) at which buffers and images can be bound to adjacent memory for simultaneous usage + VkDeviceSize sparseAddressSpaceSizeTotal address space available for sparse allocations (bytes) descriptor set limits - uint32_t maxBoundDescriptorSetsmax number of descriptors sets that can be bound to a pipeline - uint32_t maxPerStageDescriptorSamplersmax number of samplers allowed per-stage in a descriptor set - uint32_t 
maxPerStageDescriptorUniformBuffersmax number of uniform buffers allowed per-stage in a descriptor set - uint32_t maxPerStageDescriptorStorageBuffersmax number of storage buffers allowed per-stage in a descriptor set - uint32_t maxPerStageDescriptorSampledImagesmax number of sampled images allowed per-stage in a descriptor set - uint32_t maxPerStageDescriptorStorageImagesmax number of storage images allowed per-stage in a descriptor set - uint32_t maxPerStageDescriptorInputAttachmentsmax number of input attachments allowed per-stage in a descriptor set - uint32_t maxPerStageResourcesmax number of resources allowed by a single stage - uint32_t maxDescriptorSetSamplersmax number of samplers allowed in all stages in a descriptor set - uint32_t maxDescriptorSetUniformBuffersmax number of uniform buffers allowed in all stages in a descriptor set - uint32_t maxDescriptorSetUniformBuffersDynamicmax number of dynamic uniform buffers allowed in all stages in a descriptor set - uint32_t maxDescriptorSetStorageBuffersmax number of storage buffers allowed in all stages in a descriptor set - uint32_t maxDescriptorSetStorageBuffersDynamicmax number of dynamic storage buffers allowed in all stages in a descriptor set - uint32_t maxDescriptorSetSampledImagesmax number of sampled images allowed in all stages in a descriptor set - uint32_t maxDescriptorSetStorageImagesmax number of storage images allowed in all stages in a descriptor set - uint32_t maxDescriptorSetInputAttachmentsmax number of input attachments allowed in all stages in a descriptor set + uint32_t maxBoundDescriptorSetsmax number of descriptors sets that can be bound to a pipeline + uint32_t maxPerStageDescriptorSamplersmax number of samplers allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorUniformBuffersmax number of uniform buffers allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorStorageBuffersmax number of storage buffers allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorSampledImagesmax number of sampled images allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorStorageImagesmax number of storage images allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorInputAttachmentsmax number of input attachments allowed per-stage in a descriptor set + uint32_t maxPerStageResourcesmax number of resources allowed by a single stage + uint32_t maxDescriptorSetSamplersmax number of samplers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetUniformBuffersmax number of uniform buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetUniformBuffersDynamicmax number of dynamic uniform buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetStorageBuffersmax number of storage buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetStorageBuffersDynamicmax number of dynamic storage buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetSampledImagesmax number of sampled images allowed in all stages in a descriptor set + uint32_t maxDescriptorSetStorageImagesmax number of storage images allowed in all stages in a descriptor set + uint32_t maxDescriptorSetInputAttachmentsmax number of input attachments allowed in all stages in a descriptor set vertex stage limits - uint32_t maxVertexInputAttributesmax number of vertex input attribute slots - uint32_t maxVertexInputBindingsmax number of vertex input binding slots - uint32_t maxVertexInputAttributeOffsetmax vertex 
input attribute offset added to vertex buffer offset - uint32_t maxVertexInputBindingStridemax vertex input binding stride - uint32_t maxVertexOutputComponentsmax number of output components written by vertex shader + uint32_t maxVertexInputAttributesmax number of vertex input attribute slots + uint32_t maxVertexInputBindingsmax number of vertex input binding slots + uint32_t maxVertexInputAttributeOffsetmax vertex input attribute offset added to vertex buffer offset + uint32_t maxVertexInputBindingStridemax vertex input binding stride + uint32_t maxVertexOutputComponentsmax number of output components written by vertex shader tessellation control stage limits - uint32_t maxTessellationGenerationLevelmax level supported by tessellation primitive generator - uint32_t maxTessellationPatchSizemax patch size (vertices) - uint32_t maxTessellationControlPerVertexInputComponentsmax number of input components per-vertex in TCS - uint32_t maxTessellationControlPerVertexOutputComponentsmax number of output components per-vertex in TCS - uint32_t maxTessellationControlPerPatchOutputComponentsmax number of output components per-patch in TCS - uint32_t maxTessellationControlTotalOutputComponentsmax total number of per-vertex and per-patch output components in TCS + uint32_t maxTessellationGenerationLevelmax level supported by tessellation primitive generator + uint32_t maxTessellationPatchSizemax patch size (vertices) + uint32_t maxTessellationControlPerVertexInputComponentsmax number of input components per-vertex in TCS + uint32_t maxTessellationControlPerVertexOutputComponentsmax number of output components per-vertex in TCS + uint32_t maxTessellationControlPerPatchOutputComponentsmax number of output components per-patch in TCS + uint32_t maxTessellationControlTotalOutputComponentsmax total number of per-vertex and per-patch output components in TCS tessellation evaluation stage limits - uint32_t maxTessellationEvaluationInputComponentsmax number of input components per vertex in TES - uint32_t maxTessellationEvaluationOutputComponentsmax number of output components per vertex in TES + uint32_t maxTessellationEvaluationInputComponentsmax number of input components per vertex in TES + uint32_t maxTessellationEvaluationOutputComponentsmax number of output components per vertex in TES geometry stage limits - uint32_t maxGeometryShaderInvocationsmax invocation count supported in geometry shader - uint32_t maxGeometryInputComponentsmax number of input components read in geometry stage - uint32_t maxGeometryOutputComponentsmax number of output components written in geometry stage - uint32_t maxGeometryOutputVerticesmax number of vertices that can be emitted in geometry stage - uint32_t maxGeometryTotalOutputComponentsmax total number of components (all vertices) written in geometry stage + uint32_t maxGeometryShaderInvocationsmax invocation count supported in geometry shader + uint32_t maxGeometryInputComponentsmax number of input components read in geometry stage + uint32_t maxGeometryOutputComponentsmax number of output components written in geometry stage + uint32_t maxGeometryOutputVerticesmax number of vertices that can be emitted in geometry stage + uint32_t maxGeometryTotalOutputComponentsmax total number of components (all vertices) written in geometry stage fragment stage limits - uint32_t maxFragmentInputComponentsmax number of input components read in fragment stage - uint32_t maxFragmentOutputAttachmentsmax number of output attachments written in fragment stage - uint32_t 
maxFragmentDualSrcAttachmentsmax number of output attachments written when using dual source blending - uint32_t maxFragmentCombinedOutputResourcesmax total number of storage buffers, storage images and output buffers + uint32_t maxFragmentInputComponentsmax number of input components read in fragment stage + uint32_t maxFragmentOutputAttachmentsmax number of output attachments written in fragment stage + uint32_t maxFragmentDualSrcAttachmentsmax number of output attachments written when using dual source blending + uint32_t maxFragmentCombinedOutputResourcesmax total number of storage buffers, storage images and output buffers compute stage limits - uint32_t maxComputeSharedMemorySizemax total storage size of work group local storage (bytes) - uint32_t maxComputeWorkGroupCount[3]max num of compute work groups that may be dispatched by a single command (x,y,z) - uint32_t maxComputeWorkGroupInvocationsmax total compute invocations in a single local work group - uint32_t maxComputeWorkGroupSize[3]max local size of a compute work group (x,y,z) - uint32_t subPixelPrecisionBitsnumber bits of subpixel precision in screen x and y - uint32_t subTexelPrecisionBitsnumber bits of precision for selecting texel weights - uint32_t mipmapPrecisionBitsnumber bits of precision for selecting mipmap weights - uint32_t maxDrawIndexedIndexValuemax index value for indexed draw calls (for 32-bit indices) - uint32_t maxDrawIndirectCountmax draw count for indirect draw calls - float maxSamplerLodBiasmax absolute sampler LOD bias - float maxSamplerAnisotropymax degree of sampler anisotropy - uint32_t maxViewportsmax number of active viewports - uint32_t maxViewportDimensions[2]max viewport dimensions (x,y) - float viewportBoundsRange[2]viewport bounds range (min,max) - uint32_t viewportSubPixelBitsnumber bits of subpixel precision for viewport - size_t minMemoryMapAlignmentmin required alignment of pointers returned by MapMemory (bytes) - VkDeviceSize minTexelBufferOffsetAlignmentmin required alignment for texel buffer offsets (bytes) - VkDeviceSize minUniformBufferOffsetAlignmentmin required alignment for uniform buffer sizes and offsets (bytes) - VkDeviceSize minStorageBufferOffsetAlignmentmin required alignment for storage buffer offsets (bytes) - int32_t minTexelOffsetmin texel offset for OpTextureSampleOffset - uint32_t maxTexelOffsetmax texel offset for OpTextureSampleOffset - int32_t minTexelGatherOffsetmin texel offset for OpTextureGatherOffset - uint32_t maxTexelGatherOffsetmax texel offset for OpTextureGatherOffset - float minInterpolationOffsetfurthest negative offset for interpolateAtOffset - float maxInterpolationOffsetfurthest positive offset for interpolateAtOffset - uint32_t subPixelInterpolationOffsetBitsnumber of subpixel bits for interpolateAtOffset - uint32_t maxFramebufferWidthmax width for a framebuffer - uint32_t maxFramebufferHeightmax height for a framebuffer - uint32_t maxFramebufferLayersmax layer count for a layered framebuffer - VkSampleCountFlags framebufferColorSampleCountssupported color sample counts for a framebuffer - VkSampleCountFlags framebufferDepthSampleCountssupported depth sample counts for a framebuffer - VkSampleCountFlags framebufferStencilSampleCountssupported stencil sample counts for a framebuffer - VkSampleCountFlags framebufferNoAttachmentsSampleCountssupported sample counts for a subpass which uses no attachments - uint32_t maxColorAttachmentsmax number of color attachments per subpass - VkSampleCountFlags sampledImageColorSampleCountssupported color sample counts 
for a non-integer sampled image - VkSampleCountFlags sampledImageIntegerSampleCountssupported sample counts for an integer image - VkSampleCountFlags sampledImageDepthSampleCountssupported depth sample counts for a sampled image - VkSampleCountFlags sampledImageStencilSampleCountssupported stencil sample counts for a sampled image - VkSampleCountFlags storageImageSampleCountssupported sample counts for a storage image - uint32_t maxSampleMaskWordsmax number of sample mask words - VkBool32 timestampComputeAndGraphicstimestamps on graphics and compute queues - float timestampPeriodnumber of nanoseconds it takes for timestamp query value to increment by 1 - uint32_t maxClipDistancesmax number of clip distances - uint32_t maxCullDistancesmax number of cull distances - uint32_t maxCombinedClipAndCullDistancesmax combined number of user clipping - uint32_t discreteQueuePrioritiesdistinct queue priorities available - float pointSizeRange[2]range (min,max) of supported point sizes - float lineWidthRange[2]range (min,max) of supported line widths - float pointSizeGranularitygranularity of supported point sizes - float lineWidthGranularitygranularity of supported line widths - VkBool32 strictLinesline rasterization follows preferred rules - VkBool32 standardSampleLocationssupports standard sample locations for all supported sample counts - VkDeviceSize optimalBufferCopyOffsetAlignmentoptimal offset of buffer copies - VkDeviceSize optimalBufferCopyRowPitchAlignmentoptimal pitch of buffer copies - VkDeviceSize nonCoherentAtomSizeminimum size and alignment for non-coherent host-mapped device memory access + uint32_t maxComputeSharedMemorySizemax total storage size of work group local storage (bytes) + uint32_t maxComputeWorkGroupCount[3]max num of compute work groups that may be dispatched by a single command (x,y,z) + uint32_t maxComputeWorkGroupInvocationsmax total compute invocations in a single local work group + uint32_t maxComputeWorkGroupSize[3]max local size of a compute work group (x,y,z) + uint32_t subPixelPrecisionBitsnumber bits of subpixel precision in screen x and y + uint32_t subTexelPrecisionBitsnumber bits of precision for selecting texel weights + uint32_t mipmapPrecisionBitsnumber bits of precision for selecting mipmap weights + uint32_t maxDrawIndexedIndexValuemax index value for indexed draw calls (for 32-bit indices) + uint32_t maxDrawIndirectCountmax draw count for indirect draw calls + float maxSamplerLodBiasmax absolute sampler LOD bias + float maxSamplerAnisotropymax degree of sampler anisotropy + uint32_t maxViewportsmax number of active viewports + uint32_t maxViewportDimensions[2]max viewport dimensions (x,y) + float viewportBoundsRange[2]viewport bounds range (min,max) + uint32_t viewportSubPixelBitsnumber bits of subpixel precision for viewport + size_t minMemoryMapAlignmentmin required alignment of pointers returned by MapMemory (bytes) + VkDeviceSize minTexelBufferOffsetAlignmentmin required alignment for texel buffer offsets (bytes) + VkDeviceSize minUniformBufferOffsetAlignmentmin required alignment for uniform buffer sizes and offsets (bytes) + VkDeviceSize minStorageBufferOffsetAlignmentmin required alignment for storage buffer offsets (bytes) + int32_t minTexelOffsetmin texel offset for OpTextureSampleOffset + uint32_t maxTexelOffsetmax texel offset for OpTextureSampleOffset + int32_t minTexelGatherOffsetmin texel offset for OpTextureGatherOffset + uint32_t maxTexelGatherOffsetmax texel offset for OpTextureGatherOffset + float minInterpolationOffsetfurthest 
negative offset for interpolateAtOffset + float maxInterpolationOffsetfurthest positive offset for interpolateAtOffset + uint32_t subPixelInterpolationOffsetBitsnumber of subpixel bits for interpolateAtOffset + uint32_t maxFramebufferWidthmax width for a framebuffer + uint32_t maxFramebufferHeightmax height for a framebuffer + uint32_t maxFramebufferLayersmax layer count for a layered framebuffer + VkSampleCountFlags framebufferColorSampleCountssupported color sample counts for a framebuffer + VkSampleCountFlags framebufferDepthSampleCountssupported depth sample counts for a framebuffer + VkSampleCountFlags framebufferStencilSampleCountssupported stencil sample counts for a framebuffer + VkSampleCountFlags framebufferNoAttachmentsSampleCountssupported sample counts for a subpass which uses no attachments + uint32_t maxColorAttachmentsmax number of color attachments per subpass + VkSampleCountFlags sampledImageColorSampleCountssupported color sample counts for a non-integer sampled image + VkSampleCountFlags sampledImageIntegerSampleCountssupported sample counts for an integer image + VkSampleCountFlags sampledImageDepthSampleCountssupported depth sample counts for a sampled image + VkSampleCountFlags sampledImageStencilSampleCountssupported stencil sample counts for a sampled image + VkSampleCountFlags storageImageSampleCountssupported sample counts for a storage image + uint32_t maxSampleMaskWordsmax number of sample mask words + VkBool32 timestampComputeAndGraphicstimestamps on graphics and compute queues + float timestampPeriodnumber of nanoseconds it takes for timestamp query value to increment by 1 + uint32_t maxClipDistancesmax number of clip distances + uint32_t maxCullDistancesmax number of cull distances + uint32_t maxCombinedClipAndCullDistancesmax combined number of user clipping + uint32_t discreteQueuePrioritiesdistinct queue priorities available + float pointSizeRange[2]range (min,max) of supported point sizes + float lineWidthRange[2]range (min,max) of supported line widths + float pointSizeGranularitygranularity of supported point sizes + float lineWidthGranularitygranularity of supported line widths + VkBool32 strictLinesline rasterization follows preferred rules + VkBool32 standardSampleLocationssupports standard sample locations for all supported sample counts + VkDeviceSize optimalBufferCopyOffsetAlignmentoptimal offset of buffer copies + VkDeviceSize optimalBufferCopyRowPitchAlignmentoptimal pitch of buffer copies + VkDeviceSize nonCoherentAtomSizeminimum size and alignment for non-coherent host-mapped device memory access VkStructureType sType - const void* pNext + const void* pNext VkSemaphoreCreateFlags flagsSemaphore creation flags VkStructureType sType - const void* pNext + const void* pNext VkQueryPoolCreateFlags flags VkQueryType queryType uint32_t queryCount @@ -1580,7 +1720,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkFramebufferCreateFlags flags VkRenderPass renderPass uint32_t attachmentCount @@ -1609,7 +1749,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t waitSemaphoreCount const VkSemaphore* pWaitSemaphores const VkPipelineStageFlags* pWaitDstStageMask @@ -1642,7 +1782,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDisplayModeCreateFlagsKHR flags VkDisplayModeParametersKHR parametersThe parameters this mode uses. 
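The WSI struct hunks in this region mostly tighten the sType/pNext member annotations rather than change layout. For orientation, here is a sketch of the convention those members implement, chaining the VkDisplayPresentInfoKHR described just below into VkPresentInfoKHR::pNext as VK_KHR_display_swapchain does. Compiled as C against <vulkan/vulkan.h>; the function name, extents, and handle plumbing are invented for illustration, while the structs, sType values, and chaining pattern are standard:

#include <vulkan/vulkan.h>

/* Hypothetical caller state: a real application obtains these from its
 * swapchain/semaphore creation calls and vkAcquireNextImageKHR. */
void present_on_display(VkQueue queue, VkSwapchainKHR swapchain,
                        VkSemaphore renderDone, uint32_t imageIndex)
{
    VkDisplayPresentInfoKHR displayInfo = {
        .sType = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
        .pNext = NULL,
        .srcRect = { {0, 0}, {1920, 1080} },  /* region of the image to read */
        .dstRect = { {0, 0}, {1920, 1080} },  /* region of the mode to cover */
        .persistent = VK_FALSE,               /* must be VK_FALSE without persistentMode */
    };
    VkPresentInfoKHR presentInfo = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
        .pNext = &displayInfo,                /* extension struct chained through pNext */
        .waitSemaphoreCount = 1,
        .pWaitSemaphores = &renderDone,
        .swapchainCount = 1,
        .pSwapchains = &swapchain,
        .pImageIndices = &imageIndex,
    };
    (void)vkQueuePresentKHR(queue, &presentInfo);
}

Every extensible struct leads with the same sType/pNext pair, which is what lets loaders and layers walk an arbitrary chain of extension structs without knowing their concrete types.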
@@ -1659,7 +1799,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDisplaySurfaceCreateFlagsKHR flags VkDisplayModeKHR displayModeThe mode to use when displaying this surface uint32_t planeIndexThe plane on which this surface appears. Must be between 0 and the value returned by vkGetPhysicalDeviceDisplayPlanePropertiesKHR() in pPropertyCount. @@ -1671,7 +1811,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkRect2D srcRectRectangle within the presentable image to read pixel data from when presenting to the display. VkRect2D dstRectRectangle within the current display mode's visible region to display srcRectangle in. VkBool32 persistentFor smart displays, use buffered mode. If the display properties member "persistentMode" is VK_FALSE, this member must always be VK_FALSE. @@ -1683,70 +1823,84 @@ typedef void CAMetalLayer; VkExtent2D minImageExtentSupported minimum image width and height for the surface VkExtent2D maxImageExtentSupported maximum image width and height for the surface uint32_t maxImageArrayLayersSupported maximum number of image layers for the surface - VkSurfaceTransformFlagsKHR supportedTransforms1 or more bits representing the transforms supported + VkSurfaceTransformFlagsKHR supportedTransforms1 or more bits representing the transforms supported VkSurfaceTransformFlagBitsKHR currentTransformThe surface's current transform relative to the device's natural orientation - VkCompositeAlphaFlagsKHR supportedCompositeAlpha1 or more bits representing the alpha compositing modes supported - VkImageUsageFlags supportedUsageFlagsSupported image usage flags for the surface + VkCompositeAlphaFlagsKHR supportedCompositeAlpha1 or more bits representing the alpha compositing modes supported + VkImageUsageFlags supportedUsageFlagsSupported image usage flags for the surface VkStructureType sType - const void* pNext + const void* pNext VkAndroidSurfaceCreateFlagsKHR flags struct ANativeWindow* window VkStructureType sType - const void* pNext + const void* pNext VkViSurfaceCreateFlagsNN flags void* window VkStructureType sType - const void* pNext + const void* pNext VkWaylandSurfaceCreateFlagsKHR flags struct wl_display* display struct wl_surface* surface VkStructureType sType - const void* pNext + const void* pNext VkWin32SurfaceCreateFlagsKHR flags HINSTANCE hinstance HWND hwnd VkStructureType sType - const void* pNext + const void* pNext VkXlibSurfaceCreateFlagsKHR flags Display* dpy Window window VkStructureType sType - const void* pNext + const void* pNext VkXcbSurfaceCreateFlagsKHR flags xcb_connection_t* connection xcb_window_t window + + VkStructureType sType + const void* pNext + VkDirectFBSurfaceCreateFlagsEXT flags + IDirectFB* dfb + IDirectFBSurface* surface + VkStructureType sType - const void* pNext + const void* pNext VkImagePipeSurfaceCreateFlagsFUCHSIA flags zx_handle_t imagePipeHandle VkStructureType sType - const void* pNext + const void* pNext VkStreamDescriptorSurfaceCreateFlagsGGP flags GgpStreamDescriptor streamDescriptor + + VkStructureType sType + const void* pNext + VkScreenSurfaceCreateFlagsQNX flags + struct _screen_context* context + struct _screen_window* window + VkFormat formatSupported pair of rendering format VkColorSpaceKHR colorSpaceand color space for the surface VkStructureType sType - const void* pNext + const void* pNext VkSwapchainCreateFlagsKHR flags VkSurfaceKHR surfaceThe swapchain's target surface uint32_t minImageCountMinimum number of presentation images 
the application needs @@ -1766,7 +1920,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t waitSemaphoreCountNumber of semaphores to wait for before presenting const VkSemaphore* pWaitSemaphoresSemaphores to wait for before presenting uint32_t swapchainCountNumber of swapchains to present in this call @@ -1776,20 +1930,20 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDebugReportFlagsEXT flagsIndicates which events call this callback PFN_vkDebugReportCallbackEXT pfnCallbackFunction pointer of a callback function void* pUserDataUser data provided to callback function VkStructureType sTypeMust be VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT - const void* pNext + const void* pNext uint32_t disabledValidationCheckCountNumber of validation checks to disable const VkValidationCheckEXT* pDisabledValidationChecksValidation checks to disable VkStructureType sTypeMust be VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT - const void* pNext + const void* pNext uint32_t enabledValidationFeatureCountNumber of validation features to enable const VkValidationFeatureEnableEXT* pEnabledValidationFeaturesValidation features to enable uint32_t disabledValidationFeatureCountNumber of validation features to disable @@ -1797,19 +1951,19 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkRasterizationOrderAMD rasterizationOrderRasterization order to use for the pipeline VkStructureType sType - const void* pNext + const void* pNext VkDebugReportObjectTypeEXT objectTypeThe type of the object uint64_t objectThe handle of the object, cast to uint64_t const char* pObjectNameName to apply to the object VkStructureType sType - const void* pNext + const void* pNext VkDebugReportObjectTypeEXT objectTypeThe type of the object uint64_t objectThe handle of the object, cast to uint64_t uint64_t tagNameThe name of the tag to set on the object @@ -1818,23 +1972,23 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext const char* pMarkerNameName of the debug marker - float color[4]Optional color for debug marker + float color[4]Optional color for debug marker VkStructureType sType - const void* pNext + const void* pNext VkBool32 dedicatedAllocationWhether this image uses a dedicated allocation VkStructureType sType - const void* pNext + const void* pNext VkBool32 dedicatedAllocationWhether this buffer uses a dedicated allocation VkStructureType sType - const void* pNext + const void* pNext VkImage imageImage that this allocation will be bound to VkBuffer bufferBuffer that this allocation will be bound to @@ -1846,29 +2000,29 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlagsNV handleTypes VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlagsNV handleTypes VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlagsNV handleType HANDLE handle VkStructureType sType - const void* pNext + const void* pNext const SECURITY_ATTRIBUTES* pAttributes DWORD dwAccess - + VkStructureType sType - const void* pNext + const void* pNext uint32_t acquireCount const VkDeviceMemory* pAcquireSyncs const uint64_t* pAcquireKeys @@ -1879,40 +2033,40 @@ typedef void CAMetalLayer; VkStructureTypesType - void* pNext + void* pNext VkBool32 deviceGeneratedCommands VkStructureType sType - const void* pNext + const void* pNext uint32_t 
privateDataSlotRequestCount VkStructureType sType - const void* pNext + const void* pNext VkPrivateDataSlotCreateFlagsEXT flags VkStructureType sType - void* pNext + void* pNext VkBool32 privateData VkStructureType sType - void* pNext - uint32_t maxGraphicsShaderGroupCount - uint32_t maxIndirectSequenceCount - uint32_t maxIndirectCommandsTokenCount - uint32_t maxIndirectCommandsStreamCount - uint32_t maxIndirectCommandsTokenOffset - uint32_t maxIndirectCommandsStreamStride - uint32_t minSequencesCountBufferOffsetAlignment - uint32_t minSequencesIndexBufferOffsetAlignment - uint32_t minIndirectCommandsBufferOffsetAlignment + void* pNext + uint32_t maxGraphicsShaderGroupCount + uint32_t maxIndirectSequenceCount + uint32_t maxIndirectCommandsTokenCount + uint32_t maxIndirectCommandsStreamCount + uint32_t maxIndirectCommandsTokenOffset + uint32_t maxIndirectCommandsStreamStride + uint32_t minSequencesCountBufferOffsetAlignment + uint32_t minSequencesIndexBufferOffsetAlignment + uint32_t minIndirectCommandsBufferOffsetAlignment VkStructureType sType - const void* pNext + const void* pNext uint32_t stageCount const VkPipelineShaderStageCreateInfo* pStages const VkPipelineVertexInputStateCreateInfo* pVertexInputState @@ -1920,7 +2074,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t groupCount const VkGraphicsShaderGroupCreateInfoNV* pGroups uint32_t pipelineCount @@ -1948,7 +2102,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkIndirectCommandsTokenTypeNV tokenType uint32_t stream uint32_t offset @@ -1965,8 +2119,8 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext - VkIndirectCommandsLayoutUsageFlagsNV flags + const void* pNext + VkIndirectCommandsLayoutUsageFlagsNV flags VkPipelineBindPoint pipelineBindPoint uint32_t tokenCount const VkIndirectCommandsLayoutTokenNV* pTokens @@ -1975,7 +2129,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineBindPoint pipelineBindPoint VkPipeline pipeline VkIndirectCommandsLayoutNV indirectCommandsLayout @@ -1986,13 +2140,13 @@ typedef void CAMetalLayer; VkDeviceSize preprocessOffset VkDeviceSize preprocessSize VkBuffer sequencesCountBuffer - VkDeviceSize sequencesCountOffset + VkDeviceSize sequencesCountOffset VkBuffer sequencesIndexBuffer - VkDeviceSize sequencesIndexOffset + VkDeviceSize sequencesIndexOffset VkStructureType sType - const void* pNext + const void* pNext VkPipelineBindPoint pipelineBindPoint VkPipeline pipeline VkIndirectCommandsLayoutNV indirectCommandsLayout @@ -2000,31 +2154,31 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkPhysicalDeviceFeatures features VkStructureType sType - void* pNext - VkPhysicalDeviceProperties properties + void* pNext + VkPhysicalDeviceProperties properties VkStructureType sType - void* pNext + void* pNext VkFormatProperties formatProperties VkStructureType sType - void* pNext + void* pNext VkImageFormatProperties imageFormatProperties VkStructureType sType - const void* pNext + const void* pNext VkFormat format VkImageType type VkImageTiling tiling @@ -2034,25 +2188,25 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkQueueFamilyProperties queueFamilyProperties VkStructureType sType - void* pNext + void* pNext VkPhysicalDeviceMemoryProperties memoryProperties VkStructureType sType - void* pNext + void* pNext VkSparseImageFormatProperties properties VkStructureType sType - const 
void* pNext + const void* pNext VkFormat format VkImageType type VkSampleCountFlagBits samples @@ -2062,8 +2216,8 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext - uint32_t maxPushDescriptors + void* pNext + uint32_t maxPushDescriptors uint8_t major @@ -2074,16 +2228,16 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext - VkDriverId driverID - char driverName[VK_MAX_DRIVER_NAME_SIZE] - char driverInfo[VK_MAX_DRIVER_INFO_SIZE] - VkConformanceVersion conformanceVersion + void* pNext + VkDriverId driverID + char driverName[VK_MAX_DRIVER_NAME_SIZE] + char driverInfo[VK_MAX_DRIVER_INFO_SIZE] + VkConformanceVersion conformanceVersion VkStructureType sType - const void* pNext + const void* pNext uint32_t swapchainCountCopy of VkPresentInfoKHR::swapchainCount const VkPresentRegionKHR* pRegionsThe regions that have changed @@ -2098,7 +2252,7 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkBool32 variablePointersStorageBuffer VkBool32 variablePointers @@ -2113,19 +2267,19 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlagBits handleType VkStructureType sType - void* pNext + void* pNext VkExternalMemoryProperties externalMemoryProperties VkStructureType sType - const void* pNext + const void* pNext VkBufferCreateFlags flags VkBufferUsageFlags usage VkExternalMemoryHandleTypeFlagBits handleType @@ -2133,83 +2287,100 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkExternalMemoryProperties externalMemoryProperties VkStructureType sType - void* pNext - uint8_t deviceUUID[VK_UUID_SIZE] - uint8_t driverUUID[VK_UUID_SIZE] - uint8_t deviceLUID[VK_LUID_SIZE] - uint32_t deviceNodeMask - VkBool32 deviceLUIDValid + void* pNext + uint8_t deviceUUID[VK_UUID_SIZE] + uint8_t driverUUID[VK_UUID_SIZE] + uint8_t deviceLUID[VK_LUID_SIZE] + uint32_t deviceNodeMask + VkBool32 deviceLUIDValid VkStructureType sType - const void* pNext - VkExternalMemoryHandleTypeFlags handleTypes + const void* pNext + VkExternalMemoryHandleTypeFlags handleTypes VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlags handleTypes VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlags handleTypes VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlagBits handleType HANDLE handle LPCWSTR name VkStructureType sType - const void* pNext + const void* pNext const SECURITY_ATTRIBUTES* pAttributes DWORD dwAccess LPCWSTR name + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagBits handleType + zx_handle_t handle + + + VkStructureType sType + void* pNext + uint32_t memoryTypeBits + + + VkStructureType sType + const void* pNext + VkDeviceMemory memory + VkExternalMemoryHandleTypeFlagBits handleType + VkStructureType sType - void* pNext + void* pNext uint32_t memoryTypeBits VkStructureType sType - const void* pNext + const void* pNext VkDeviceMemory memory VkExternalMemoryHandleTypeFlagBits handleType VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlagBits handleType int fd VkStructureType sType - void* pNext + void* pNext uint32_t memoryTypeBits VkStructureType sType - const void* pNext + const void* pNext VkDeviceMemory memory VkExternalMemoryHandleTypeFlagBits handleType - + VkStructureType sType - const void* pNext + const void* pNext uint32_t acquireCount const VkDeviceMemory* pAcquireSyncs const 
uint64_t* pAcquireKeys @@ -2220,13 +2391,13 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkExternalSemaphoreHandleTypeFlagBits handleType VkStructureType sType - void* pNext + void* pNext VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures @@ -2234,29 +2405,29 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkExternalSemaphoreHandleTypeFlags handleTypes VkStructureType sType - const void* pNext + const void* pNext VkSemaphore semaphore VkSemaphoreImportFlags flags - VkExternalSemaphoreHandleTypeFlagBits handleType + VkExternalSemaphoreHandleTypeFlagBits handleType HANDLE handle LPCWSTR name VkStructureType sType - const void* pNext + const void* pNext const SECURITY_ATTRIBUTES* pAttributes DWORD dwAccess LPCWSTR name VkStructureType sType - const void* pNext + const void* pNext uint32_t waitSemaphoreValuesCount const uint64_t* pWaitSemaphoreValues uint32_t signalSemaphoreValuesCount @@ -2264,13 +2435,13 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkSemaphore semaphore VkExternalSemaphoreHandleTypeFlagBits handleType VkStructureType sType - const void* pNext + const void* pNext VkSemaphore semaphore VkSemaphoreImportFlags flags VkExternalSemaphoreHandleTypeFlagBits handleType @@ -2278,19 +2449,33 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext + VkSemaphore semaphore + VkExternalSemaphoreHandleTypeFlagBits handleType + + + VkStructureType sType + const void* pNext + VkSemaphore semaphore + VkSemaphoreImportFlags flags + VkExternalSemaphoreHandleTypeFlagBits handleType + zx_handle_t zirconHandle + + + VkStructureType sType + const void* pNext VkSemaphore semaphore VkExternalSemaphoreHandleTypeFlagBits handleType VkStructureType sType - const void* pNext + const void* pNext VkExternalFenceHandleTypeFlagBits handleType VkStructureType sType - void* pNext + void* pNext VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes VkExternalFenceHandleTypeFlags compatibleHandleTypes VkExternalFenceFeatureFlags externalFenceFeatures @@ -2298,35 +2483,35 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkExternalFenceHandleTypeFlags handleTypes VkStructureType sType - const void* pNext + const void* pNext VkFence fence VkFenceImportFlags flags - VkExternalFenceHandleTypeFlagBits handleType + VkExternalFenceHandleTypeFlagBits handleType HANDLE handle LPCWSTR name VkStructureType sType - const void* pNext + const void* pNext const SECURITY_ATTRIBUTES* pAttributes DWORD dwAccess LPCWSTR name VkStructureType sType - const void* pNext + const void* pNext VkFence fence VkExternalFenceHandleTypeFlagBits handleType VkStructureType sType - const void* pNext + const void* pNext VkFence fence VkFenceImportFlags flags VkExternalFenceHandleTypeFlagBits handleType @@ -2334,13 +2519,13 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkFence fence VkExternalFenceHandleTypeFlagBits handleType VkStructureType sType - void* pNext + void* pNext VkBool32 multiviewMultiple views in a renderpass VkBool32 multiviewGeometryShaderMultiple views in a renderpass w/ geometry shader VkBool32 multiviewTessellationShaderMultiple views in a renderpass w/ tessellation shader @@ -2348,14 +2533,14 @@ typedef void CAMetalLayer; VkStructureType 
sType - void* pNext - uint32_t maxMultiviewViewCountmax number of views in a subpass - uint32_t maxMultiviewInstanceIndexmax instance index for a draw in a multiview subpass + void* pNext + uint32_t maxMultiviewViewCountmax number of views in a subpass + uint32_t maxMultiviewInstanceIndexmax instance index for a draw in a multiview subpass VkStructureType sType - const void* pNext + const void* pNext uint32_t subpassCount const uint32_t* pViewMasks uint32_t dependencyCount @@ -2366,42 +2551,42 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext uint32_t minImageCountSupported minimum number of images for the surface uint32_t maxImageCountSupported maximum number of images for the surface, 0 for unlimited VkExtent2D currentExtentCurrent image width and height for the surface, (0, 0) if undefined VkExtent2D minImageExtentSupported minimum image width and height for the surface VkExtent2D maxImageExtentSupported maximum image width and height for the surface uint32_t maxImageArrayLayersSupported maximum number of image layers for the surface - VkSurfaceTransformFlagsKHR supportedTransforms1 or more bits representing the transforms supported + VkSurfaceTransformFlagsKHR supportedTransforms1 or more bits representing the transforms supported VkSurfaceTransformFlagBitsKHR currentTransformThe surface's current transform relative to the device's natural orientation - VkCompositeAlphaFlagsKHR supportedCompositeAlpha1 or more bits representing the alpha compositing modes supported - VkImageUsageFlags supportedUsageFlagsSupported image usage flags for the surface + VkCompositeAlphaFlagsKHR supportedCompositeAlpha1 or more bits representing the alpha compositing modes supported + VkImageUsageFlags supportedUsageFlagsSupported image usage flags for the surface VkSurfaceCounterFlagsEXT supportedSurfaceCounters VkStructureType sType - const void* pNext + const void* pNext VkDisplayPowerStateEXT powerState VkStructureType sType - const void* pNext + const void* pNext VkDeviceEventTypeEXT deviceEvent VkStructureType sType - const void* pNext + const void* pNext VkDisplayEventTypeEXT displayEvent VkStructureType sType - const void* pNext + const void* pNext VkSurfaceCounterFlagsEXT surfaceCounters VkStructureType sType - void* pNext + void* pNext uint32_t physicalDeviceCount VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE] VkBool32 subsetAllocation @@ -2409,14 +2594,14 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkMemoryAllocateFlags flags uint32_t deviceMask VkStructureType sType - const void* pNext + const void* pNext VkBuffer buffer VkDeviceMemory memory VkDeviceSize memoryOffset @@ -2424,14 +2609,14 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t deviceIndexCount const uint32_t* pDeviceIndices VkStructureType sType - const void* pNext + const void* pNext VkImage image VkDeviceMemory memory VkDeviceSize memoryOffset @@ -2439,7 +2624,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t deviceIndexCount const uint32_t* pDeviceIndices uint32_t splitInstanceBindRegionCount @@ -2448,7 +2633,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t deviceMask uint32_t deviceRenderAreaCount const VkRect2D* pDeviceRenderAreas @@ -2456,13 +2641,13 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t deviceMask VkStructureType sType - 
const void* pNext + const void* pNext uint32_t waitSemaphoreCount const uint32_t* pWaitSemaphoreDeviceIndices uint32_t commandBufferCount @@ -2473,31 +2658,31 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t resourceDeviceIndex uint32_t memoryDeviceIndex VkStructureType sType - const void* pNext + const void* pNext uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE] VkDeviceGroupPresentModeFlagsKHR modes VkStructureType sType - const void* pNext + const void* pNext VkSwapchainKHR swapchain VkStructureType sType - const void* pNext + const void* pNext VkSwapchainKHR swapchain uint32_t imageIndex VkStructureType sType - const void* pNext + const void* pNext VkSwapchainKHR swapchain uint64_t timeout VkSemaphore semaphore @@ -2506,21 +2691,21 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t swapchainCount const uint32_t* pDeviceMasks VkDeviceGroupPresentModeFlagBitsKHR mode VkStructureType sType - const void* pNext + const void* pNext uint32_t physicalDeviceCount const VkPhysicalDevice* pPhysicalDevices VkStructureType sType - const void* pNext + const void* pNext VkDeviceGroupPresentModeFlagsKHR modes @@ -2534,7 +2719,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDescriptorUpdateTemplateCreateFlags flags uint32_t descriptorUpdateEntryCountNumber of descriptor update entries to use for the update template const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntriesDescriptor update entries for the template @@ -2552,7 +2737,7 @@ typedef void CAMetalLayer; Display primary in chromaticity coordinates VkStructureType sType - const void* pNext + const void* pNext From SMPTE 2086 VkXYColorEXT displayPrimaryRedDisplay primary's Red VkXYColorEXT displayPrimaryGreenDisplay primary's Green @@ -2566,12 +2751,12 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkBool32 localDimmingSupport VkStructureType sType - const void* pNext + const void* pNext VkBool32 localDimmingEnable @@ -2586,7 +2771,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t swapchainCountCopy of VkPresentInfoKHR::swapchainCount const VkPresentTimeGOOGLE* pTimesThe earliest times to present images @@ -2596,19 +2781,19 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkIOSSurfaceCreateFlagsMVK flags const void* pView VkStructureType sType - const void* pNext + const void* pNext VkMacOSSurfaceCreateFlagsMVK flags const void* pView VkStructureType sType - const void* pNext + const void* pNext VkMetalSurfaceCreateFlagsEXT flags const CAMetalLayer* pLayer @@ -2618,7 +2803,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBool32 viewportWScalingEnable uint32_t viewportCount const VkViewportWScalingNV* pViewportWScalings @@ -2631,19 +2816,19 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineViewportSwizzleStateCreateFlagsNV flags uint32_t viewportCount const VkViewportSwizzleNV* pViewportSwizzles VkStructureType sType - void* pNext - uint32_t maxDiscardRectanglesmax number of active discard rectangles + void* pNext + uint32_t maxDiscardRectanglesmax number of active discard rectangles VkStructureType sType - const void* pNext + const void* pNext VkPipelineDiscardRectangleStateCreateFlagsEXT flags VkDiscardRectangleModeEXT discardRectangleMode uint32_t discardRectangleCount 
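The hunk above touches the VK_EXT_discard_rectangles structures. A minimal sketch of how a structure like VkPipelineDiscardRectangleStateCreateInfoEXT is chained into graphics pipeline creation, assuming the extension was enabled at device creation; the helper name and the caller-supplied create info are hypothetical:

#include <vulkan/vulkan.h>

/* Hypothetical helper: attach one exclusive discard rectangle to an
 * otherwise complete VkGraphicsPipelineCreateInfo. Assumes
 * VK_EXT_discard_rectangles was enabled on the VkDevice. */
static void attach_discard_rectangle(VkGraphicsPipelineCreateInfo *pipeline_info,
                                     const VkRect2D *rect,
                                     VkPipelineDiscardRectangleStateCreateInfoEXT *state)
{
    state->sType = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT;
    state->pNext = pipeline_info->pNext; /* preserve any existing chain */
    state->flags = 0;
    state->discardRectangleMode = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT;
    state->discardRectangleCount = 1;
    state->pDiscardRectangles = rect;
    pipeline_info->pNext = state; /* fragments inside rect are discarded */
}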
@@ -2651,8 +2836,8 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext - VkBool32 perViewPositionAllComponents + void* pNext + VkBool32 perViewPositionAllComponents uint32_t subpass @@ -2662,60 +2847,60 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t aspectReferenceCount const VkInputAttachmentAspectReference* pAspectReferences VkStructureType sType - const void* pNext + const void* pNext VkSurfaceKHR surface VkStructureType sType - void* pNext + void* pNext VkSurfaceCapabilitiesKHR surfaceCapabilities VkStructureType sType - void* pNext + void* pNext VkSurfaceFormatKHR surfaceFormat VkStructureType sType - void* pNext + void* pNext VkDisplayPropertiesKHR displayProperties VkStructureType sType - void* pNext + void* pNext VkDisplayPlanePropertiesKHR displayPlaneProperties VkStructureType sType - void* pNext + void* pNext VkDisplayModePropertiesKHR displayModeProperties VkStructureType sType - const void* pNext + const void* pNext VkDisplayModeKHR mode uint32_t planeIndex VkStructureType sType - void* pNext + void* pNext VkDisplayPlaneCapabilitiesKHR capabilities VkStructureType sType - void* pNext + void* pNext VkImageUsageFlags sharedPresentSupportedUsageFlagsSupported image usage flags if swapchain created using a shared present mode VkStructureType sType - void* pNext + void* pNext VkBool32 storageBuffer16BitAccess16-bit integer/floating-point variables supported in BufferBlock VkBool32 uniformAndStorageBuffer16BitAccess16-bit integer/floating-point variables supported in BufferBlock and Block VkBool32 storagePushConstant1616-bit integer/floating-point variables supported in PushConstant @@ -2724,89 +2909,89 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext - uint32_t subgroupSizeThe size of a subgroup for this queue. - VkShaderStageFlags supportedStagesBitfield of what shader stages support subgroup operations - VkSubgroupFeatureFlags supportedOperationsBitfield of what subgroup operations are supported. - VkBool32 quadOperationsInAllStagesFlag to specify whether quad operations are available in all stages. + void* pNext + uint32_t subgroupSizeThe size of a subgroup for this queue. + VkShaderStageFlags supportedStagesBitfield of what shader stages support subgroup operations + VkSubgroupFeatureFlags supportedOperationsBitfield of what subgroup operations are supported. + VkBool32 quadOperationsInAllStagesFlag to specify whether quad operations are available in all stages. 
- VkStructureType sType - void* pNext - VkBool32 shaderSubgroupExtendedTypesFlag to specify whether subgroup operations with extended types are supported + VkStructureType sType + void* pNext + VkBool32 shaderSubgroupExtendedTypesFlag to specify whether subgroup operations with extended types are supported VkStructureType sType - const void* pNext + const void* pNext VkBuffer buffer VkStructureType sType - const void* pNext + const void* pNext VkImage image VkStructureType sType - const void* pNext + const void* pNext VkImage image VkStructureType sType - void* pNext + void* pNext VkMemoryRequirements memoryRequirements VkStructureType sType - void* pNext + void* pNext VkSparseImageMemoryRequirements memoryRequirements VkStructureType sType - void* pNext - VkPointClippingBehavior pointClippingBehavior + void* pNext + VkPointClippingBehavior pointClippingBehavior VkStructureType sType - void* pNext + void* pNext VkBool32 prefersDedicatedAllocation VkBool32 requiresDedicatedAllocation VkStructureType sType - const void* pNext + const void* pNext VkImage imageImage that this allocation will be bound to VkBuffer bufferBuffer that this allocation will be bound to VkStructureType sType - const void* pNext + const void* pNext VkImageUsageFlags usage VkStructureType sType - const void* pNext + const void* pNext VkTessellationDomainOrigin domainOrigin VkStructureType sType - const void* pNext + const void* pNext VkSamplerYcbcrConversion conversion VkStructureType sType - const void* pNext + const void* pNext VkFormat format VkSamplerYcbcrModelConversion ycbcrModel VkSamplerYcbcrRange ycbcrRange @@ -2819,84 +3004,84 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkImageAspectFlagBits planeAspect VkStructureType sType - const void* pNext + const void* pNext VkImageAspectFlagBits planeAspect VkStructureType sType - void* pNext + void* pNext VkBool32 samplerYcbcrConversionSampler color conversion supported VkStructureType sType - void* pNext + void* pNext uint32_t combinedImageSamplerDescriptorCount VkStructureType sType - void* pNext + void* pNext VkBool32 supportsTextureGatherLODBiasAMD VkStructureType sType - const void* pNext + const void* pNext VkBuffer buffer VkDeviceSize offset VkConditionalRenderingFlagsEXT flags VkStructureType sType - const void* pNext + const void* pNext VkBool32 protectedSubmitSubmit protected command buffers VkStructureType sType - void* pNext + void* pNext VkBool32 protectedMemory VkStructureType sType - void* pNext - VkBool32 protectedNoFault + void* pNext + VkBool32 protectedNoFault VkStructureType sType - const void* pNext + const void* pNext VkDeviceQueueCreateFlags flags uint32_t queueFamilyIndex uint32_t queueIndex VkStructureType sType - const void* pNext + const void* pNext VkPipelineCoverageToColorStateCreateFlagsNV flags VkBool32 coverageToColorEnable uint32_t coverageToColorLocation VkStructureType sType - void* pNext - VkBool32 filterMinmaxSingleComponentFormats - VkBool32 filterMinmaxImageComponentMapping + void* pNext + VkBool32 filterMinmaxSingleComponentFormats + VkBool32 filterMinmaxImageComponentMapping float x float y - + VkStructureType sType - const void* pNext - VkSampleCountFlagBits sampleLocationsPerPixel + const void* pNext + VkSampleCountFlagBits sampleLocationsPerPixel VkExtent2D sampleLocationGridSize uint32_t sampleLocationsCount const VkSampleLocationEXT* pSampleLocations @@ -2911,7 +3096,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t 
attachmentInitialSampleLocationsCount const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations uint32_t postSubpassSampleLocationsCount @@ -2919,81 +3104,81 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBool32 sampleLocationsEnable VkSampleLocationsInfoEXT sampleLocationsInfo VkStructureType sType - void* pNext - VkSampleCountFlags sampleLocationSampleCounts - VkExtent2D maxSampleLocationGridSize - float sampleLocationCoordinateRange[2] - uint32_t sampleLocationSubPixelBits - VkBool32 variableSampleLocations + void* pNext + VkSampleCountFlags sampleLocationSampleCounts + VkExtent2D maxSampleLocationGridSize + float sampleLocationCoordinateRange[2] + uint32_t sampleLocationSubPixelBits + VkBool32 variableSampleLocations VkStructureType sType - void* pNext + void* pNext VkExtent2D maxSampleLocationGridSize VkStructureType sType - const void* pNext + const void* pNext VkSamplerReductionMode reductionMode VkStructureType sType - void* pNext + void* pNext VkBool32 advancedBlendCoherentOperations VkStructureType sType - void* pNext - uint32_t advancedBlendMaxColorAttachments - VkBool32 advancedBlendIndependentBlend - VkBool32 advancedBlendNonPremultipliedSrcColor - VkBool32 advancedBlendNonPremultipliedDstColor - VkBool32 advancedBlendCorrelatedOverlap - VkBool32 advancedBlendAllOperations + void* pNext + uint32_t advancedBlendMaxColorAttachments + VkBool32 advancedBlendIndependentBlend + VkBool32 advancedBlendNonPremultipliedSrcColor + VkBool32 advancedBlendNonPremultipliedDstColor + VkBool32 advancedBlendCorrelatedOverlap + VkBool32 advancedBlendAllOperations VkStructureType sType - const void* pNext + const void* pNext VkBool32 srcPremultiplied VkBool32 dstPremultiplied VkBlendOverlapEXT blendOverlap VkStructureType sType - void* pNext + void* pNext VkBool32 inlineUniformBlock VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind VkStructureType sType - void* pNext - uint32_t maxInlineUniformBlockSize - uint32_t maxPerStageDescriptorInlineUniformBlocks - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks - uint32_t maxDescriptorSetInlineUniformBlocks - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks + void* pNext + uint32_t maxInlineUniformBlockSize + uint32_t maxPerStageDescriptorInlineUniformBlocks + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks + uint32_t maxDescriptorSetInlineUniformBlocks + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks VkStructureType sType - const void* pNext + const void* pNext uint32_t dataSize const void* pData VkStructureType sType - const void* pNext + const void* pNext uint32_t maxInlineUniformBlockBindings VkStructureType sType - const void* pNext + const void* pNext VkPipelineCoverageModulationStateCreateFlagsNV flags VkCoverageModulationModeNV coverageModulationMode VkBool32 coverageModulationTableEnable @@ -3002,45 +3187,45 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t viewFormatCount const VkFormat* pViewFormats VkStructureType sType - const void* pNext + const void* pNext VkValidationCacheCreateFlagsEXT flags size_t initialDataSize const void* pInitialData VkStructureType sType - const void* pNext + const void* pNext VkValidationCacheEXT validationCache VkStructureType sType - void* pNext - uint32_t maxPerSetDescriptors - VkDeviceSize maxMemoryAllocationSize + void* pNext + uint32_t maxPerSetDescriptors + VkDeviceSize maxMemoryAllocationSize VkStructureType sType - void* pNext + 
void* pNext VkBool32 supported VkStructureType sType - void* pNext + void* pNext VkBool32 shaderDrawParameters VkStructureType sType - void* pNext + void* pNext VkBool32 shaderFloat1616-bit floats (halfs) in shaders VkBool32 shaderInt88-bit integers in shaders @@ -3048,29 +3233,29 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext - VkShaderFloatControlsIndependence denormBehaviorIndependence - VkShaderFloatControlsIndependence roundingModeIndependence - VkBool32 shaderSignedZeroInfNanPreserveFloat16An implementation can preserve signed zero, nan, inf - VkBool32 shaderSignedZeroInfNanPreserveFloat32An implementation can preserve signed zero, nan, inf - VkBool32 shaderSignedZeroInfNanPreserveFloat64An implementation can preserve signed zero, nan, inf - VkBool32 shaderDenormPreserveFloat16An implementation can preserve denormals - VkBool32 shaderDenormPreserveFloat32An implementation can preserve denormals - VkBool32 shaderDenormPreserveFloat64An implementation can preserve denormals - VkBool32 shaderDenormFlushToZeroFloat16An implementation can flush to zero denormals - VkBool32 shaderDenormFlushToZeroFloat32An implementation can flush to zero denormals - VkBool32 shaderDenormFlushToZeroFloat64An implementation can flush to zero denormals - VkBool32 shaderRoundingModeRTEFloat16An implementation can support RTE - VkBool32 shaderRoundingModeRTEFloat32An implementation can support RTE - VkBool32 shaderRoundingModeRTEFloat64An implementation can support RTE - VkBool32 shaderRoundingModeRTZFloat16An implementation can support RTZ - VkBool32 shaderRoundingModeRTZFloat32An implementation can support RTZ - VkBool32 shaderRoundingModeRTZFloat64An implementation can support RTZ + void* pNext + VkShaderFloatControlsIndependence denormBehaviorIndependence + VkShaderFloatControlsIndependence roundingModeIndependence + VkBool32 shaderSignedZeroInfNanPreserveFloat16An implementation can preserve signed zero, nan, inf + VkBool32 shaderSignedZeroInfNanPreserveFloat32An implementation can preserve signed zero, nan, inf + VkBool32 shaderSignedZeroInfNanPreserveFloat64An implementation can preserve signed zero, nan, inf + VkBool32 shaderDenormPreserveFloat16An implementation can preserve denormals + VkBool32 shaderDenormPreserveFloat32An implementation can preserve denormals + VkBool32 shaderDenormPreserveFloat64An implementation can preserve denormals + VkBool32 shaderDenormFlushToZeroFloat16An implementation can flush to zero denormals + VkBool32 shaderDenormFlushToZeroFloat32An implementation can flush to zero denormals + VkBool32 shaderDenormFlushToZeroFloat64An implementation can flush to zero denormals + VkBool32 shaderRoundingModeRTEFloat16An implementation can support RTE + VkBool32 shaderRoundingModeRTEFloat32An implementation can support RTE + VkBool32 shaderRoundingModeRTEFloat64An implementation can support RTE + VkBool32 shaderRoundingModeRTZFloat16An implementation can support RTZ + VkBool32 shaderRoundingModeRTZFloat32An implementation can support RTZ + VkBool32 shaderRoundingModeRTZFloat64An implementation can support RTZ VkStructureType sType - void* pNext + void* pNext VkBool32 hostQueryReset @@ -3080,7 +3265,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext const void* handle int stride int format @@ -3089,12 +3274,12 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkSwapchainImageUsageFlagsANDROID usage VkStructureType sType - const void* pNext + const void* pNext VkBool32 sharedImage @@ 
-3115,19 +3300,30 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkQueueGlobalPriorityEXT globalPriority + + VkStructureType sType + void* pNext + VkBool32 globalPriorityQuery + + + VkStructureType sType + void* pNext + uint32_t priorityCount + VkQueueGlobalPriorityEXT priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_EXT] + VkStructureType sType - const void* pNext + const void* pNext VkObjectType objectType uint64_t objectHandle const char* pObjectName VkStructureType sType - const void* pNext + const void* pNext VkObjectType objectType uint64_t objectHandle uint64_t tagName @@ -3136,13 +3332,13 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext const char* pLabelName - float color[4] + float color[4] - + VkStructureType sType - const void* pNext + const void* pNext VkDebugUtilsMessengerCreateFlagsEXT flags VkDebugUtilsMessageSeverityFlagsEXT messageSeverity VkDebugUtilsMessageTypeFlagsEXT messageType @@ -3154,7 +3350,7 @@ typedef void CAMetalLayer; const void* pNext VkDebugUtilsMessengerCallbackDataFlagsEXT flags const char* pMessageIdName - int32_t messageIdNumber + int32_t messageIdNumber const char* pMessage uint32_t queueLabelCount const VkDebugUtilsLabelEXT* pQueueLabels @@ -3163,74 +3359,97 @@ typedef void CAMetalLayer; uint32_t objectCount const VkDebugUtilsObjectNameInfoEXT* pObjects + + VkStructureType sType + void* pNext + VkBool32 deviceMemoryReport + + + VkStructureType sType + const void* pNext + VkDeviceMemoryReportFlagsEXT flags + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback + void* pUserData + + + VkStructureType sType + const void* pNext + VkDeviceMemoryReportFlagsEXT flags + VkDeviceMemoryReportEventTypeEXT type + uint64_t memoryObjectId + VkDeviceSize size + VkObjectType objectType + uint64_t objectHandle + uint32_t heapIndex + VkStructureType sType - const void* pNext + const void* pNext VkExternalMemoryHandleTypeFlagBits handleType void* pHostPointer VkStructureType sType - void* pNext + void* pNext uint32_t memoryTypeBits VkStructureType sType - void* pNext - VkDeviceSize minImportedHostPointerAlignment + void* pNext + VkDeviceSize minImportedHostPointerAlignment VkStructureType sType - void* pNext - float primitiveOverestimationSizeThe size in pixels the primitive is enlarged at each edge during conservative rasterization - float maxExtraPrimitiveOverestimationSizeThe maximum additional overestimation the client can specify in the pipeline state - float extraPrimitiveOverestimationSizeGranularityThe granularity of extra overestimation sizes the implementations supports between 0 and maxExtraOverestimationSize - VkBool32 primitiveUnderestimationtrue if the implementation supports conservative rasterization underestimation mode - VkBool32 conservativePointAndLineRasterizationtrue if conservative rasterization also applies to points and lines - VkBool32 degenerateTrianglesRasterizedtrue if degenerate triangles (those with zero area after snap) are rasterized - VkBool32 degenerateLinesRasterizedtrue if degenerate lines (those with zero length after snap) are rasterized - VkBool32 fullyCoveredFragmentShaderInputVariabletrue if the implementation supports the FullyCoveredEXT SPIR-V builtin fragment shader input variable - VkBool32 conservativeRasterizationPostDepthCoveragetrue if the implementation supports both conservative rasterization and post depth coverage sample coverage mask + void* pNext + float primitiveOverestimationSizeThe size in pixels the primitive is enlarged at each edge 
during conservative rasterization + float maxExtraPrimitiveOverestimationSizeThe maximum additional overestimation the client can specify in the pipeline state + float extraPrimitiveOverestimationSizeGranularityThe granularity of extra overestimation sizes the implementations supports between 0 and maxExtraOverestimationSize + VkBool32 primitiveUnderestimationtrue if the implementation supports conservative rasterization underestimation mode + VkBool32 conservativePointAndLineRasterizationtrue if conservative rasterization also applies to points and lines + VkBool32 degenerateTrianglesRasterizedtrue if degenerate triangles (those with zero area after snap) are rasterized + VkBool32 degenerateLinesRasterizedtrue if degenerate lines (those with zero length after snap) are rasterized + VkBool32 fullyCoveredFragmentShaderInputVariabletrue if the implementation supports the FullyCoveredEXT SPIR-V builtin fragment shader input variable + VkBool32 conservativeRasterizationPostDepthCoveragetrue if the implementation supports both conservative rasterization and post depth coverage sample coverage mask VkStructureType sType - const void* pNext + const void* pNext VkTimeDomainEXT timeDomain VkStructureType sType - void* pNext - uint32_t shaderEngineCountnumber of shader engines - uint32_t shaderArraysPerEngineCountnumber of shader arrays - uint32_t computeUnitsPerShaderArraynumber of physical CUs per shader array - uint32_t simdPerComputeUnitnumber of SIMDs per compute unit - uint32_t wavefrontsPerSimdnumber of wavefront slots in each SIMD - uint32_t wavefrontSizemaximum number of threads per wavefront - uint32_t sgprsPerSimdnumber of physical SGPRs per SIMD - uint32_t minSgprAllocationminimum number of SGPRs that can be allocated by a wave - uint32_t maxSgprAllocationnumber of available SGPRs - uint32_t sgprAllocationGranularitySGPRs are allocated in groups of this size - uint32_t vgprsPerSimdnumber of physical VGPRs per SIMD - uint32_t minVgprAllocationminimum number of VGPRs that can be allocated by a wave - uint32_t maxVgprAllocationnumber of available VGPRs - uint32_t vgprAllocationGranularityVGPRs are allocated in groups of this size + void* pNext + uint32_t shaderEngineCountnumber of shader engines + uint32_t shaderArraysPerEngineCountnumber of shader arrays + uint32_t computeUnitsPerShaderArraynumber of physical CUs per shader array + uint32_t simdPerComputeUnitnumber of SIMDs per compute unit + uint32_t wavefrontsPerSimdnumber of wavefront slots in each SIMD + uint32_t wavefrontSizemaximum number of threads per wavefront + uint32_t sgprsPerSimdnumber of physical SGPRs per SIMD + uint32_t minSgprAllocationminimum number of SGPRs that can be allocated by a wave + uint32_t maxSgprAllocationnumber of available SGPRs + uint32_t sgprAllocationGranularitySGPRs are allocated in groups of this size + uint32_t vgprsPerSimdnumber of physical VGPRs per SIMD + uint32_t minVgprAllocationminimum number of VGPRs that can be allocated by a wave + uint32_t maxVgprAllocationnumber of available VGPRs + uint32_t vgprAllocationGranularityVGPRs are allocated in groups of this size VkStructureType sType - void* pNextPointer to next structure - VkShaderCorePropertiesFlagsAMD shaderCoreFeaturesfeatures supported by the shader core - uint32_t activeComputeUnitCountnumber of active compute units across all shader engines/arrays + void* pNextPointer to next structure + VkShaderCorePropertiesFlagsAMD shaderCoreFeaturesfeatures supported by the shader core + uint32_t activeComputeUnitCountnumber of active compute units 
across all shader engines/arrays VkStructureType sType - const void* pNext + const void* pNext VkPipelineRasterizationConservativeStateCreateFlagsEXT flagsReserved VkConservativeRasterizationModeEXT conservativeRasterizationModeConservative rasterization mode float extraPrimitiveOverestimationSizeExtra overestimation to add to the primitive VkStructureType sType - void* pNext + void* pNext VkBool32 shaderInputAttachmentArrayDynamicIndexing VkBool32 shaderUniformTexelBufferArrayDynamicIndexing VkBool32 shaderStorageTexelBufferArrayDynamicIndexing @@ -3255,55 +3474,55 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext - uint32_t maxUpdateAfterBindDescriptorsInAllPools - VkBool32 shaderUniformBufferArrayNonUniformIndexingNative - VkBool32 shaderSampledImageArrayNonUniformIndexingNative - VkBool32 shaderStorageBufferArrayNonUniformIndexingNative - VkBool32 shaderStorageImageArrayNonUniformIndexingNative - VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative - VkBool32 robustBufferAccessUpdateAfterBind - VkBool32 quadDivergentImplicitLod - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments - uint32_t maxPerStageUpdateAfterBindResources - uint32_t maxDescriptorSetUpdateAfterBindSamplers - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic - uint32_t maxDescriptorSetUpdateAfterBindSampledImages - uint32_t maxDescriptorSetUpdateAfterBindStorageImages - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments + void* pNext + uint32_t maxUpdateAfterBindDescriptorsInAllPools + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative + VkBool32 shaderSampledImageArrayNonUniformIndexingNative + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative + VkBool32 shaderStorageImageArrayNonUniformIndexingNative + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative + VkBool32 robustBufferAccessUpdateAfterBind + VkBool32 quadDivergentImplicitLod + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments + uint32_t maxPerStageUpdateAfterBindResources + uint32_t maxDescriptorSetUpdateAfterBindSamplers + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic + uint32_t maxDescriptorSetUpdateAfterBindSampledImages + uint32_t maxDescriptorSetUpdateAfterBindStorageImages + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments VkStructureType sType - const void* pNext + const void* pNext uint32_t bindingCount - const VkDescriptorBindingFlags* pBindingFlags + const VkDescriptorBindingFlags* pBindingFlags VkStructureType sType - const void* pNext + const void* pNext uint32_t descriptorSetCount const 
uint32_t* pDescriptorCounts VkStructureType sType - void* pNext + void* pNext uint32_t maxVariableDescriptorCount VkStructureType sType - const void* pNext + const void* pNext VkAttachmentDescriptionFlags flags VkFormat format VkSampleCountFlagBits samples @@ -3317,7 +3536,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t attachment VkImageLayout layout VkImageAspectFlags aspectMask @@ -3325,7 +3544,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkSubpassDescriptionFlags flags VkPipelineBindPoint pipelineBindPoint uint32_t viewMask @@ -3341,20 +3560,20 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t srcSubpass uint32_t dstSubpass - VkPipelineStageFlags srcStageMask - VkPipelineStageFlags dstStageMask + VkPipelineStageFlags srcStageMask + VkPipelineStageFlags dstStageMask VkAccessFlags srcAccessMask VkAccessFlags dstAccessMask VkDependencyFlags dependencyFlags - int32_t viewOffset + int32_t viewOffset VkStructureType sType - const void* pNext + const void* pNext VkRenderPassCreateFlags flags uint32_t attachmentCount const VkAttachmentDescription2* pAttachments @@ -3368,37 +3587,37 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkSubpassContents contents VkStructureType sType - const void* pNext + const void* pNext VkStructureType sType - void* pNext + void* pNext VkBool32 timelineSemaphore VkStructureType sType - void* pNext - uint64_t maxTimelineSemaphoreValueDifference + void* pNext + uint64_t maxTimelineSemaphoreValueDifference VkStructureType sType - const void* pNext + const void* pNext VkSemaphoreType semaphoreType uint64_t initialValue VkStructureType sType - const void* pNext + const void* pNext uint32_t waitSemaphoreValueCount const uint64_t* pWaitSemaphoreValues uint32_t signalSemaphoreValueCount @@ -3407,7 +3626,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkSemaphoreWaitFlags flags uint32_t semaphoreCount const VkSemaphore* pSemaphores @@ -3416,7 +3635,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkSemaphore semaphore uint64_t value @@ -3427,47 +3646,47 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t vertexBindingDivisorCount const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors VkStructureType sType - void* pNext - uint32_t maxVertexAttribDivisormax value of vertex attribute divisor + void* pNext + uint32_t maxVertexAttribDivisormax value of vertex attribute divisor VkStructureType sType - void* pNext - uint32_t pciDomain - uint32_t pciBus - uint32_t pciDevice - uint32_t pciFunction + void* pNext + uint32_t pciDomain + uint32_t pciBus + uint32_t pciDevice + uint32_t pciFunction VkStructureType sType - const void* pNext + const void* pNext struct AHardwareBuffer* buffer VkStructureType sType - void* pNext + void* pNext uint64_t androidHardwareBufferUsage VkStructureType sType - void* pNext + void* pNext VkDeviceSize allocationSize uint32_t memoryTypeBits VkStructureType sType - const void* pNext + const void* pNext VkDeviceMemory memory VkStructureType sType - void* pNext + void* pNext VkFormat format uint64_t externalFormat VkFormatFeatureFlags formatFeatures @@ -3479,17 +3698,17 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBool32 conditionalRenderingEnableWhether this 
secondary command buffer may be executed during an active conditional rendering VkStructureType sType - void* pNext + void* pNext uint64_t externalFormat VkStructureType sType - void* pNext + void* pNext VkBool32 storageBuffer8BitAccess8-bit integer variables supported in StorageBuffer VkBool32 uniformAndStorageBuffer8BitAccess8-bit integer variables supported in StorageBuffer and Uniform VkBool32 storagePushConstant88-bit integer variables supported in PushConstant @@ -3497,13 +3716,13 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkBool32 conditionalRendering VkBool32 inheritedConditionalRendering VkStructureType sType - void* pNext + void* pNext VkBool32 vulkanMemoryModel VkBool32 vulkanMemoryModelDeviceScope VkBool32 vulkanMemoryModelAvailabilityVisibilityChains @@ -3511,126 +3730,142 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkBool32 shaderBufferInt64Atomics VkBool32 shaderSharedInt64Atomics + + VkStructureType sType + void* pNext + VkBool32 shaderBufferFloat32Atomics + VkBool32 shaderBufferFloat32AtomicAdd + VkBool32 shaderBufferFloat64Atomics + VkBool32 shaderBufferFloat64AtomicAdd + VkBool32 shaderSharedFloat32Atomics + VkBool32 shaderSharedFloat32AtomicAdd + VkBool32 shaderSharedFloat64Atomics + VkBool32 shaderSharedFloat64AtomicAdd + VkBool32 shaderImageFloat32Atomics + VkBool32 shaderImageFloat32AtomicAdd + VkBool32 sparseImageFloat32Atomics + VkBool32 sparseImageFloat32AtomicAdd + VkStructureType sType - void* pNext + void* pNext VkBool32 vertexAttributeInstanceRateDivisor VkBool32 vertexAttributeInstanceRateZeroDivisor VkStructureType sType - void* pNext + void* pNext VkPipelineStageFlags checkpointExecutionStageMask VkStructureType sType - void* pNext + void* pNext VkPipelineStageFlagBits stage void* pCheckpointMarker VkStructureType sType - void* pNext - VkResolveModeFlags supportedDepthResolveModessupported depth resolve modes - VkResolveModeFlags supportedStencilResolveModessupported stencil resolve modes - VkBool32 independentResolveNonedepth and stencil resolve modes can be set independently if one of them is none - VkBool32 independentResolvedepth and stencil resolve modes can be set independently + void* pNext + VkResolveModeFlags supportedDepthResolveModessupported depth resolve modes + VkResolveModeFlags supportedStencilResolveModessupported stencil resolve modes + VkBool32 independentResolveNonedepth and stencil resolve modes can be set independently if one of them is none + VkBool32 independentResolvedepth and stencil resolve modes can be set independently VkStructureType sType - const void* pNext - VkResolveModeFlagBits depthResolveModedepth resolve mode - VkResolveModeFlagBits stencilResolveModestencil resolve mode + const void* pNext + VkResolveModeFlagBits depthResolveModedepth resolve mode + VkResolveModeFlagBits stencilResolveModestencil resolve mode const VkAttachmentReference2* pDepthStencilResolveAttachmentdepth/stencil resolve attachment VkStructureType sType - const void* pNext + const void* pNext VkFormat decodeMode VkStructureType sType - void* pNext + void* pNext VkBool32 decodeModeSharedExponent VkStructureType sType - void* pNext + void* pNext VkBool32 transformFeedback VkBool32 geometryStreams VkStructureType sType - void* pNext - uint32_t maxTransformFeedbackStreams - uint32_t maxTransformFeedbackBuffers - VkDeviceSize maxTransformFeedbackBufferSize - uint32_t maxTransformFeedbackStreamDataSize - uint32_t maxTransformFeedbackBufferDataSize - uint32_t 
maxTransformFeedbackBufferDataStride - VkBool32 transformFeedbackQueries - VkBool32 transformFeedbackStreamsLinesTriangles - VkBool32 transformFeedbackRasterizationStreamSelect - VkBool32 transformFeedbackDraw + void* pNext + uint32_t maxTransformFeedbackStreams + uint32_t maxTransformFeedbackBuffers + VkDeviceSize maxTransformFeedbackBufferSize + uint32_t maxTransformFeedbackStreamDataSize + uint32_t maxTransformFeedbackBufferDataSize + uint32_t maxTransformFeedbackBufferDataStride + VkBool32 transformFeedbackQueries + VkBool32 transformFeedbackStreamsLinesTriangles + VkBool32 transformFeedbackRasterizationStreamSelect + VkBool32 transformFeedbackDraw VkStructureType sType - const void* pNext + const void* pNext VkPipelineRasterizationStateStreamCreateFlagsEXT flags uint32_t rasterizationStream VkStructureTypesType - void* pNext + void* pNext VkBool32 representativeFragmentTest VkStructureType sType - const void* pNext + const void* pNext VkBool32 representativeFragmentTestEnable VkStructureType sType - void* pNext + void* pNext VkBool32 exclusiveScissor VkStructureType sType - const void* pNext + const void* pNext uint32_t exclusiveScissorCount const VkRect2D* pExclusiveScissors VkStructureType sType - void* pNext + void* pNext VkBool32 cornerSampledImage VkStructureType sType - void* pNext + void* pNext VkBool32 computeDerivativeGroupQuads VkBool32 computeDerivativeGroupLinear VkStructureType sType - void* pNext + void* pNext VkBool32 fragmentShaderBarycentric VkStructureType sType - void* pNext + void* pNext VkBool32 imageFootprint VkStructureType sType - void* pNext + void* pNext VkBool32 dedicatedAllocationImageAliasing @@ -3639,23 +3874,23 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBool32 shadingRateImageEnable - uint32_t viewportCount + uint32_t viewportCount const VkShadingRatePaletteNV* pShadingRatePalettes VkStructureType sType - void* pNext + void* pNext VkBool32 shadingRateImage VkBool32 shadingRateCoarseSampleOrder VkStructureType sType - void* pNext - VkExtent2D shadingRateTexelSize - uint32_t shadingRatePaletteSize - uint32_t shadingRateMaxCoarseSamples + void* pNext + VkExtent2D shadingRateTexelSize + uint32_t shadingRatePaletteSize + uint32_t shadingRateMaxCoarseSamples uint32_t pixelX @@ -3670,33 +3905,33 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkCoarseSampleOrderTypeNV sampleOrderType uint32_t customSampleOrderCount const VkCoarseSampleOrderCustomNV* pCustomSampleOrders VkStructureType sType - void* pNext + void* pNext VkBool32 taskShader VkBool32 meshShader VkStructureType sType - void* pNext - uint32_t maxDrawMeshTasksCount - uint32_t maxTaskWorkGroupInvocations - uint32_t maxTaskWorkGroupSize[3] - uint32_t maxTaskTotalMemorySize - uint32_t maxTaskOutputCount - uint32_t maxMeshWorkGroupInvocations - uint32_t maxMeshWorkGroupSize[3] - uint32_t maxMeshTotalMemorySize - uint32_t maxMeshOutputVertices - uint32_t maxMeshOutputPrimitives - uint32_t maxMeshMultiviewViewCount - uint32_t meshOutputPerVertexGranularity - uint32_t meshOutputPerPrimitiveGranularity + void* pNext + uint32_t maxDrawMeshTasksCount + uint32_t maxTaskWorkGroupInvocations + uint32_t maxTaskWorkGroupSize[3] + uint32_t maxTaskTotalMemorySize + uint32_t maxTaskOutputCount + uint32_t maxMeshWorkGroupInvocations + uint32_t maxMeshWorkGroupSize[3] + uint32_t maxMeshTotalMemorySize + uint32_t maxMeshOutputVertices + uint32_t maxMeshOutputPrimitives + uint32_t maxMeshMultiviewViewCount + uint32_t 
meshOutputPerVertexGranularity + uint32_t meshOutputPerPrimitiveGranularity uint32_t taskCount @@ -3704,7 +3939,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkRayTracingShaderGroupTypeKHR type uint32_t generalShader uint32_t closestHitShader @@ -3713,7 +3948,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkRayTracingShaderGroupTypeKHR type uint32_t generalShader uint32_t closestHitShader @@ -3723,7 +3958,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineCreateFlags flagsPipeline creation flags uint32_t stageCount const VkPipelineShaderStageCreateInfo* pStagesOne entry for each active shader stage @@ -3736,22 +3971,23 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineCreateFlags flagsPipeline creation flags uint32_t stageCount const VkPipelineShaderStageCreateInfo* pStagesOne entry for each active shader stage uint32_t groupCount const VkRayTracingShaderGroupCreateInfoKHR* pGroups - uint32_t maxRecursionDepth - VkPipelineLibraryCreateInfoKHR libraries + uint32_t maxPipelineRayRecursionDepth + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface + const VkPipelineDynamicStateCreateInfo* pDynamicState VkPipelineLayout layoutInterface layout of the pipeline VkPipeline basePipelineHandleIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of int32_t basePipelineIndexIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of VkStructureType sType - const void* pNext + const void* pNext VkBuffer vertexData VkDeviceSize vertexOffset uint32_t vertexCount @@ -3766,7 +4002,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBuffer aabbData uint32_t numAABBs uint32_t strideStride in bytes between AABBs @@ -3778,14 +4014,14 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkGeometryTypeKHR geometryType VkGeometryDataNV geometry VkGeometryFlagsKHR flags VkStructureType sType - const void* pNext + const void* pNext VkAccelerationStructureTypeNV type VkBuildAccelerationStructureFlagsNVflags uint32_t instanceCount @@ -3794,81 +4030,98 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkDeviceSize compactedSize VkAccelerationStructureInfoNV info - - VkStructureType sType - const void* pNext - VkAccelerationStructureKHR accelerationStructure + + VkStructureType sType + const void* pNext + VkAccelerationStructureNV accelerationStructure VkDeviceMemory memory VkDeviceSize memoryOffset uint32_t deviceIndexCount const uint32_t* pDeviceIndices - VkStructureType sType - const void* pNext + const void* pNext uint32_t accelerationStructureCount - const VkAccelerationStructureKHR* pAccelerationStructures + const VkAccelerationStructureKHR* pAccelerationStructures - - - VkStructureType sType - const void* pNext - VkAccelerationStructureMemoryRequirementsTypeKHR type - VkAccelerationStructureBuildTypeKHR buildType - VkAccelerationStructureKHR accelerationStructure + + VkStructureType sType + const void* pNext + uint32_t accelerationStructureCount + const VkAccelerationStructureNV* pAccelerationStructures VkStructureType sType - const void* pNext + const 
void* pNext VkAccelerationStructureMemoryRequirementsTypeNV type VkAccelerationStructureNV accelerationStructure - - VkStructureType sType - void* pNext - VkBool32 rayTracing - VkBool32 rayTracingShaderGroupHandleCaptureReplay - VkBool32 rayTracingShaderGroupHandleCaptureReplayMixed - VkBool32 rayTracingAccelerationStructureCaptureReplay - VkBool32 rayTracingIndirectTraceRays - VkBool32 rayTracingIndirectAccelerationStructureBuild - VkBool32 rayTracingHostAccelerationStructureCommands - VkBool32 rayQuery - VkBool32 rayTracingPrimitiveCulling + + VkStructureType sType + void* pNext + VkBool32 accelerationStructure + VkBool32 accelerationStructureCaptureReplay + VkBool32 accelerationStructureIndirectBuild + VkBool32 accelerationStructureHostCommands + VkBool32 descriptorBindingAccelerationStructureUpdateAfterBind - - VkStructureType sType - void* pNext - uint32_t shaderGroupHandleSize - uint32_t maxRecursionDepth - uint32_t maxShaderGroupStride - uint32_t shaderGroupBaseAlignment - uint64_t maxGeometryCount - uint64_t maxInstanceCount - uint64_t maxPrimitiveCount - uint32_t maxDescriptorSetAccelerationStructures - uint32_t shaderGroupHandleCaptureReplaySize + + VkStructureType sType + void* pNext + VkBool32 rayTracingPipeline + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplay + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed + VkBool32 rayTracingPipelineTraceRaysIndirect + VkBool32 rayTraversalPrimitiveCulling + + + VkStructureType sType + void* pNext + VkBool32 rayQuery + + + VkStructureType sType + void* pNext + uint64_t maxGeometryCount + uint64_t maxInstanceCount + uint64_t maxPrimitiveCount + uint32_t maxPerStageDescriptorAccelerationStructures + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures + uint32_t maxDescriptorSetAccelerationStructures + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures + uint32_t minAccelerationStructureScratchOffsetAlignment + + + VkStructureType sType + void* pNext + uint32_t shaderGroupHandleSize + uint32_t maxRayRecursionDepth + uint32_t maxShaderGroupStride + uint32_t shaderGroupBaseAlignment + uint32_t shaderGroupHandleCaptureReplaySize + uint32_t maxRayDispatchInvocationCount + uint32_t shaderGroupHandleAlignment + uint32_t maxRayHitAttributeSize VkStructureType sType - void* pNext - uint32_t shaderGroupHandleSize - uint32_t maxRecursionDepth - uint32_t maxShaderGroupStride - uint32_t shaderGroupBaseAlignment - uint64_t maxGeometryCount - uint64_t maxInstanceCount - uint64_t maxTriangleCount - uint32_t maxDescriptorSetAccelerationStructures + void* pNext + uint32_t shaderGroupHandleSize + uint32_t maxRecursionDepth + uint32_t maxShaderGroupStride + uint32_t shaderGroupBaseAlignment + uint64_t maxGeometryCount + uint64_t maxInstanceCount + uint64_t maxTriangleCount + uint32_t maxDescriptorSetAccelerationStructures - - VkBuffer buffer - VkDeviceSize offset + + VkDeviceAddress deviceAddress VkDeviceSize stride VkDeviceSize size @@ -3879,7 +4132,7 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext uint32_t drmFormatModifierCount VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties @@ -3890,7 +4143,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint64_t drmFormatModifier VkSharingMode sharingMode uint32_t queueFamilyIndexCount @@ -3898,99 +4151,112 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t drmFormatModifierCount const uint64_t* pDrmFormatModifiers 
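The hunks above replace the monolithic NV ray-tracing feature struct with the split KHR feature structures (acceleration structure, ray-tracing pipeline, ray query). A minimal sketch of probing all three in one vkGetPhysicalDeviceFeatures2 call, assuming a valid physical_device and headers that ship the KHR extensions:

#include <vulkan/vulkan.h>

/* Probe the three split KHR ray-tracing feature structures at once. */
static int supports_khr_ray_tracing(VkPhysicalDevice physical_device)
{
    VkPhysicalDeviceRayQueryFeaturesKHR ray_query = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR,
    };
    VkPhysicalDeviceRayTracingPipelineFeaturesKHR rt_pipeline = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR,
        .pNext = &ray_query,
    };
    VkPhysicalDeviceAccelerationStructureFeaturesKHR accel = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR,
        .pNext = &rt_pipeline,
    };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &accel, /* whole chain is filled by one call */
    };
    vkGetPhysicalDeviceFeatures2(physical_device, &features2);
    return accel.accelerationStructure &&
           rt_pipeline.rayTracingPipeline &&
           ray_query.rayQuery;
}

Note the related renames visible in the diff: maxRecursionDepth becomes maxPipelineRayRecursionDepth on the ray-tracing pipeline create info, and the strided shader-binding-table region now carries a VkDeviceAddress rather than a buffer/offset pair.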
VkStructureType sType - const void* pNext + const void* pNext uint64_t drmFormatModifier uint32_t drmFormatModifierPlaneCount const VkSubresourceLayout* pPlaneLayouts VkStructureType sType - void* pNext + void* pNext uint64_t drmFormatModifier VkStructureType sType - const void* pNext + const void* pNext VkImageUsageFlags stencilUsage VkStructureType sType - const void* pNext + const void* pNext VkMemoryOverallocationBehaviorAMD overallocationBehavior - + VkStructureType sType - void* pNext + void* pNext VkBool32 fragmentDensityMap VkBool32 fragmentDensityMapDynamic VkBool32 fragmentDensityMapNonSubsampledImages + + VkStructureType sType + void* pNext + VkBool32 fragmentDensityMapDeferred + VkStructureType sType - void* pNext - VkExtent2D minFragmentDensityTexelSize - VkExtent2D maxFragmentDensityTexelSize - VkBool32 fragmentDensityInvocations + void* pNext + VkExtent2D minFragmentDensityTexelSize + VkExtent2D maxFragmentDensityTexelSize + VkBool32 fragmentDensityInvocations + + + VkStructureType sType + void* pNext + VkBool32 subsampledLoads + VkBool32 subsampledCoarseReconstructionEarlyAccess + uint32_t maxSubsampledArrayLayers + uint32_t maxDescriptorSetSubsampledSamplers VkStructureType sType - const void* pNext + const void* pNext VkAttachmentReference fragmentDensityMapAttachment VkStructureType sType - void* pNext + void* pNext VkBool32 scalarBlockLayout VkStructureType sType - const void* pNext + const void* pNext VkBool32 supportsProtectedRepresents if surface can be protected VkStructureType sType - void* pNext + void* pNext VkBool32 uniformBufferStandardLayout VkStructureType sType - void* pNext + void* pNext VkBool32 depthClipEnable VkStructureType sType - const void* pNext + const void* pNext VkPipelineRasterizationDepthClipStateCreateFlagsEXT flagsReserved VkBool32 depthClipEnable VkStructureType sType - void* pNext + void* pNext VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS] VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS] VkStructureType sType - void* pNext + void* pNext VkBool32 memoryPriority VkStructureType sType - const void* pNext + const void* pNext float priority VkStructureType sType - void* pNext + void* pNext VkBool32 bufferDeviceAddress VkBool32 bufferDeviceAddressCaptureReplay VkBool32 bufferDeviceAddressMultiDevice @@ -3998,7 +4264,7 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkBool32 bufferDeviceAddress VkBool32 bufferDeviceAddressCaptureReplay VkBool32 bufferDeviceAddressMultiDevice @@ -4006,49 +4272,49 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkBuffer buffer VkStructureType sType - const void* pNext + const void* pNext uint64_t opaqueCaptureAddress VkStructureType sType - const void* pNext + const void* pNext VkDeviceAddress deviceAddress VkStructureType sType - void* pNext + void* pNext VkImageViewType imageViewType VkStructureType sType - void* pNext + void* pNext VkBool32 filterCubicThe combinations of format, image type (and image view type if provided) can be filtered with VK_FILTER_CUBIC_EXT VkBool32 filterCubicMinmaxThe combination of format, image type (and image view type if provided) can be filtered with VK_FILTER_CUBIC_EXT and ReductionMode of Min or Max VkStructureType sType - void* pNext + void* pNext VkBool32 imagelessFramebuffer VkStructureType sType - const void* pNext + const void* pNext uint32_t attachmentImageInfoCount const VkFramebufferAttachmentImageInfo* pAttachmentImageInfos VkStructureType sType - const void* pNext + const void* pNext VkImageCreateFlags 
flagsImage creation flags VkImageUsageFlags usageImage usage flags uint32_t width @@ -4060,30 +4326,30 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t attachmentCount const VkImageView* pAttachments VkStructureType sType - void* pNext + void* pNext VkBool32 textureCompressionASTC_HDR VkStructureType sType - void* pNext + void* pNext VkBool32 cooperativeMatrix VkBool32 cooperativeMatrixRobustBufferAccess VkStructureType sType - void* pNext - VkShaderStageFlags cooperativeMatrixSupportedStages + void* pNext + VkShaderStageFlags cooperativeMatrixSupportedStages VkStructureType sType - void* pNext + void* pNext uint32_t MSize uint32_t NSize uint32_t KSize @@ -4095,25 +4361,25 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkBool32 ycbcrImageArrays VkStructureType sType - const void* pNext + const void* pNext VkImageView imageView VkDescriptorType descriptorType VkSampler sampler VkStructureType sType - void* pNext + void* pNext VkDeviceAddress deviceAddress VkDeviceSize size VkStructureType sType - const void* pNext + const void* pNext GgpFrameToken frameToken @@ -4122,39 +4388,40 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipelineCreationFeedbackEXT* pPipelineCreationFeedbackOutput pipeline creation feedback. uint32_t pipelineStageCreationFeedbackCount VkPipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacksOne entry for each shader stage specified in the parent Vk*PipelineCreateInfo struct VkStructureType sType - void* pNext + void* pNext VkFullScreenExclusiveEXT fullScreenExclusive VkStructureType sType - const void* pNext + const void* pNext HMONITOR hmonitor VkStructureType sType - void* pNext + void* pNext VkBool32 fullScreenExclusiveSupported VkStructureType sType - void* pNext + void* pNext VkBool32 performanceCounterQueryPoolsperformance counters supported in query pools - VkBool32 performanceCounterMultipleQueryPoolsperformance counters from multiple query pools can be accessed in the same primary command buffer + VkBool32 performanceCounterMultipleQueryPoolsperformance counters from multiple query pools can be accessed in the same primary command buffer + - VkStructureType sType - void* pNext - VkBool32 allowCommandBufferQueryCopiesFlag to specify whether performance queries are allowed to be used in vkCmdCopyQueryPoolResults + VkStructureType sType + void* pNext + VkBool32 allowCommandBufferQueryCopiesFlag to specify whether performance queries are allowed to be used in vkCmdCopyQueryPoolResults VkStructureType sType - const void* pNext + const void* pNext VkPerformanceCounterUnitKHR unit VkPerformanceCounterScopeKHR scope VkPerformanceCounterStorageKHR storage @@ -4162,7 +4429,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPerformanceCounterDescriptionFlagsKHR flags char name[VK_MAX_DESCRIPTION_SIZE] char category[VK_MAX_DESCRIPTION_SIZE] @@ -4170,7 +4437,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext uint32_t queueFamilyIndex uint32_t counterIndexCount const uint32_t* pCounterIndices @@ -4185,34 +4452,34 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkAcquireProfilingLockFlagsKHR flagsAcquire profiling lock flags uint64_t timeout - + VkStructureType sType - const void* pNext + const void* pNext uint32_t counterPassIndexIndex for which counter pass to submit VkStructureType sType - const void* pNext + 
const void* pNext VkHeadlessSurfaceCreateFlagsEXT flags VkStructureTypesType - void* pNext + void* pNext VkBool32 coverageReductionMode VkStructureType sType - const void* pNext + const void* pNext VkPipelineCoverageReductionStateCreateFlagsNV flags VkCoverageReductionModeNV coverageReductionMode VkStructureType sType - void* pNext + void* pNext VkCoverageReductionModeNV coverageReductionMode VkSampleCountFlagBits rasterizationSamples VkSampleCountFlags depthStencilSamples @@ -4220,7 +4487,7 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext VkBool32 shaderIntegerFunctions2 @@ -4232,102 +4499,102 @@ typedef void CAMetalLayer; VkPerformanceValueTypeINTEL type - VkPerformanceValueDataINTEL data + VkPerformanceValueDataINTEL data VkStructureType sType - const void* pNext - void* pUserData + const void* pNext + void* pUserData VkStructureType sType - const void* pNext + const void* pNext VkQueryPoolSamplingModeINTEL performanceCountersSampling VkStructureType sType - const void* pNext + const void* pNext uint64_t marker VkStructureType sType - const void* pNext + const void* pNext uint32_t marker VkStructureType sType - const void* pNext + const void* pNext VkPerformanceOverrideTypeINTEL type VkBool32 enable uint64_t parameter VkStructureType sType - const void* pNext + const void* pNext VkPerformanceConfigurationTypeINTEL type VkStructureType sType - void* pNext + void* pNext VkBool32 shaderSubgroupClock VkBool32 shaderDeviceClock VkStructureType sType - void* pNext + void* pNext VkBool32 indexTypeUint8 VkStructureType sType - void* pNext - uint32_t shaderSMCount - uint32_t shaderWarpsPerSM + void* pNext + uint32_t shaderSMCount + uint32_t shaderWarpsPerSM VkStructureTypesType - void* pNext + void* pNext VkBool32 shaderSMBuiltins VkStructureType sType - void* pNextPointer to next structure + void* pNextPointer to next structure VkBool32 fragmentShaderSampleInterlock VkBool32 fragmentShaderPixelInterlock VkBool32 fragmentShaderShadingRateInterlock VkStructureTypesType - void* pNext + void* pNext VkBool32 separateDepthStencilLayouts VkStructureTypesType - void* pNext + void* pNext VkImageLayout stencilLayout VkStructureTypesType - void* pNext + void* pNext VkImageLayout stencilInitialLayout VkImageLayout stencilFinalLayout VkStructureType sType - void* pNext + void* pNext VkBool32 pipelineExecutableInfo VkStructureType sType - const void* pNext + const void* pNext VkPipeline pipeline VkStructureType sType - void* pNext + void* pNext VkShaderStageFlags stages char name[VK_MAX_DESCRIPTION_SIZE] char description[VK_MAX_DESCRIPTION_SIZE] @@ -4335,7 +4602,7 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkPipeline pipeline uint32_t executableIndex @@ -4347,73 +4614,73 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext + void* pNext char name[VK_MAX_DESCRIPTION_SIZE] char description[VK_MAX_DESCRIPTION_SIZE] VkPipelineExecutableStatisticFormatKHR format - VkPipelineExecutableStatisticValueKHR value + VkPipelineExecutableStatisticValueKHR value VkStructureType sType - void* pNext + void* pNext char name[VK_MAX_DESCRIPTION_SIZE] char description[VK_MAX_DESCRIPTION_SIZE] VkBool32 isText - size_t dataSize + size_t dataSize void* pData VkStructureType sType - void* pNext + void* pNext VkBool32 shaderDemoteToHelperInvocation VkStructureType sType - void* pNext + void* pNext VkBool32 texelBufferAlignment VkStructureType sType - void* pNext - VkDeviceSize storageTexelBufferOffsetAlignmentBytes - VkBool32 
storageTexelBufferOffsetSingleTexelAlignment - VkDeviceSize uniformTexelBufferOffsetAlignmentBytes - VkBool32 uniformTexelBufferOffsetSingleTexelAlignment + void* pNext + VkDeviceSize storageTexelBufferOffsetAlignmentBytes + VkBool32 storageTexelBufferOffsetSingleTexelAlignment + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment - VkStructureType sType - void* pNext - VkBool32 subgroupSizeControl - VkBool32 computeFullSubgroups + VkStructureType sType + void* pNext + VkBool32 subgroupSizeControl + VkBool32 computeFullSubgroups - VkStructureType sType - void* pNext - uint32_t minSubgroupSizeThe minimum subgroup size supported by this device - uint32_t maxSubgroupSizeThe maximum subgroup size supported by this device - uint32_t maxComputeWorkgroupSubgroupsThe maximum number of subgroups supported in a workgroup - VkShaderStageFlags requiredSubgroupSizeStagesThe shader stages that support specifying a subgroup size + VkStructureType sType + void* pNext + uint32_t minSubgroupSizeThe minimum subgroup size supported by this device + uint32_t maxSubgroupSizeThe maximum subgroup size supported by this device + uint32_t maxComputeWorkgroupSubgroupsThe maximum number of subgroups supported in a workgroup + VkShaderStageFlags requiredSubgroupSizeStagesThe shader stages that support specifying a subgroup size VkStructureType sType - void* pNext + void* pNext uint32_t requiredSubgroupSize VkStructureType sType - const void* pNext + const void* pNext uint64_t opaqueCaptureAddress VkStructureType sType - const void* pNext + const void* pNext VkDeviceMemory memory VkStructureType sType - void* pNext + void* pNext VkBool32 rectangularLines VkBool32 bresenhamLines VkBool32 smoothLines @@ -4423,25 +4690,25 @@ typedef void CAMetalLayer; VkStructureType sType - void* pNext - uint32_t lineSubPixelPrecisionBits + void* pNext + uint32_t lineSubPixelPrecisionBits VkStructureType sType - const void* pNext + const void* pNext VkLineRasterizationModeEXT lineRasterizationMode VkBool32 stippledLineEnable - uint32_t lineStippleFactor - uint16_t lineStipplePattern + uint32_t lineStippleFactor + uint16_t lineStipplePattern VkStructureType sType - void* pNext + void* pNext VkBool32 pipelineCreationCacheControl VkStructureTypesType - void* pNext + void* pNext VkBool32 storageBuffer16BitAccess16-bit integer/floating-point variables supported in BufferBlock VkBool32 uniformAndStorageBuffer16BitAccess16-bit integer/floating-point variables supported in BufferBlock and Block VkBool32 storagePushConstant1616-bit integer/floating-point variables supported in PushConstant @@ -4457,26 +4724,26 @@ typedef void CAMetalLayer; VkStructureTypesType - void* pNext - uint8_t deviceUUID[VK_UUID_SIZE] - uint8_t driverUUID[VK_UUID_SIZE] - uint8_t deviceLUID[VK_LUID_SIZE] - uint32_t deviceNodeMask - VkBool32 deviceLUIDValid - uint32_t subgroupSizeThe size of a subgroup for this queue. - VkShaderStageFlags subgroupSupportedStagesBitfield of what shader stages support subgroup operations - VkSubgroupFeatureFlags subgroupSupportedOperationsBitfield of what subgroup operations are supported. - VkBool32 subgroupQuadOperationsInAllStagesFlag to specify whether quad operations are available in all stages. 
- VkPointClippingBehavior pointClippingBehavior - uint32_t maxMultiviewViewCountmax number of views in a subpass - uint32_t maxMultiviewInstanceIndexmax instance index for a draw in a multiview subpass - VkBool32 protectedNoFault - uint32_t maxPerSetDescriptors - VkDeviceSize maxMemoryAllocationSize + void* pNext + uint8_t deviceUUID[VK_UUID_SIZE] + uint8_t driverUUID[VK_UUID_SIZE] + uint8_t deviceLUID[VK_LUID_SIZE] + uint32_t deviceNodeMask + VkBool32 deviceLUIDValid + uint32_t subgroupSizeThe size of a subgroup for this queue. + VkShaderStageFlags subgroupSupportedStagesBitfield of what shader stages support subgroup operations + VkSubgroupFeatureFlags subgroupSupportedOperationsBitfield of what subgroup operations are supported. + VkBool32 subgroupQuadOperationsInAllStagesFlag to specify whether quad operations are available in all stages. + VkPointClippingBehavior pointClippingBehavior + uint32_t maxMultiviewViewCountmax number of views in a subpass + uint32_t maxMultiviewInstanceIndexmax instance index for a draw in a multiview subpass + VkBool32 protectedNoFault + uint32_t maxPerSetDescriptors + VkDeviceSize maxMemoryAllocationSize VkStructureTypesType - void* pNext + void* pNext VkBool32 samplerMirrorClampToEdge VkBool32 drawIndirectCount VkBool32 storageBuffer8BitAccess8-bit integer variables supported in StorageBuffer @@ -4527,73 +4794,73 @@ typedef void CAMetalLayer; VkStructureTypesType - void* pNext - VkDriverId driverID - char driverName[VK_MAX_DRIVER_NAME_SIZE] - char driverInfo[VK_MAX_DRIVER_INFO_SIZE] - VkConformanceVersion conformanceVersion - VkShaderFloatControlsIndependencedenormBehaviorIndependence - VkShaderFloatControlsIndependenceroundingModeIndependence - VkBool32 shaderSignedZeroInfNanPreserveFloat16An implementation can preserve signed zero, nan, inf - VkBool32 shaderSignedZeroInfNanPreserveFloat32An implementation can preserve signed zero, nan, inf - VkBool32 shaderSignedZeroInfNanPreserveFloat64An implementation can preserve signed zero, nan, inf - VkBool32 shaderDenormPreserveFloat16An implementation can preserve denormals - VkBool32 shaderDenormPreserveFloat32An implementation can preserve denormals - VkBool32 shaderDenormPreserveFloat64An implementation can preserve denormals - VkBool32 shaderDenormFlushToZeroFloat16An implementation can flush to zero denormals - VkBool32 shaderDenormFlushToZeroFloat32An implementation can flush to zero denormals - VkBool32 shaderDenormFlushToZeroFloat64An implementation can flush to zero denormals - VkBool32 shaderRoundingModeRTEFloat16An implementation can support RTE - VkBool32 shaderRoundingModeRTEFloat32An implementation can support RTE - VkBool32 shaderRoundingModeRTEFloat64An implementation can support RTE - VkBool32 shaderRoundingModeRTZFloat16An implementation can support RTZ - VkBool32 shaderRoundingModeRTZFloat32An implementation can support RTZ - VkBool32 shaderRoundingModeRTZFloat64An implementation can support RTZ - uint32_t maxUpdateAfterBindDescriptorsInAllPools - VkBool32 shaderUniformBufferArrayNonUniformIndexingNative - VkBool32 shaderSampledImageArrayNonUniformIndexingNative - VkBool32 shaderStorageBufferArrayNonUniformIndexingNative - VkBool32 shaderStorageImageArrayNonUniformIndexingNative - VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative - VkBool32 robustBufferAccessUpdateAfterBind - VkBool32 quadDivergentImplicitLod - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers - uint32_t 
maxPerStageDescriptorUpdateAfterBindStorageBuffers - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments - uint32_t maxPerStageUpdateAfterBindResources - uint32_t maxDescriptorSetUpdateAfterBindSamplers - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic - uint32_t maxDescriptorSetUpdateAfterBindSampledImages - uint32_t maxDescriptorSetUpdateAfterBindStorageImages - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments - VkResolveModeFlags supportedDepthResolveModessupported depth resolve modes - VkResolveModeFlags supportedStencilResolveModessupported stencil resolve modes - VkBool32 independentResolveNonedepth and stencil resolve modes can be set independently if one of them is none - VkBool32 independentResolvedepth and stencil resolve modes can be set independently - VkBool32 filterMinmaxSingleComponentFormats - VkBool32 filterMinmaxImageComponentMapping - uint64_t maxTimelineSemaphoreValueDifference - VkSampleCountFlags framebufferIntegerColorSampleCounts + void* pNext + VkDriverId driverID + char driverName[VK_MAX_DRIVER_NAME_SIZE] + char driverInfo[VK_MAX_DRIVER_INFO_SIZE] + VkConformanceVersion conformanceVersion + VkShaderFloatControlsIndependencedenormBehaviorIndependence + VkShaderFloatControlsIndependenceroundingModeIndependence + VkBool32 shaderSignedZeroInfNanPreserveFloat16An implementation can preserve signed zero, nan, inf + VkBool32 shaderSignedZeroInfNanPreserveFloat32An implementation can preserve signed zero, nan, inf + VkBool32 shaderSignedZeroInfNanPreserveFloat64An implementation can preserve signed zero, nan, inf + VkBool32 shaderDenormPreserveFloat16An implementation can preserve denormals + VkBool32 shaderDenormPreserveFloat32An implementation can preserve denormals + VkBool32 shaderDenormPreserveFloat64An implementation can preserve denormals + VkBool32 shaderDenormFlushToZeroFloat16An implementation can flush to zero denormals + VkBool32 shaderDenormFlushToZeroFloat32An implementation can flush to zero denormals + VkBool32 shaderDenormFlushToZeroFloat64An implementation can flush to zero denormals + VkBool32 shaderRoundingModeRTEFloat16An implementation can support RTE + VkBool32 shaderRoundingModeRTEFloat32An implementation can support RTE + VkBool32 shaderRoundingModeRTEFloat64An implementation can support RTE + VkBool32 shaderRoundingModeRTZFloat16An implementation can support RTZ + VkBool32 shaderRoundingModeRTZFloat32An implementation can support RTZ + VkBool32 shaderRoundingModeRTZFloat64An implementation can support RTZ + uint32_t maxUpdateAfterBindDescriptorsInAllPools + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative + VkBool32 shaderSampledImageArrayNonUniformIndexingNative + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative + VkBool32 shaderStorageImageArrayNonUniformIndexingNative + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative + VkBool32 robustBufferAccessUpdateAfterBind + VkBool32 quadDivergentImplicitLod + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages + uint32_t 
maxPerStageDescriptorUpdateAfterBindStorageImages + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments + uint32_t maxPerStageUpdateAfterBindResources + uint32_t maxDescriptorSetUpdateAfterBindSamplers + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic + uint32_t maxDescriptorSetUpdateAfterBindSampledImages + uint32_t maxDescriptorSetUpdateAfterBindStorageImages + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments + VkResolveModeFlags supportedDepthResolveModessupported depth resolve modes + VkResolveModeFlags supportedStencilResolveModessupported stencil resolve modes + VkBool32 independentResolveNonedepth and stencil resolve modes can be set independently if one of them is none + VkBool32 independentResolvedepth and stencil resolve modes can be set independently + VkBool32 filterMinmaxSingleComponentFormats + VkBool32 filterMinmaxImageComponentMapping + uint64_t maxTimelineSemaphoreValueDifference + VkSampleCountFlags framebufferIntegerColorSampleCounts VkStructureType sType - const void* pNext + const void* pNext VkPipelineCompilerControlFlagsAMD compilerControlFlags VkStructureType sType - void* pNext + void* pNext VkBool32 deviceCoherentMemory VkStructureType sType - void* pNext + void* pNext char name[VK_MAX_EXTENSION_NAME_SIZE] char version[VK_MAX_EXTENSION_NAME_SIZE] VkToolPurposeFlagsEXT purposes @@ -4602,18 +4869,18 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext - VkClearColorValue customBorderColor + const void* pNext + VkClearColorValue customBorderColor VkFormat format VkStructureType sType - void* pNext - uint32_t maxCustomBorderColorSamplers + void* pNext + uint32_t maxCustomBorderColorSamplers VkStructureType sType - void* pNext + void* pNext VkBool32 customBorderColors VkBool32 customBorderColorWithoutFormat @@ -4627,25 +4894,26 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkFormat vertexFormat - VkDeviceOrHostAddressConstKHR vertexData + VkDeviceOrHostAddressConstKHR vertexData VkDeviceSize vertexStride + uint32_t maxVertex VkIndexType indexType - VkDeviceOrHostAddressConstKHR indexData - VkDeviceOrHostAddressConstKHR transformData + VkDeviceOrHostAddressConstKHR indexData + VkDeviceOrHostAddressConstKHR transformData VkStructureType sType - const void* pNext - VkDeviceOrHostAddressConstKHR data + const void* pNext + VkDeviceOrHostAddressConstKHR data VkDeviceSize stride VkStructureType sType - const void* pNext + const void* pNext VkBool32 arrayOfPointers - VkDeviceOrHostAddressConstKHR data + VkDeviceOrHostAddressConstKHR data VkAccelerationStructureGeometryTrianglesDataKHR triangles @@ -4654,48 +4922,38 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkGeometryTypeKHR geometryType VkAccelerationStructureGeometryDataKHR geometry VkGeometryFlagsKHR flags VkStructureType sType - const void* pNext + const void* pNext VkAccelerationStructureTypeKHR type VkBuildAccelerationStructureFlagsKHR flags - VkBool32 update - VkAccelerationStructureKHR srcAccelerationStructure - VkAccelerationStructureKHR dstAccelerationStructure - VkBool32 geometryArrayOfPointers + VkBuildAccelerationStructureModeKHR mode + VkAccelerationStructureKHR srcAccelerationStructure + VkAccelerationStructureKHR dstAccelerationStructure uint32_t geometryCount - const 
VkAccelerationStructureGeometryKHR* const* ppGeometries - VkDeviceOrHostAddressKHR scratchData + const VkAccelerationStructureGeometryKHR* pGeometries + const VkAccelerationStructureGeometryKHR* const* ppGeometries + VkDeviceOrHostAddressKHR scratchData - + uint32_t primitiveCount uint32_t primitiveOffset - uint32_t firstVertex - uint32_t transformOffset - - - VkStructureType sType - const void* pNext - VkGeometryTypeKHR geometryType - uint32_t maxPrimitiveCount - VkIndexType indexType - uint32_t maxVertexCount - VkFormat vertexFormat - VkBool32 allowsTransforms + uint32_t firstVertex + uint32_t transformOffset VkStructureType sType - const void* pNext - VkDeviceSize compactedSize + const void* pNext + VkAccelerationStructureCreateFlagsKHR createFlags + VkBuffer buffer + VkDeviceSize offsetSpecified in bytes + VkDeviceSize size VkAccelerationStructureTypeKHR type - VkBuildAccelerationStructureFlagsKHR flags - uint32_t maxGeometryCount - const VkAccelerationStructureCreateGeometryTypeInfoKHR* pGeometryInfos VkDeviceAddress deviceAddress @@ -4723,125 +4981,951 @@ typedef void CAMetalLayer; VkStructureType sType - const void* pNext + const void* pNext VkAccelerationStructureKHR accelerationStructure - - VkStructureType sType - const void* pNext - const uint8_t* versionData + + VkStructureType sType + const void* pNext + const uint8_t* pVersionData VkStructureType sType - const void* pNext + const void* pNext VkAccelerationStructureKHR src VkAccelerationStructureKHR dst VkCopyAccelerationStructureModeKHR mode VkStructureType sType - const void* pNext + const void* pNext VkAccelerationStructureKHR src - VkDeviceOrHostAddressKHR dst + VkDeviceOrHostAddressKHR dst VkCopyAccelerationStructureModeKHR mode VkStructureType sType - const void* pNext - VkDeviceOrHostAddressConstKHR src + const void* pNext + VkDeviceOrHostAddressConstKHR src VkAccelerationStructureKHR dst VkCopyAccelerationStructureModeKHR mode VkStructureType sType - const void* pNext - uint32_t maxPayloadSize - uint32_t maxAttributeSize - uint32_t maxCallableSize - - - VkStructureType sType - const void* pNext - VkDeferredOperationKHR operationHandle + const void* pNext + uint32_t maxPipelineRayPayloadSize + uint32_t maxPipelineRayHitAttributeSize VkStructureType sType - const void* pNext + const void* pNext uint32_t libraryCount const VkPipeline* pLibraries VkStructureType sType - void* pNext + void* pNext VkBool32 extendedDynamicState + + VkStructureType sType + void* pNext + VkBool32 extendedDynamicState2 + VkBool32 extendedDynamicState2LogicOp + VkBool32 extendedDynamicState2PatchControlPoints + VkStructureType sType - void* pNextPointer to next structure + void* pNextPointer to next structure + VkSurfaceTransformFlagBitsKHR transform + + + VkStructureType sType + const void* pNext VkSurfaceTransformFlagBitsKHR transform VkStructureType sType - void* pNextPointer to next structure + void* pNextPointer to next structure VkSurfaceTransformFlagBitsKHR transform VkRect2D renderArea VkStructureTypesType - void* pNext + void* pNext VkBool32 diagnosticsConfig VkStructureType sType - const void* pNext + const void* pNext VkDeviceDiagnosticsConfigFlagsNV flags + + VkStructureType sType + void* pNext + VkBool32 shaderZeroInitializeWorkgroupMemory + + + VkStructureType sType + void* pNext + VkBool32 shaderSubgroupUniformControlFlow + VkStructureType sType - void* pNext + void* pNext VkBool32 robustBufferAccess2 VkBool32 robustImageAccess2 VkBool32 nullDescriptor VkStructureType sType + void* pNext + VkDeviceSize 
robustStorageBufferAccessSizeAlignment + VkDeviceSize robustUniformBufferAccessSizeAlignment + + + VkStructureType sType + void* pNext + VkBool32 robustImageAccess + + + VkStructureType sType void* pNext - VkDeviceSize robustStorageBufferAccessSizeAlignment - VkDeviceSize robustUniformBufferAccessSizeAlignment + VkBool32 workgroupMemoryExplicitLayout + VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout + VkBool32 workgroupMemoryExplicitLayout8BitAccess + VkBool32 workgroupMemoryExplicitLayout16BitAccess + + + VkStructureType sType + void* pNext + VkBool32 constantAlphaColorBlendFactors + VkBool32 events + VkBool32 imageViewFormatReinterpretation + VkBool32 imageViewFormatSwizzle + VkBool32 imageView2DOn3DImage + VkBool32 multisampleArrayImage + VkBool32 mutableComparisonSamplers + VkBool32 pointPolygons + VkBool32 samplerMipLodBias + VkBool32 separateStencilMaskRef + VkBool32 shaderSampleRateInterpolationFunctions + VkBool32 tessellationIsolines + VkBool32 tessellationPointMode + VkBool32 triangleFans + VkBool32 vertexAttributeAccessBeyondStride + + + VkStructureType sType + void* pNext + uint32_t minVertexInputBindingStrideAlignment + + + VkStructureType sType + void* pNext + VkBool32 formatA4R4G4B4 + VkBool32 formatA4B4G4R4 + + + VkStructureType sType + const void* pNext + VkDeviceSize srcOffsetSpecified in bytes + VkDeviceSize dstOffsetSpecified in bytes + VkDeviceSize sizeSpecified in bytes + + + VkStructureType sType + const void* pNext + VkImageSubresourceLayers srcSubresource + VkOffset3D srcOffsetSpecified in pixels for both compressed and uncompressed images + VkImageSubresourceLayers dstSubresource + VkOffset3D dstOffsetSpecified in pixels for both compressed and uncompressed images + VkExtent3D extentSpecified in pixels for both compressed and uncompressed images + + + VkStructureType sType + const void* pNext + VkImageSubresourceLayers srcSubresource + VkOffset3D srcOffsets[2]Specified in pixels for both compressed and uncompressed images + VkImageSubresourceLayers dstSubresource + VkOffset3D dstOffsets[2]Specified in pixels for both compressed and uncompressed images + + + VkStructureType sType + const void* pNext + VkDeviceSize bufferOffsetSpecified in bytes + uint32_t bufferRowLengthSpecified in texels + uint32_t bufferImageHeight + VkImageSubresourceLayers imageSubresource + VkOffset3D imageOffsetSpecified in pixels for both compressed and uncompressed images + VkExtent3D imageExtentSpecified in pixels for both compressed and uncompressed images + + + VkStructureType sType + const void* pNext + VkImageSubresourceLayers srcSubresource + VkOffset3D srcOffset + VkImageSubresourceLayers dstSubresource + VkOffset3D dstOffset + VkExtent3D extent + + + VkStructureType sType + const void* pNext + VkBuffer srcBuffer + VkBuffer dstBuffer + uint32_t regionCount + const VkBufferCopy2KHR* pRegions + + + VkStructureType sType + const void* pNext + VkImage srcImage + VkImageLayout srcImageLayout + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkImageCopy2KHR* pRegions + + + VkStructureType sType + const void* pNext + VkImage srcImage + VkImageLayout srcImageLayout + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkImageBlit2KHR* pRegions + VkFilter filter + + + VkStructureType sType + const void* pNext + VkBuffer srcBuffer + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkBufferImageCopy2KHR* pRegions + + + VkStructureType sType + const void* pNext + VkImage srcImage + VkImageLayout 
srcImageLayout + VkBuffer dstBuffer + uint32_t regionCount + const VkBufferImageCopy2KHR* pRegions + + + VkStructureType sType + const void* pNext + VkImage srcImage + VkImageLayout srcImageLayout + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkImageResolve2KHR* pRegions + + + VkStructureType sType + void* pNext + VkBool32 shaderImageInt64Atomics + VkBool32 sparseImageInt64Atomics + + + VkStructureType sType + const void* pNext + const VkAttachmentReference2* pFragmentShadingRateAttachment + VkExtent2D shadingRateAttachmentTexelSize + + + VkStructureType sType + const void* pNext + VkExtent2D fragmentSize + VkFragmentShadingRateCombinerOpKHR combinerOps[2] + + + VkStructureType sType + void* pNext + VkBool32 pipelineFragmentShadingRate + VkBool32 primitiveFragmentShadingRate + VkBool32 attachmentFragmentShadingRate + + + VkStructureType sType + void* pNext + VkExtent2D minFragmentShadingRateAttachmentTexelSize + VkExtent2D maxFragmentShadingRateAttachmentTexelSize + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio + VkBool32 primitiveFragmentShadingRateWithMultipleViewports + VkBool32 layeredShadingRateAttachments + VkBool32 fragmentShadingRateNonTrivialCombinerOps + VkExtent2D maxFragmentSize + uint32_t maxFragmentSizeAspectRatio + uint32_t maxFragmentShadingRateCoverageSamples + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites + VkBool32 fragmentShadingRateWithSampleMask + VkBool32 fragmentShadingRateWithShaderSampleMask + VkBool32 fragmentShadingRateWithConservativeRasterization + VkBool32 fragmentShadingRateWithFragmentShaderInterlock + VkBool32 fragmentShadingRateWithCustomSampleLocations + VkBool32 fragmentShadingRateStrictMultiplyCombiner + + + VkStructureType sType + void* pNext + VkSampleCountFlags sampleCounts + VkExtent2D fragmentSize + + + VkStructureTypesType + void* pNext + VkBool32 shaderTerminateInvocation + + + VkStructureType sType + void* pNext + VkBool32 fragmentShadingRateEnums + VkBool32 supersampleFragmentShadingRates + VkBool32 noInvocationFragmentShadingRates + + + VkStructureType sType + void* pNext + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount + + + VkStructureType sType + const void* pNext + VkFragmentShadingRateTypeNV shadingRateType + VkFragmentShadingRateNV shadingRate + VkFragmentShadingRateCombinerOpKHR combinerOps[2] + + + VkStructureType sType + const void* pNext + VkDeviceSize accelerationStructureSize + VkDeviceSize updateScratchSize + VkDeviceSize buildScratchSize + + + VkStructureType sType + void* pNext + VkBool32 mutableDescriptorType + + + uint32_t descriptorTypeCount + const VkDescriptorType* pDescriptorTypes + + + VkStructureType sType + const void* pNext + uint32_t mutableDescriptorTypeListCount + const VkMutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists + + + VkStructureType sType + void* pNext + VkBool32 vertexInputDynamicState + + + VkStructureTypesType + void* pNext + uint32_t binding + uint32_t stride + VkVertexInputRate inputRate + uint32_t divisor + + + VkStructureTypesType + void* pNext + uint32_t locationlocation of the shader vertex attrib + uint32_t bindingVertex buffer binding id + VkFormat formatformat of source data + uint32_t offsetOffset of first element in bytes from base of vertex + + + VkStructureType sType + void* pNext + VkBool32 colorWriteEnable + + + VkStructureType sType + const void* pNext + uint32_t attachmentCount# of pAttachments + const VkBool32* pColorWriteEnables + + + 
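[Note: the VkBufferCopy2KHR/VkCopyBufferInfo2KHR structures added above move the old vkCmdCopyBuffer parameters into extensible sType/pNext structs (VK_KHR_copy_commands2). A minimal sketch of the "2" copy path, assuming the command buffer and both buffers already exist; the size is a placeholder:]

#include <vulkan/vulkan.h>

/* Sketch: record a buffer copy through the extensible "2" entry point. */
static void record_copy(VkCommandBuffer cmd, VkBuffer src, VkBuffer dst)
{
    const VkBufferCopy2KHR region = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
        .srcOffset = 0,
        .dstOffset = 0,
        .size = 256, /* placeholder byte count */
    };
    const VkCopyBufferInfo2KHR copyInfo = {
        .sType = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
        .srcBuffer = src,
        .dstBuffer = dst,
        .regionCount = 1,
        .pRegions = &region,
    };
    vkCmdCopyBuffer2KHR(cmd, &copyInfo);
}

[As an extension entry point, vkCmdCopyBuffer2KHR may need to be fetched via vkGetDeviceProcAddr rather than linked statically, depending on the loader.]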
VkStructureType sType + const void* pNext + VkPipelineStageFlags2KHR srcStageMask + VkAccessFlags2KHR srcAccessMask + VkPipelineStageFlags2KHR dstStageMask + VkAccessFlags2KHR dstAccessMask + + + VkStructureType sType + const void* pNext + VkPipelineStageFlags2KHR srcStageMask + VkAccessFlags2KHR srcAccessMask + VkPipelineStageFlags2KHR dstStageMask + VkAccessFlags2KHR dstAccessMask + VkImageLayout oldLayout + VkImageLayout newLayout + uint32_t srcQueueFamilyIndex + uint32_t dstQueueFamilyIndex + VkImage image + VkImageSubresourceRange subresourceRange + + + VkStructureType sType + const void* pNext + VkPipelineStageFlags2KHR srcStageMask + VkAccessFlags2KHR srcAccessMask + VkPipelineStageFlags2KHR dstStageMask + VkAccessFlags2KHR dstAccessMask + uint32_t srcQueueFamilyIndex + uint32_t dstQueueFamilyIndex + VkBuffer buffer + VkDeviceSize offset + VkDeviceSize size + + + VkStructureType sType + const void* pNext + VkDependencyFlags dependencyFlags + uint32_t memoryBarrierCount + const VkMemoryBarrier2KHR* pMemoryBarriers + uint32_t bufferMemoryBarrierCount + const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers + uint32_t imageMemoryBarrierCount + const VkImageMemoryBarrier2KHR* pImageMemoryBarriers + + + VkStructureType sType + const void* pNext + VkSemaphore semaphore + uint64_t value + VkPipelineStageFlags2KHR stageMask + uint32_t deviceIndex + + + VkStructureType sType + const void* pNext + VkCommandBuffer commandBuffer + uint32_t deviceMask + + + VkStructureType sType + const void* pNext + VkSubmitFlagsKHR flags + uint32_t waitSemaphoreInfoCount + const VkSemaphoreSubmitInfoKHR* pWaitSemaphoreInfos + uint32_t commandBufferInfoCount + const VkCommandBufferSubmitInfoKHR* pCommandBufferInfos + uint32_t signalSemaphoreInfoCount + const VkSemaphoreSubmitInfoKHR* pSignalSemaphoreInfos + + + VkStructureType sType + void* pNext + VkPipelineStageFlags2KHR checkpointExecutionStageMask + + + VkStructureType sType + void* pNext + VkPipelineStageFlags2KHR stage + void* pCheckpointMarker + + + VkStructureType sType + void* pNext + VkBool32 synchronization2 + + + VkStructureTypesType + void* pNext + VkVideoCodecOperationFlagsKHR videoCodecOperations + + + VkStructureTypesType + void* pNext + uint32_t profileCount + const VkVideoProfileKHR* pProfiles + + + VkStructureTypesType + const void* pNext + VkImageUsageFlags imageUsage + const VkVideoProfilesKHR* pVideoProfiles + + + VkStructureTypesType + void* pNext + VkFormat format + + + VkStructureTypesType + void* pNext + VkVideoCodecOperationFlagBitsKHR videoCodecOperation + VkVideoChromaSubsamplingFlagsKHR chromaSubsampling + VkVideoComponentBitDepthFlagsKHR lumaBitDepth + VkVideoComponentBitDepthFlagsKHR chromaBitDepth + + + VkStructureTypesType + void* pNext + VkVideoCapabilitiesFlagsKHR capabilityFlags + VkDeviceSize minBitstreamBufferOffsetAlignment + VkDeviceSize minBitstreamBufferSizeAlignment + VkExtent2D videoPictureExtentGranularity + VkExtent2D minExtent + VkExtent2D maxExtent + uint32_t maxReferencePicturesSlotsCount + uint32_t maxReferencePicturesActiveCount + + + VkStructureTypesType + const void* pNext + uint32_t memoryBindIndex + VkMemoryRequirements2* pMemoryRequirements + + + VkStructureTypesType + const void* pNext + uint32_t memoryBindIndex + VkDeviceMemory memory + VkDeviceSize memoryOffset + VkDeviceSize memorySize + + + VkStructureTypesType + const void* pNext + VkOffset2D codedOffsetThe offset to be used for the picture resource, currently only used in field mode + VkExtent2D codedExtentThe extent to be used for the picture 
resource + uint32_t baseArrayLayerTThe first array layer to be accessed for the Decode or Encode Operations + VkImageView imageViewBindingThe ImageView binding of the resource + + + VkStructureTypesType + const void* pNext + int8_t slotIndexThe reference slot index + const VkVideoPictureResourceKHR* pPictureResourceThe reference picture resource + + + VkStructureTypesType + const void* pNext + VkVideoDecodeFlagsKHR flags + VkOffset2D codedOffset + VkExtent2D codedExtent + VkBuffer srcBuffer + VkDeviceSize srcBufferOffset + VkDeviceSize srcBufferRange + VkVideoPictureResourceKHR dstPictureResource + const VkVideoReferenceSlotKHR* pSetupReferenceSlot + uint32_t referenceSlotCount + const VkVideoReferenceSlotKHR* pReferenceSlots + + Video Decode Codec Standard specific structures + #include "vk_video/vulkan_video_codec_h264std.h" + + + + + + + + + + + + + + + + + + + #include "vk_video/vulkan_video_codec_h264std_decode.h" + + + + + + + + + VkStructureTypesType + const void* pNext + StdVideoH264ProfileIdc stdProfileIdc + VkVideoDecodeH264FieldLayoutFlagsEXT fieldLayout + + + VkStructureTypesType + void* pNext + uint32_t maxLevel + VkOffset2D fieldOffsetGranularity + VkExtensionProperties stdExtensionVersion + + + VkStructureTypesType + const void* pNext + VkVideoDecodeH264CreateFlagsEXT flags + const VkExtensionProperties* pStdExtensionVersion + + + + + VkStructureTypesType + const void* pNext + uint32_t spsStdCount + const StdVideoH264SequenceParameterSet* pSpsStd + uint32_t ppsStdCount + const StdVideoH264PictureParameterSet* pPpsStdList of Picture Parameters associated with the spsStd, above + + + VkStructureTypesType + const void* pNext + uint32_t maxSpsStdCount + uint32_t maxPpsStdCount + const VkVideoDecodeH264SessionParametersAddInfoEXT* pParametersAddInfo + + + VkStructureTypesType + const void* pNext + const StdVideoDecodeH264PictureInfo* pStdPictureInfo + uint32_t slicesCount + const uint32_t* pSlicesDataOffsets + + + VkStructureTypesType + const void* pNext + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo + + + VkStructureTypesType + const void*pNext + const StdVideoDecodeH264Mvc* pStdMvc + + #include "vk_video/vulkan_video_codec_h265std.h" + + + + + + + + + + + + + + + + + #include "vk_video/vulkan_video_codec_h265std_decode.h" + + + + + + VkStructureTypesType + const void* pNext + StdVideoH265ProfileIdc stdProfileIdc + + + VkStructureTypesType + void* pNext + uint32_t maxLevel + VkExtensionProperties stdExtensionVersion + + + VkStructureTypesType + const void* pNext + VkVideoDecodeH265CreateFlagsEXT flags + const VkExtensionProperties* pStdExtensionVersion + + + VkStructureTypesType + const void* pNext + uint32_t spsStdCount + const StdVideoH265SequenceParameterSet* pSpsStd + uint32_t ppsStdCount + const StdVideoH265PictureParameterSet* pPpsStdList of Picture Parameters associated with the spsStd, above + + + VkStructureTypesType + const void* pNext + uint32_t maxSpsStdCount + uint32_t maxPpsStdCount + const VkVideoDecodeH265SessionParametersAddInfoEXT* pParametersAddInfo + + + VkStructureTypesType + const void* pNext + StdVideoDecodeH265PictureInfo* pStdPictureInfo + uint32_t slicesCount + const uint32_t* pSlicesDataOffsets + + + VkStructureTypesType + const void* pNext + const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo + + + VkStructureTypesType + const void* pNext + uint32_t queueFamilyIndex + VkVideoSessionCreateFlagsKHR flags + const VkVideoProfileKHR* pVideoProfile + VkFormat pictureFormat + VkExtent2D maxCodedExtent + VkFormat referencePicturesFormat + 
uint32_t maxReferencePicturesSlotsCount + uint32_t maxReferencePicturesActiveCount + + + VkStructureTypesType + const void* pNext + VkVideoSessionParametersKHR videoSessionParametersTemplate + VkVideoSessionKHR videoSession + + + VkStructureTypesType + const void* pNext + uint32_t updateSequenceCount + + + VkStructureTypesType + const void* pNext + VkVideoBeginCodingFlagsKHR flags + VkVideoCodingQualityPresetFlagsKHR codecQualityPreset + VkVideoSessionKHR videoSession + VkVideoSessionParametersKHR videoSessionParameters + uint32_t referenceSlotCount + const VkVideoReferenceSlotKHR* pReferenceSlots + + + VkStructureTypesType + const void* pNext + VkVideoEndCodingFlagsKHR flags + + + VkStructureTypesType + const void* pNext + VkVideoCodingControlFlagsKHR flags + + + VkStructureTypesType + const void* pNext + VkVideoEncodeFlagsKHR flags + uint32_t qualityLevel + VkExtent2D codedExtent + VkBuffer dstBitstreamBuffer + VkDeviceSize dstBitstreamBufferOffset + VkDeviceSize dstBitstreamBufferMaxRange + VkVideoPictureResourceKHR srcPictureResource + const VkVideoReferenceSlotKHR* pSetupReferenceSlot + uint32_t referenceSlotCount + const VkVideoReferenceSlotKHR* pReferenceSlots + + + VkStructureTypesType + const void* pNext + VkVideoEncodeRateControlFlagsKHR flags + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode + uint32_t averageBitrate + uint16_t peakToAverageBitrateRatio + uint16_t frameRateNumerator + uint16_t frameRateDenominator + uint32_t virtualBufferSizeInMs + + + VkStructureTypesType + const void* pNext + VkVideoEncodeH264CapabilitiesFlagsEXT flags + VkVideoEncodeH264InputModeFlagsEXT inputModeFlags + VkVideoEncodeH264OutputModeFlagsEXT outputModeFlags + VkExtent2D minPictureSizeInMbs + VkExtent2D maxPictureSizeInMbs + VkExtent2D inputImageDataAlignment + uint8_t maxNumL0ReferenceForP + uint8_t maxNumL0ReferenceForB + uint8_t maxNumL1Reference + uint8_t qualityLevelCount + VkExtensionProperties stdExtensionVersion + + + VkStructureTypesType + const void* pNext + VkVideoEncodeH264CreateFlagsEXT flags + VkExtent2D maxPictureSizeInMbs + const VkExtensionProperties* pStdExtensionVersion + + #include "vk_video/vulkan_video_codec_h264std_encode.h" + + + + + + + + + + VkStructureTypesType + const void* pNext + uint32_t spsStdCount + const StdVideoH264SequenceParameterSet* pSpsStd + uint32_t ppsStdCount + const StdVideoH264PictureParameterSet* pPpsStdList of Picture Parameters associated with the spsStd, above + + + VkStructureTypesType + const void* pNext + uint32_t maxSpsStdCount + uint32_t maxPpsStdCount + const VkVideoEncodeH264SessionParametersAddInfoEXT* pParametersAddInfo + + + VkStructureTypesType + const void* pNext + int8_t slotIndex + const StdVideoEncodeH264PictureInfo* pStdPictureInfo + + + VkStructureTypesType + const void* pNext + uint8_t refDefaultFinalList0EntryCount + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList0Entries + uint8_t refDefaultFinalList1EntryCount + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList1Entries + uint32_t naluSliceEntryCount + const VkVideoEncodeH264NaluSliceEXT* pNaluSliceEntries + const VkVideoEncodeH264DpbSlotInfoEXT* pCurrentPictureInfo + + + VkStructureTypesType + const void* pNext + uint8_t spsId + VkBool32 emitSpsEnable + uint32_t ppsIdEntryCount + const uint8_t* ppsIdEntries + + + VkStructureTypesType + const void* pNext + StdVideoH264ProfileIdc stdProfileIdc + + + VkStructureTypesType + const void* pNext + const StdVideoEncodeH264SliceHeader* pSliceHeaderStd + uint32_t mbCount + uint8_t refFinalList0EntryCount + 
const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList0Entries + uint8_t refFinalList1EntryCount + const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList1Entries + uint32_t precedingNaluBytes + uint8_t minQp + uint8_t maxQp + + + VkStructureType sType + void* pNext + VkBool32 inheritedViewportScissor2D + + + VkStructureType sType + const void* pNext + VkBool32 viewportScissor2D + uint32_t viewportDepthCount + const VkViewport* pViewportDepths + + + VkStructureType sType + void* pNext + VkBool32 ycbcr2plane444Formats + + + VkStructureType sType + void* pNext + VkBool32 provokingVertexLast + VkBool32 transformFeedbackPreservesProvokingVertex + + + VkStructureType sType + void* pNext + VkBool32 provokingVertexModePerPipeline + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex + + + VkStructureType sType + const void* pNext + VkProvokingVertexModeEXT provokingVertexMode + + + VkStructureType sType + const void* pNext + size_t dataSize + const void* pData + + + VkStructureType sType + const void* pNext + VkCuModuleNVX module + const char* pName + + + VkStructureType sType + const void* pNext + VkCuFunctionNVX function + uint32_t gridDimX + uint32_t gridDimY + uint32_t gridDimZ + uint32_t blockDimX + uint32_t blockDimY + uint32_t blockDimZ + uint32_t sharedMemBytes + size_t paramCount + const void* const * pParams + size_t extraCount + const void* const * pExtras - Vulkan enumerant (token) definitions - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -4993,12 +6077,6 @@ typedef void CAMetalLayer; - - - - - - @@ -5360,33 +6438,33 @@ typedef void CAMetalLayer; - + - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + Flags @@ -5396,6 +6474,12 @@ typedef void CAMetalLayer; + + + + + + @@ -5441,7 +6525,7 @@ typedef void CAMetalLayer; - + @@ -5466,7 +6550,7 @@ typedef void CAMetalLayer; - + @@ -5474,7 +6558,7 @@ typedef void CAMetalLayer; - + @@ -5490,8 +6574,7 @@ typedef void CAMetalLayer; - - + When VkSemaphoreCreateFlagBits is first extended, need to add a bitmask enums tag for it here @@ -5697,6 +6780,13 @@ typedef void CAMetalLayer; + + + + + + + @@ -5721,6 +6811,7 @@ typedef void CAMetalLayer; + @@ -5730,6 +6821,7 @@ typedef void CAMetalLayer; + @@ -5782,6 +6874,7 @@ typedef void CAMetalLayer; + @@ -5805,7 +6898,8 @@ typedef void CAMetalLayer; - + + @@ -5939,7 +7033,8 @@ typedef void CAMetalLayer; - + + Driver IDs are now represented as enums instead of the old @@ -5958,6 +7053,9 @@ typedef void CAMetalLayer; + + + @@ -6006,24 +7104,33 @@ typedef void CAMetalLayer; + + + + + + + + + - - - - + + + + @@ -6035,6 +7142,16 @@ typedef void CAMetalLayer; + + + + + + + + + + @@ -6107,8 +7224,10 @@ typedef void CAMetalLayer; - - + + + + @@ -6163,6 +7282,182 @@ typedef void CAMetalLayer; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -6300,7 +7595,7 @@ typedef void CAMetalLayer; all sname:VkQueue objects created from pname:device - + VkResult vkAllocateMemory VkDevice device const VkMemoryAllocateInfo* pAllocateInfo @@ -6409,7 +7704,7 @@ typedef void CAMetalLayer; VkFence fence const VkAllocationCallbacks* pAllocator - + VkResult vkResetFences VkDevice device uint32_t fenceCount @@ 
-6464,7 +7759,7 @@ typedef void CAMetalLayer; VkDevice device VkEvent event - + VkResult vkResetEvent VkDevice device VkEvent event @@ -6637,7 +7932,7 @@ typedef void CAMetalLayer; VkPipelineLayout pipelineLayout const VkAllocationCallbacks* pAllocator - + VkResult vkCreateSampler VkDevice device const VkSamplerCreateInfo* pCreateInfo @@ -6751,7 +8046,7 @@ typedef void CAMetalLayer; VkCommandPool commandPool const VkAllocationCallbacks* pAllocator - + VkResult vkResetCommandPool VkDevice device VkCommandPool commandPool @@ -6785,7 +8080,7 @@ typedef void CAMetalLayer; the sname:VkCommandPool that pname:commandBuffer was allocated from - + VkResult vkResetCommandBuffer VkCommandBuffer commandBuffer VkCommandBufferResetFlags flags @@ -6877,7 +8172,7 @@ typedef void CAMetalLayer; const VkBuffer* pBuffers const VkDeviceSize* pOffsets - + void vkCmdDraw VkCommandBuffer commandBuffer uint32_t vertexCount @@ -6885,7 +8180,7 @@ typedef void CAMetalLayer; uint32_t firstVertex uint32_t firstInstance - + void vkCmdDrawIndexed VkCommandBuffer commandBuffer uint32_t indexCount @@ -6894,7 +8189,7 @@ typedef void CAMetalLayer; int32_t vertexOffset uint32_t firstInstance - + void vkCmdDrawIndirect VkCommandBuffer commandBuffer VkBuffer buffer @@ -6902,7 +8197,7 @@ typedef void CAMetalLayer; uint32_t drawCount uint32_t stride - + void vkCmdDrawIndexedIndirect VkCommandBuffer commandBuffer VkBuffer buffer @@ -6910,20 +8205,20 @@ typedef void CAMetalLayer; uint32_t drawCount uint32_t stride - + void vkCmdDispatch VkCommandBuffer commandBuffer uint32_t groupCountX uint32_t groupCountY uint32_t groupCountZ - + void vkCmdDispatchIndirect VkCommandBuffer commandBuffer VkBuffer buffer VkDeviceSize offset - + void vkCmdCopyBuffer VkCommandBuffer commandBuffer VkBuffer srcBuffer @@ -6931,7 +8226,7 @@ typedef void CAMetalLayer; uint32_t regionCount const VkBufferCopy* pRegions - + void vkCmdCopyImage VkCommandBuffer commandBuffer VkImage srcImage @@ -6941,7 +8236,7 @@ typedef void CAMetalLayer; uint32_t regionCount const VkImageCopy* pRegions - + void vkCmdBlitImage VkCommandBuffer commandBuffer VkImage srcImage @@ -6952,7 +8247,7 @@ typedef void CAMetalLayer; const VkImageBlit* pRegions VkFilter filter - + void vkCmdCopyBufferToImage VkCommandBuffer commandBuffer VkBuffer srcBuffer @@ -6961,7 +8256,7 @@ typedef void CAMetalLayer; uint32_t regionCount const VkBufferImageCopy* pRegions - + void vkCmdCopyImageToBuffer VkCommandBuffer commandBuffer VkImage srcImage @@ -6970,7 +8265,7 @@ typedef void CAMetalLayer; uint32_t regionCount const VkBufferImageCopy* pRegions - + void vkCmdUpdateBuffer VkCommandBuffer commandBuffer VkBuffer dstBuffer @@ -6978,7 +8273,7 @@ typedef void CAMetalLayer; VkDeviceSize dataSize const void* pData - + void vkCmdFillBuffer VkCommandBuffer commandBuffer VkBuffer dstBuffer @@ -6986,16 +8281,16 @@ typedef void CAMetalLayer; VkDeviceSize size uint32_t data - + void vkCmdClearColorImage VkCommandBuffer commandBuffer VkImage image VkImageLayout imageLayout - const VkClearColorValue* pColor + const VkClearColorValue* pColor uint32_t rangeCount const VkImageSubresourceRange* pRanges - + void vkCmdClearDepthStencilImage VkCommandBuffer commandBuffer VkImage image @@ -7004,7 +8299,7 @@ typedef void CAMetalLayer; uint32_t rangeCount const VkImageSubresourceRange* pRanges - + void vkCmdClearAttachments VkCommandBuffer commandBuffer uint32_t attachmentCount @@ -7012,7 +8307,7 @@ typedef void CAMetalLayer; uint32_t rectCount const VkClearRect* pRects - + void vkCmdResolveImage VkCommandBuffer 
commandBuffer VkImage srcImage @@ -7039,8 +8334,8 @@ typedef void CAMetalLayer; VkCommandBuffer commandBuffer uint32_t eventCount const VkEvent* pEvents - VkPipelineStageFlags srcStageMask - VkPipelineStageFlags dstStageMask + VkPipelineStageFlags srcStageMask + VkPipelineStageFlags dstStageMask uint32_t memoryBarrierCount const VkMemoryBarrier* pMemoryBarriers uint32_t bufferMemoryBarrierCount @@ -7090,14 +8385,14 @@ typedef void CAMetalLayer; uint32_t firstQuery uint32_t queryCount - + void vkCmdWriteTimestamp VkCommandBuffer commandBuffer VkPipelineStageFlagBits pipelineStage VkQueryPool queryPool uint32_t query - + void vkCmdCopyQueryPoolResults VkCommandBuffer commandBuffer VkQueryPool queryPool @@ -7117,18 +8412,18 @@ typedef void CAMetalLayer; uint32_t size const void* pValues - + void vkCmdBeginRenderPass VkCommandBuffer commandBuffer const VkRenderPassBeginInfo* pRenderPassBegin VkSubpassContents contents - + void vkCmdNextSubpass VkCommandBuffer commandBuffer VkSubpassContents contents - + void vkCmdEndRenderPass VkCommandBuffer commandBuffer @@ -7328,6 +8623,19 @@ typedef void CAMetalLayer; xcb_connection_t* connection xcb_visualid_t visual_id + + VkResult vkCreateDirectFBSurfaceEXT + VkInstance instance + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + IDirectFB* dfb + VkResult vkCreateImagePipeSurfaceFUCHSIA VkInstance instance @@ -7342,6 +8650,19 @@ typedef void CAMetalLayer; const VkAllocationCallbacks* pAllocator VkSurfaceKHR* pSurface + + VkResult vkCreateScreenSurfaceQNX + VkInstance instance + const VkScreenSurfaceCreateInfoQNX* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + struct _screen_window* window + VkResult vkCreateDebugReportCallbackEXT VkInstance instance @@ -7521,7 +8842,7 @@ typedef void CAMetalLayer; const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo HANDLE* pHandle - + VkResult vkGetMemoryWin32HandlePropertiesKHR VkDevice device VkExternalMemoryHandleTypeFlagBits handleType @@ -7534,13 +8855,26 @@ typedef void CAMetalLayer; const VkMemoryGetFdInfoKHR* pGetFdInfo int* pFd - + VkResult vkGetMemoryFdPropertiesKHR VkDevice device VkExternalMemoryHandleTypeFlagBits handleType int fd VkMemoryFdPropertiesKHR* pMemoryFdProperties + + VkResult vkGetMemoryZirconHandleFUCHSIA + VkDevice device + const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo + zx_handle_t* pZirconHandle + + + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA + VkDevice device + VkExternalMemoryHandleTypeFlagBits handleType + zx_handle_t zirconHandle + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties + void vkGetPhysicalDeviceExternalSemaphoreProperties VkPhysicalDevice physicalDevice @@ -7570,6 +8904,17 @@ typedef void CAMetalLayer; VkDevice device const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo + + VkResult vkGetSemaphoreZirconHandleFUCHSIA + VkDevice device + const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo + zx_handle_t* pZirconHandle + + + VkResult vkImportSemaphoreZirconHandleFUCHSIA + VkDevice device + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo + void vkGetPhysicalDeviceExternalFenceProperties VkPhysicalDevice physicalDevice @@ -7604,33 
+8949,44 @@ typedef void CAMetalLayer; VkPhysicalDevice physicalDevice VkDisplayKHR display - + VkResult vkAcquireXlibDisplayEXT VkPhysicalDevice physicalDevice Display* dpy VkDisplayKHR display - + VkResult vkGetRandROutputDisplayEXT VkPhysicalDevice physicalDevice Display* dpy RROutput rrOutput VkDisplayKHR* pDisplay - + + VkResult vkAcquireWinrtDisplayNV + VkPhysicalDevice physicalDevice + VkDisplayKHR display + + + VkResult vkGetWinrtDisplayNV + VkPhysicalDevice physicalDevice + uint32_t deviceRelativeId + VkDisplayKHR* pDisplay + + VkResult vkDisplayPowerControlEXT VkDevice device VkDisplayKHR display const VkDisplayPowerInfoEXT* pDisplayPowerInfo - + VkResult vkRegisterDeviceEventEXT VkDevice device const VkDeviceEventInfoEXT* pDeviceEventInfo const VkAllocationCallbacks* pAllocator VkFence* pFence - + VkResult vkRegisterDisplayEventEXT VkDevice device VkDisplayKHR display @@ -7638,7 +8994,7 @@ typedef void CAMetalLayer; const VkAllocationCallbacks* pAllocator VkFence* pFence - + VkResult vkGetSwapchainCounterEXT VkDevice device VkSwapchainKHR swapchain @@ -7765,13 +9121,13 @@ typedef void CAMetalLayer; VkDevice device VkSwapchainKHR swapchain - + VkResult vkGetRefreshCycleDurationGOOGLE VkDevice device VkSwapchainKHR swapchain VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties - + VkResult vkGetPastPresentationTimingGOOGLE VkDevice device VkSwapchainKHR swapchain @@ -8058,14 +9414,14 @@ typedef void CAMetalLayer; VkDebugUtilsMessageTypeFlagsEXT messageTypes const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData - + VkResult vkGetMemoryHostPointerPropertiesEXT VkDevice device VkExternalMemoryHandleTypeFlagBits handleType const void* pHostPointer VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties - + void vkCmdWriteBufferMarkerAMD VkCommandBuffer commandBuffer VkPipelineStageFlagBits pipelineStage @@ -8081,21 +9437,21 @@ typedef void CAMetalLayer; VkRenderPass* pRenderPass - + void vkCmdBeginRenderPass2 VkCommandBuffer commandBuffer const VkRenderPassBeginInfo* pRenderPassBegin const VkSubpassBeginInfo* pSubpassBeginInfo - + void vkCmdNextSubpass2 VkCommandBuffer commandBuffer const VkSubpassBeginInfo* pSubpassBeginInfo const VkSubpassEndInfo* pSubpassEndInfo - + void vkCmdEndRenderPass2 VkCommandBuffer commandBuffer const VkSubpassEndInfo* pSubpassEndInfo @@ -8121,7 +9477,7 @@ typedef void CAMetalLayer; const VkSemaphoreSignalInfo* pSignalInfo - + VkResult vkGetAndroidHardwareBufferPropertiesANDROID VkDevice device const struct AHardwareBuffer* buffer @@ -8133,7 +9489,7 @@ typedef void CAMetalLayer; const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo struct AHardwareBuffer** pBuffer - + void vkCmdDrawIndirectCount VkCommandBuffer commandBuffer VkBuffer buffer @@ -8145,7 +9501,7 @@ typedef void CAMetalLayer; - + void vkCmdDrawIndexedIndirectCount VkCommandBuffer commandBuffer VkBuffer buffer @@ -8208,7 +9564,7 @@ typedef void CAMetalLayer; uint32_t query uint32_t index - + void vkCmdDrawIndirectByteCountEXT VkCommandBuffer commandBuffer uint32_t instanceCount @@ -8245,13 +9601,13 @@ typedef void CAMetalLayer; uint32_t customSampleOrderCount const VkCoarseSampleOrderCustomNV* pCustomSampleOrders - + void vkCmdDrawMeshTasksNV VkCommandBuffer commandBuffer uint32_t taskCount uint32_t firstTask - + void vkCmdDrawMeshTasksIndirectNV VkCommandBuffer commandBuffer VkBuffer buffer @@ -8259,7 +9615,7 @@ typedef void CAMetalLayer; uint32_t drawCount uint32_t stride - + void vkCmdDrawMeshTasksIndirectCountNV VkCommandBuffer commandBuffer VkBuffer buffer @@ -8288,12 
+9644,11 @@ typedef void CAMetalLayer; VkAccelerationStructureKHR accelerationStructure const VkAllocationCallbacks* pAllocator - - void vkGetAccelerationStructureMemoryRequirementsKHR + void vkDestroyAccelerationStructureNV VkDevice device - const VkAccelerationStructureMemoryRequirementsInfoKHR* pInfo - VkMemoryRequirements2* pMemoryRequirements + VkAccelerationStructureNV accelerationStructure + const VkAllocationCallbacks* pAllocator void vkGetAccelerationStructureMemoryRequirementsNV @@ -8302,17 +9657,16 @@ typedef void CAMetalLayer; VkMemoryRequirements2KHR* pMemoryRequirements - VkResult vkBindAccelerationStructureMemoryKHR + VkResult vkBindAccelerationStructureMemoryNV VkDevice device uint32_t bindInfoCount - const VkBindAccelerationStructureMemoryInfoKHR* pBindInfos + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos - void vkCmdCopyAccelerationStructureNV VkCommandBuffer commandBuffer - VkAccelerationStructureKHR dst - VkAccelerationStructureKHR src + VkAccelerationStructureNV dst + VkAccelerationStructureNV src VkCopyAccelerationStructureModeKHR mode @@ -8323,6 +9677,7 @@ typedef void CAMetalLayer; VkResult vkCopyAccelerationStructureKHR VkDevice device + VkDeferredOperationKHR deferredOperation const VkCopyAccelerationStructureInfoKHR* pInfo @@ -8333,6 +9688,7 @@ typedef void CAMetalLayer; VkResult vkCopyAccelerationStructureToMemoryKHR VkDevice device + VkDeferredOperationKHR deferredOperation const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo @@ -8343,6 +9699,7 @@ typedef void CAMetalLayer; VkResult vkCopyMemoryToAccelerationStructureKHR VkDevice device + VkDeferredOperationKHR deferredOperation const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo @@ -8354,7 +9711,15 @@ typedef void CAMetalLayer; VkQueryPool queryPool uint32_t firstQuery - + + void vkCmdWriteAccelerationStructuresPropertiesNV + VkCommandBuffer commandBuffer + uint32_t accelerationStructureCount + const VkAccelerationStructureNV* pAccelerationStructures + VkQueryType queryType + VkQueryPool queryPool + uint32_t firstQuery + void vkCmdBuildAccelerationStructureNV VkCommandBuffer commandBuffer @@ -8362,8 +9727,8 @@ typedef void CAMetalLayer; VkBuffer instanceData VkDeviceSize instanceOffset VkBool32 update - VkAccelerationStructureKHR dst - VkAccelerationStructureKHR src + VkAccelerationStructureNV dst + VkAccelerationStructureNV src VkBuffer scratch VkDeviceSize scratchOffset @@ -8380,10 +9745,10 @@ typedef void CAMetalLayer; void vkCmdTraceRaysKHR VkCommandBuffer commandBuffer - const VkStridedBufferRegionKHR* pRaygenShaderBindingTable - const VkStridedBufferRegionKHR* pMissShaderBindingTable - const VkStridedBufferRegionKHR* pHitShaderBindingTable - const VkStridedBufferRegionKHR* pCallableShaderBindingTable + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable uint32_t width uint32_t height uint32_t depth @@ -8428,7 +9793,7 @@ typedef void CAMetalLayer; VkResult vkGetAccelerationStructureHandleNV VkDevice device - VkAccelerationStructureKHR accelerationStructure + VkAccelerationStructureNV accelerationStructure size_t dataSize void* pData @@ -8444,6 +9809,7 @@ typedef void CAMetalLayer; VkResult vkCreateRayTracingPipelinesKHR VkDevice device + VkDeferredOperationKHR deferredOperation VkPipelineCache pipelineCache uint32_t createInfoCount const VkRayTracingPipelineCreateInfoKHR* 
pCreateInfos @@ -8459,24 +9825,36 @@ typedef void CAMetalLayer; void vkCmdTraceRaysIndirectKHR VkCommandBuffer commandBuffer - const VkStridedBufferRegionKHR* pRaygenShaderBindingTable - const VkStridedBufferRegionKHR* pMissShaderBindingTable - const VkStridedBufferRegionKHR* pHitShaderBindingTable - const VkStridedBufferRegionKHR* pCallableShaderBindingTable - VkBuffer buffer - VkDeviceSize offset + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable + VkDeviceAddress indirectDeviceAddress - - VkResult vkGetDeviceAccelerationStructureCompatibilityKHR + + void vkGetDeviceAccelerationStructureCompatibilityKHR VkDevice device - const VkAccelerationStructureVersionKHR* version + const VkAccelerationStructureVersionInfoKHR* pVersionInfo + VkAccelerationStructureCompatibilityKHR* pCompatibility + + + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR + VkDevice device + VkPipeline pipeline + uint32_t group + VkShaderGroupShaderKHR groupShader + + + void vkCmdSetRayTracingPipelineStackSizeKHR + VkCommandBuffer commandBuffer + uint32_t pipelineStackSize uint32_t vkGetImageViewHandleNVX VkDevice device const VkImageViewHandleInfoNVX* pInfo - + VkResult vkGetImageViewAddressNVX VkDevice device VkImageView imageView @@ -8519,7 +9897,7 @@ typedef void CAMetalLayer; const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo uint32_t* pNumPasses - + VkResult vkAcquireProfilingLockKHR VkDevice device const VkAcquireProfilingLockInfoKHR* pInfo @@ -8528,7 +9906,7 @@ typedef void CAMetalLayer; void vkReleaseProfilingLockKHR VkDevice device - + VkResult vkGetImageDrmFormatModifierPropertiesEXT VkDevice device VkImage image @@ -8593,7 +9971,7 @@ typedef void CAMetalLayer; VkResult vkReleasePerformanceConfigurationINTEL VkDevice device - VkPerformanceConfigurationINTEL configuration + VkPerformanceConfigurationINTEL configuration VkResult vkQueueSetPerformanceConfigurationINTEL @@ -8639,7 +10017,7 @@ typedef void CAMetalLayer; uint32_t lineStippleFactor uint16_t lineStipplePattern - + VkResult vkGetPhysicalDeviceToolPropertiesEXT VkPhysicalDevice physicalDevice uint32_t* pToolCount @@ -8653,26 +10031,28 @@ typedef void CAMetalLayer; VkAccelerationStructureKHR* pAccelerationStructure - void vkCmdBuildAccelerationStructureKHR + void vkCmdBuildAccelerationStructuresKHR VkCommandBuffer commandBuffer uint32_t infoCount const VkAccelerationStructureBuildGeometryInfoKHR* pInfos - const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos - void vkCmdBuildAccelerationStructureIndirectKHR + void vkCmdBuildAccelerationStructuresIndirectKHR VkCommandBuffer commandBuffer - const VkAccelerationStructureBuildGeometryInfoKHR* pInfo - VkBuffer indirectBuffer - VkDeviceSize indirectOffset - uint32_t indirectStride + uint32_t infoCount + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos + const VkDeviceAddress* pIndirectDeviceAddresses + const uint32_t* pIndirectStrides + const uint32_t* const* ppMaxPrimitiveCounts - VkResult vkBuildAccelerationStructureKHR + VkResult vkBuildAccelerationStructuresKHR VkDevice device + VkDeferredOperationKHR deferredOperation uint32_t infoCount const VkAccelerationStructureBuildGeometryInfoKHR* pInfos - const VkAccelerationStructureBuildOffsetInfoKHR* const* ppOffsetInfos + const 
VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR @@ -8777,6 +10157,31 @@ typedef void CAMetalLayer; VkStencilOp depthFailOp VkCompareOp compareOp + + void vkCmdSetPatchControlPointsEXT + VkCommandBuffer commandBuffer + uint32_t patchControlPoints + + + void vkCmdSetRasterizerDiscardEnableEXT + VkCommandBuffer commandBuffer + VkBool32 rasterizerDiscardEnable + + + void vkCmdSetDepthBiasEnableEXT + VkCommandBuffer commandBuffer + VkBool32 depthBiasEnable + + + void vkCmdSetLogicOpEXT + VkCommandBuffer commandBuffer + VkLogicOp logicOp + + + void vkCmdSetPrimitiveRestartEnableEXT + VkCommandBuffer commandBuffer + VkBool32 primitiveRestartEnable + VkResult vkCreatePrivateDataSlotEXT VkDevice device @@ -8806,13 +10211,252 @@ typedef void CAMetalLayer; VkPrivateDataSlotEXT privateDataSlot uint64_t* pData + + void vkCmdCopyBuffer2KHR + VkCommandBuffer commandBuffer + const VkCopyBufferInfo2KHR* pCopyBufferInfo + + + void vkCmdCopyImage2KHR + VkCommandBuffer commandBuffer + const VkCopyImageInfo2KHR* pCopyImageInfo + + + void vkCmdBlitImage2KHR + VkCommandBuffer commandBuffer + const VkBlitImageInfo2KHR* pBlitImageInfo + + + void vkCmdCopyBufferToImage2KHR + VkCommandBuffer commandBuffer + const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo + + + void vkCmdCopyImageToBuffer2KHR + VkCommandBuffer commandBuffer + const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo + + + void vkCmdResolveImage2KHR + VkCommandBuffer commandBuffer + const VkResolveImageInfo2KHR* pResolveImageInfo + + + void vkCmdSetFragmentShadingRateKHR + VkCommandBuffer commandBuffer + const VkExtent2D* pFragmentSize + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] + + + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR + VkPhysicalDevice physicalDevice + uint32_t* pFragmentShadingRateCount + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates + + + void vkCmdSetFragmentShadingRateEnumNV + VkCommandBuffer commandBuffer + VkFragmentShadingRateNV shadingRate + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] + + + void vkGetAccelerationStructureBuildSizesKHR + VkDevice device + VkAccelerationStructureBuildTypeKHR buildType + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo + const uint32_t* pMaxPrimitiveCounts + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo + + + void vkCmdSetVertexInputEXT + VkCommandBuffer commandBuffer + uint32_t vertexBindingDescriptionCount + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions + uint32_t vertexAttributeDescriptionCount + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions + + + void vkCmdSetColorWriteEnableEXT + VkCommandBuffer commandBuffer + uint32_t attachmentCount + const VkBool32* pColorWriteEnables + + + void vkCmdSetEvent2KHR + VkCommandBuffer commandBuffer + VkEvent event + const VkDependencyInfoKHR* pDependencyInfo + + + void vkCmdResetEvent2KHR + VkCommandBuffer commandBuffer + VkEvent event + VkPipelineStageFlags2KHR stageMask + + + void vkCmdWaitEvents2KHR + VkCommandBuffer commandBuffer + uint32_t eventCount + const VkEvent* pEvents + const VkDependencyInfoKHR* pDependencyInfos + + + void vkCmdPipelineBarrier2KHR + VkCommandBuffer commandBuffer + const VkDependencyInfoKHR* pDependencyInfo + + + VkResult vkQueueSubmit2KHR + VkQueue queue + uint32_t submitCount + const VkSubmitInfo2KHR* pSubmits + VkFence fence + + + void vkCmdWriteTimestamp2KHR + VkCommandBuffer commandBuffer + VkPipelineStageFlags2KHR stage + 
VkQueryPool queryPool + uint32_t query + + + void vkCmdWriteBufferMarker2AMD + VkCommandBuffer commandBuffer + VkPipelineStageFlags2KHR stage + VkBuffer dstBuffer + VkDeviceSize dstOffset + uint32_t marker + + + void vkGetQueueCheckpointData2NV + VkQueue queue + uint32_t* pCheckpointDataCount + VkCheckpointData2NV* pCheckpointData + + + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR + VkPhysicalDevice physicalDevice + const VkVideoProfileKHR* pVideoProfile + VkVideoCapabilitiesKHR* pCapabilities + + + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo + uint32_t* pVideoFormatPropertyCount + VkVideoFormatPropertiesKHR* pVideoFormatProperties + + + VkResult vkCreateVideoSessionKHR + VkDevice device + const VkVideoSessionCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkVideoSessionKHR* pVideoSession + + + void vkDestroyVideoSessionKHR + VkDevice device + VkVideoSessionKHR videoSession + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateVideoSessionParametersKHR + VkDevice device + const VkVideoSessionParametersCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkVideoSessionParametersKHR* pVideoSessionParameters + + + VkResult vkUpdateVideoSessionParametersKHR + VkDevice device + VkVideoSessionParametersKHR videoSessionParameters + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo + + + void vkDestroyVideoSessionParametersKHR + VkDevice device + VkVideoSessionParametersKHR videoSessionParameters + const VkAllocationCallbacks* pAllocator + + + VkResult vkGetVideoSessionMemoryRequirementsKHR + VkDevice device + VkVideoSessionKHR videoSession + uint32_t* pVideoSessionMemoryRequirementsCount + VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements + + + VkResult vkBindVideoSessionMemoryKHR + VkDevice device + VkVideoSessionKHR videoSession + uint32_t videoSessionBindMemoryCount + const VkVideoBindMemoryKHR* pVideoSessionBindMemories + + + void vkCmdDecodeVideoKHR + VkCommandBuffer commandBuffer + const VkVideoDecodeInfoKHR* pFrameInfo + + + void vkCmdBeginVideoCodingKHR + VkCommandBuffer commandBuffer + const VkVideoBeginCodingInfoKHR* pBeginInfo + + + void vkCmdControlVideoCodingKHR + VkCommandBuffer commandBuffer + const VkVideoCodingControlInfoKHR* pCodingControlInfo + + + void vkCmdEndVideoCodingKHR + VkCommandBuffer commandBuffer + const VkVideoEndCodingInfoKHR* pEndCodingInfo + + + void vkCmdEncodeVideoKHR + VkCommandBuffer commandBuffer + const VkVideoEncodeInfoKHR* pEncodeInfo + + + VkResult vkCreateCuModuleNVX + VkDevice device + const VkCuModuleCreateInfoNVX* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkCuModuleNVX* pModule + + + VkResult vkCreateCuFunctionNVX + VkDevice device + const VkCuFunctionCreateInfoNVX* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkCuFunctionNVX* pFunction + + + void vkDestroyCuModuleNVX + VkDevice device + VkCuModuleNVX module + const VkAllocationCallbacks* pAllocator + + + void vkDestroyCuFunctionNVX + VkDevice device + VkCuFunctionNVX function + const VkAllocationCallbacks* pAllocator + + + void vkCmdCuLaunchKernelNVX + VkCommandBuffer commandBuffer + const VkCuLaunchInfoNVX* pLaunchInfo
diff --git a/externals/Vulkan-Headers/registry/vkconventions.py b/externals/Vulkan-Headers/registry/vkconventions.py index e12e0ff67..a77beb6f0 100755 --- a/externals/Vulkan-Headers/registry/vkconventions.py +++ b/externals/Vulkan-Headers/registry/vkconventions.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 -i # -# Copyright (c) 2013-2020 The Khronos Group Inc. +# Copyright 2013-2021 The Khronos Group Inc.
# # SPDX-License-Identifier: Apache-2.0 @@ -87,6 +87,7 @@ class VulkanConventions(ConventionsBase): def generate_structure_type_from_name(self, structname): """Generate a structure type name, like VK_STRUCTURE_TYPE_CREATE_INSTANCE_INFO""" + structure_type_parts = [] # Tokenize into "words" for elem in MAIN_RE.findall(structname): @@ -95,7 +96,18 @@ class VulkanConventions(ConventionsBase): structure_type_parts.append('VK_STRUCTURE_TYPE') else: structure_type_parts.append(word.upper()) - return '_'.join(structure_type_parts) + name = '_'.join(structure_type_parts) + + # The simple-minded rules need modification for some structure names + subpats = [ + [ r'_H_(26[45])_', r'_H\1_' ], + [ r'_VULKAN_([0-9])([0-9])_', r'_VULKAN_\1_\2_' ], + [ r'_DIRECT_FB_', r'_DIRECTFB_' ], + ] + + for subpat in subpats: + name = re.sub(subpat[0], subpat[1], name) + return name @property def warning_comment(self): diff --git a/externals/sirit/CMakeLists.txt b/externals/sirit/CMakeLists.txt index 047041f27..3b0574fe5 100755 --- a/externals/sirit/CMakeLists.txt +++ b/externals/sirit/CMakeLists.txt @@ -17,7 +17,7 @@ if (NOT CMAKE_BUILD_TYPE) endif() # Set hard requirements for C++ -set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD 20) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) diff --git a/externals/sirit/externals/SPIRV-Headers/.gitignore b/externals/sirit/externals/SPIRV-Headers/.gitignore index 9bcdd5aba..f33592c90 100755 --- a/externals/sirit/externals/SPIRV-Headers/.gitignore +++ b/externals/sirit/externals/SPIRV-Headers/.gitignore @@ -1,2 +1,3 @@ build out +.DS_Store diff --git a/externals/sirit/externals/SPIRV-Headers/BUILD.bazel b/externals/sirit/externals/SPIRV-Headers/BUILD.bazel new file mode 100755 index 000000000..9cb46bfc8 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/BUILD.bazel @@ -0,0 +1,166 @@ +package( + default_visibility = ["//visibility:public"], +) + +licenses(["notice"]) + +exports_files(["LICENSE"]) + +filegroup( + name = "spirv_core_grammar_1.0", + srcs = ["include/spirv/1.0/spirv.core.grammar.json"], +) + +filegroup( + name = "spirv_glsl_grammar_1.0", + srcs = ["include/spirv/1.0/extinst.glsl.std.450.grammar.json"], +) + +filegroup( + name = "spirv_opencl_grammar_1.0", + srcs = ["include/spirv/1.0/extinst.opencl.std.100.grammar.json"], +) + +filegroup( + name = "spirv_core_grammar_1.1", + srcs = ["include/spirv/1.1/spirv.core.grammar.json"], +) + +filegroup( + name = "spirv_glsl_grammar_1.1", + srcs = ["include/spirv/1.1/extinst.glsl.std.450.grammar.json"], +) + +filegroup( + name = "spirv_opencl_grammar_1.1", + srcs = ["include/spirv/1.1/extinst.opencl.std.100.grammar.json"], +) + +filegroup( + name = "spirv_core_grammar_1.2", + srcs = ["include/spirv/1.2/spirv.core.grammar.json"], +) + +filegroup( + name = "spirv_glsl_grammar_1.2", + srcs = ["include/spirv/1.2/extinst.glsl.std.450.grammar.json"], +) + +filegroup( + name = "spirv_opencl_grammar_1.2", + srcs = ["include/spirv/1.2/extinst.opencl.std.100.grammar.json"], +) + +filegroup( + name = "spirv_core_grammar_unified1", + srcs = ["include/spirv/unified1/spirv.core.grammar.json"], +) + +filegroup( + name = "spirv_glsl_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.glsl.std.450.grammar.json"], +) + +filegroup( + name = "spirv_opencl_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.opencl.std.100.grammar.json"], +) + +filegroup( + name = "spirv_xml_registry", + srcs = ["include/spirv/spir-v.xml"], +) + +filegroup( + name = "spirv_ext_inst_debuginfo_grammar_unified1", + srcs 
= ["include/spirv/unified1/extinst.debuginfo.grammar.json"], +) + +filegroup( + name = "spirv_ext_inst_nonsemantic_clspvreflection_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.nonsemantic.clspvreflection.grammar.json"], +) + +filegroup( + name = "spirv_ext_inst_nonsemantic_debugprintf_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.nonsemantic.debugprintf.grammar.json"], +) + +filegroup( + name = "spirv_ext_inst_opencl_debuginfo_100_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json"], +) + +filegroup( + name = "spirv_ext_inst_spv_amd_gcn_shader_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.spv-amd-gcn-shader.grammar.json"], +) + +filegroup( + name = "spirv_ext_inst_spv_amd_shader_ballot_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.spv-amd-shader-ballot.grammar.json"], +) + +filegroup( + name = "spirv_ext_inst_spv_amd_shader_explicit_vertex_parameter_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.spv-amd-shader-explicit-vertex-parameter.grammar.json"], +) + +filegroup( + name = "spirv_ext_inst_spv_amd_shader_trinary_minmax_grammar_unified1", + srcs = ["include/spirv/unified1/extinst.spv-amd-shader-trinary-minmax.grammar.json"], +) + +cc_library( + name = "spirv_common_headers", + hdrs = [ + "include/spirv/1.0/GLSL.std.450.h", + "include/spirv/1.0/OpenCL.std.h", + "include/spirv/1.1/GLSL.std.450.h", + "include/spirv/1.1/OpenCL.std.h", + "include/spirv/1.2/GLSL.std.450.h", + "include/spirv/1.2/OpenCL.std.h", + "include/spirv/unified1/GLSL.std.450.h", + "include/spirv/unified1/NonSemanticClspvReflection.h", + "include/spirv/unified1/NonSemanticDebugPrintf.h", + "include/spirv/unified1/OpenCL.std.h", + ], + includes = ["include"], +) + +cc_library( + name = "spirv_c_headers", + hdrs = [ + "include/spirv/1.0/spirv.h", + "include/spirv/1.1/spirv.h", + "include/spirv/1.2/spirv.h", + "include/spirv/unified1/spirv.h", + ], + includes = ["include"], + deps = [":spirv_common_headers"], +) + +cc_library( + name = "spirv_cpp_headers", + hdrs = [ + "include/spirv/1.0/spirv.hpp", + "include/spirv/1.1/spirv.hpp", + "include/spirv/1.2/spirv.hpp", + "include/spirv/unified1/spirv.hpp", + ], + includes = ["include"], + deps = [":spirv_common_headers"], +) + +cc_library( + name = "spirv_cpp11_headers", + hdrs = [ + "include/spirv/1.0/spirv.hpp11", + "include/spirv/1.1/spirv.hpp11", + "include/spirv/1.2/spirv.hpp11", + "include/spirv/unified1/spirv.hpp11", + ], + includes = ["include"], + deps = [":spirv_common_headers"], +) + diff --git a/externals/sirit/externals/SPIRV-Headers/BUILD.gn b/externals/sirit/externals/SPIRV-Headers/BUILD.gn new file mode 100755 index 000000000..be3f43b3f --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/BUILD.gn @@ -0,0 +1,44 @@ +# Copyright (c) 2020 Google LLC +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and/or associated documentation files (the "Materials"), +# to deal in the Materials without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Materials, and to permit persons to whom the +# Materials are furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Materials. +# +# MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +# STANDARDS. 
THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +# HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +# +# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +# IN THE MATERIALS. + +config("spv_headers_public_config") { + include_dirs = [ "include" ] +} + +source_set("spv_headers") { + sources = [ + "include/spirv/1.2/GLSL.std.450.h", + "include/spirv/1.2/OpenCL.std.h", + "include/spirv/1.2/spirv.h", + "include/spirv/1.2/spirv.hpp", + "include/spirv/unified1/GLSL.std.450.h", + "include/spirv/unified1/NonSemanticClspvReflection.h", + "include/spirv/unified1/NonSemanticDebugPrintf.h", + "include/spirv/unified1/OpenCL.std.h", + "include/spirv/unified1/spirv.h", + "include/spirv/unified1/spirv.hpp", + ] + + public_configs = [ ":spv_headers_public_config" ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/CMakeLists.txt b/externals/sirit/externals/SPIRV-Headers/CMakeLists.txt index 2488baf0a..eb4694786 100755 --- a/externals/sirit/externals/SPIRV-Headers/CMakeLists.txt +++ b/externals/sirit/externals/SPIRV-Headers/CMakeLists.txt @@ -28,8 +28,8 @@ # The SPIR-V headers from the SPIR-V Registry # https://www.khronos.org/registry/spir-v/ # -cmake_minimum_required(VERSION 2.8.11) -project(SPIRV-Headers) +cmake_minimum_required(VERSION 3.0) +project(SPIRV-Headers VERSION 1.5.1) # There are two ways to use this project. # @@ -44,17 +44,84 @@ project(SPIRV-Headers) # 2. cmake .. # 3. cmake --build . 
--target install -file(GLOB_RECURSE HEADER_FILES - RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} - include/spirv/*) -foreach(HEADER_FILE ${HEADER_FILES}) - get_filename_component(HEADER_INSTALL_DIR ${HEADER_FILE} PATH) - install(FILES ${HEADER_FILE} DESTINATION ${HEADER_INSTALL_DIR}) -endforeach() - # legacy add_custom_target(install-headers COMMAND cmake -E copy_directory ${CMAKE_CURRENT_SOURCE_DIR}/include/spirv $ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/include/spirv) -add_subdirectory(example) +option(SPIRV_HEADERS_SKIP_EXAMPLES "Skip building examples" + ${SPIRV_HEADERS_SKIP_EXAMPLES}) + +option(SPIRV_HEADERS_SKIP_INSTALL "Skip install" + ${SPIRV_HEADERS_SKIP_INSTALL}) + +if(NOT ${SPIRV_HEADERS_SKIP_EXAMPLES}) + set(SPIRV_HEADERS_ENABLE_EXAMPLES ON) +endif() + +if(NOT ${SPIRV_HEADERS_SKIP_INSTALL}) + set(SPIRV_HEADERS_ENABLE_INSTALL ON) +endif() + +if (SPIRV_HEADERS_ENABLE_EXAMPLES) + message(STATUS "Building SPIRV-Header examples") + add_subdirectory(example) +endif() + +include(GNUInstallDirs) +add_library(${PROJECT_NAME} INTERFACE) +target_include_directories(${PROJECT_NAME} INTERFACE + $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include> +) + +# Installation + +if (SPIRV_HEADERS_ENABLE_INSTALL) + message(STATUS "Installing SPIRV-Header") + + set(config_install_dir "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") + + set(generated_dir "${CMAKE_CURRENT_BINARY_DIR}/generated") + + set(version_config "${generated_dir}/${PROJECT_NAME}ConfigVersion.cmake") + set(project_config "${generated_dir}/${PROJECT_NAME}Config.cmake") + set(TARGETS_EXPORT_NAME "${PROJECT_NAME}Targets") + set(namespace "${PROJECT_NAME}::") + + include(CMakePackageConfigHelpers) + write_basic_package_version_file( + "${version_config}" + COMPATIBILITY SameMajorVersion + ) + + configure_package_config_file( + "cmake/Config.cmake.in" + "${project_config}" + INSTALL_DESTINATION "${config_install_dir}" + ) + + install( + TARGETS ${PROJECT_NAME} + EXPORT "${TARGETS_EXPORT_NAME}" + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} + ) + + install( + DIRECTORY include/spirv + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} + ) + + install( + FILES "${project_config}" "${version_config}" + DESTINATION "${config_install_dir}" + ) + + install( + EXPORT "${TARGETS_EXPORT_NAME}" + NAMESPACE "${namespace}" + DESTINATION "${config_install_dir}" + ) +endif() diff --git a/externals/sirit/externals/SPIRV-Headers/CODE_OF_CONDUCT.md b/externals/sirit/externals/SPIRV-Headers/CODE_OF_CONDUCT.md new file mode 100755 index 000000000..a11610bd3 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/CODE_OF_CONDUCT.md @@ -0,0 +1 @@ +A reminder that this issue tracker is managed by the Khronos Group. Interactions here should follow the Khronos Code of Conduct (https://www.khronos.org/developers/code-of-conduct), which prohibits aggressive or derogatory language. Please keep the discussion friendly and civil. diff --git a/externals/sirit/externals/SPIRV-Headers/README.md b/externals/sirit/externals/SPIRV-Headers/README.md index 846b20d80..b1aa96455 100755 --- a/externals/sirit/externals/SPIRV-Headers/README.md +++ b/externals/sirit/externals/SPIRV-Headers/README.md @@ -23,12 +23,33 @@ When a new version or revision of the SPIR-V specification is published, the SPIR-V Working Group will push new commits onto master, updating the files under [include](include). -The SPIR-V XML registry file is updated by Khronos whenever a new enum range is allocated.
+[The SPIR-V XML registry file](include/spirv/spir-v.xml) +is updated by Khronos whenever a new enum range is allocated. -Pull requests can be made to +Pull requests can be made to - request allocation of new enum ranges in the XML registry file +- register a new magic number for a SPIR-V generator - reserve specific tokens in the JSON grammar +### Registering a SPIR-V Generator Magic Number + +Tools that generate SPIR-V should use a magic number in the SPIR-V to help identify the +generator. + +Care should be taken to follow existing precedent in populating the details of reserved tokens. +This includes: +- keeping generator numbers in numeric order +- filling out all the existing fields + +### Reserving tokens in the JSON grammar + +Care should be taken to follow existing precedent in populating the details of reserved tokens. +This includes: +- pointing to what extension has more information, when possible +- keeping enumerants in numeric order +- when there are aliases, listing the preferred spelling first +- adding the statement `"version" : "None"` + ## How to install the headers ``` @@ -45,6 +66,7 @@ If you want to install them somewhere else, then use ## Using the headers without installing +### Using CMake A CMake-based project can use the headers without installing, as follows: 1. Add an `add_subdirectory` directive to include this source tree. @@ -61,7 +83,56 @@ A CMake-based project can use the headers without installing, as follows: See also the [example](example/) subdirectory. But since that example is *inside* this repository, it doesn't use an `add_subdirectory` directive. -## Generating the headers from the JSON grammar +### Using Bazel +A Bazel-based project can use the headers without installing, as follows: + +1. Add SPIRV-Headers as a submodule of your project, and add a +`local_repository` to your `WORKSPACE` file. For example, if you place +SPIRV-Headers under `external/spirv-headers`, then add the following to your +`WORKSPACE` file: + +``` +local_repository( + name = "spirv_headers", + path = "external/spirv-headers", +) +``` + +2. Add one of the following to the `deps` attribute of your build target based +on your needs: +``` +@spirv_headers//:spirv_c_headers +@spirv_headers//:spirv_cpp_headers +@spirv_headers//:spirv_cpp11_headers +``` + +For example: + +``` +cc_library( + name = "project", + srcs = [ + # Path to project sources + ], + hdrs = [ + # Path to project headers + ], + deps = [ + "@spirv_headers//:spirv_c_headers", + # Other dependencies, + ], +) +``` + +3. In your C or C++ source code use `#include` directives that explicitly mention + the `spirv` path component. +``` +#include "spirv/unified1/GLSL.std.450.h" +#include "spirv/unified1/OpenCL.std.h" +#include "spirv/unified1/spirv.hpp" +``` + +## Generating headers from the JSON grammar for the SPIR-V core instruction set This will generally be done by Khronos, for a change to the JSON grammar. However, the project for the tool to do this is included in this repository, @@ -79,6 +150,26 @@ Notes: and that influences the languages used, for legacy reasons - the C++ structures built may similarly include more than strictly necessary, for the same reason +## Generating C headers for extended instruction sets + +The [GLSL.std.450.h](include/spirv/unified1/GLSL.std.450.h) +and [OpenCL.std.h](include/spirv/unified1/OpenCL.std.h) extended instruction set headers +are maintained manually. + +The C/C++ header for each of the other extended instruction sets +is generated from the corresponding JSON grammar file.
For example, the +[OpenCLDebugInfo100.h](include/spirv/unified1/OpenCLDebugInfo100.h) header +is generated from the +[extinst.opencl.debuginfo.100.grammar.json](include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json) +grammar file. + +To generate these C/C++ headers, first make sure `python3` is in your PATH, then +invoke the build script as follows: +``` +cd tools/buildHeaders +python3 bin/makeExtinstHeaders.py +``` + ## FAQ * *How are different versions published?* diff --git a/externals/sirit/externals/SPIRV-Headers/WORKSPACE b/externals/sirit/externals/SPIRV-Headers/WORKSPACE new file mode 100755 index 000000000..e69de29bb diff --git a/externals/sirit/externals/SPIRV-Headers/cmake/Config.cmake.in b/externals/sirit/externals/SPIRV-Headers/cmake/Config.cmake.in new file mode 100755 index 000000000..38bbde7b3 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/cmake/Config.cmake.in @@ -0,0 +1,4 @@ +@PACKAGE_INIT@ + +include("${CMAKE_CURRENT_LIST_DIR}/@TARGETS_EXPORT_NAME@.cmake") +check_required_components("@PROJECT_NAME@") diff --git a/externals/sirit/externals/SPIRV-Headers/example/CMakeLists.txt b/externals/sirit/externals/SPIRV-Headers/example/CMakeLists.txt index dff65d918..8b22f600a 100755 --- a/externals/sirit/externals/SPIRV-Headers/example/CMakeLists.txt +++ b/externals/sirit/externals/SPIRV-Headers/example/CMakeLists.txt @@ -2,8 +2,3 @@ add_library(SPIRV-Headers-example ${CMAKE_CURRENT_SOURCE_DIR}/example.cpp) target_include_directories(SPIRV-Headers-example PRIVATE ${SPIRV-Headers_SOURCE_DIR}/include) - -add_library(SPIRV-Headers-example-1.1 - ${CMAKE_CURRENT_SOURCE_DIR}/example-1.1.cpp) -target_include_directories(SPIRV-Headers-example-1.1 - PRIVATE ${SPIRV-Headers_SOURCE_DIR}/include) diff --git a/externals/sirit/externals/SPIRV-Headers/example/example.cpp b/externals/sirit/externals/SPIRV-Headers/example/example.cpp index 222c101d4..d79b62f22 100755 --- a/externals/sirit/externals/SPIRV-Headers/example/example.cpp +++ b/externals/sirit/externals/SPIRV-Headers/example/example.cpp @@ -24,9 +24,9 @@ // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE // MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. -#include -#include -#include +#include +#include +#include namespace { diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/1.0/spirv.cs b/externals/sirit/externals/SPIRV-Headers/include/spirv/1.0/spirv.cs new file mode 100755 index 000000000..de325cc4a --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/1.0/spirv.cs @@ -0,0 +1,993 @@ +// Copyright (c) 2014-2018 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and/or associated documentation files (the "Materials"), +// to deal in the Materials without restriction, including without limitation +// the rights to use, copy, modify, merge, publish, distribute, sublicense, +// and/or sell copies of the Materials, and to permit persons to whom the +// Materials are furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +// STANDARDS. 
THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +// IN THE MATERIALS. + +// This header is automatically generated by the same tool that creates +// the Binary Section of the SPIR-V specification. + +// Enumeration tokens for SPIR-V, in various styles: +// C, C++, C++11, JSON, Lua, Python, C# +// +// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL +// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL +// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL +// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL +// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +// - C# will use enum classes in the Specification class located in the "Spv" namespace, e.g.: Spv.Specification.SourceLanguage.GLSL +// +// Some tokens act like mask values, which can be OR'd together, +// while others are mutually exclusive. The mask-like ones have +// "Mask" in their name, and a parallel enum that has the shift +// amount (1 << x) for each corresponding enumerant. + +namespace Spv +{ + + public static class Specification + { + public const uint MagicNumber = 0x07230203; + public const uint Version = 0x00010000; + public const uint Revision = 12; + public const uint OpCodeMask = 0xffff; + public const uint WordCountShift = 16; + + public enum SourceLanguage + { + Unknown = 0, + ESSL = 1, + GLSL = 2, + OpenCL_C = 3, + OpenCL_CPP = 4, + HLSL = 5, + } + + public enum ExecutionModel + { + Vertex = 0, + TessellationControl = 1, + TessellationEvaluation = 2, + Geometry = 3, + Fragment = 4, + GLCompute = 5, + Kernel = 6, + } + + public enum AddressingModel + { + Logical = 0, + Physical32 = 1, + Physical64 = 2, + } + + public enum MemoryModel + { + Simple = 0, + GLSL450 = 1, + OpenCL = 2, + } + + public enum ExecutionMode + { + Invocations = 0, + SpacingEqual = 1, + SpacingFractionalEven = 2, + SpacingFractionalOdd = 3, + VertexOrderCw = 4, + VertexOrderCcw = 5, + PixelCenterInteger = 6, + OriginUpperLeft = 7, + OriginLowerLeft = 8, + EarlyFragmentTests = 9, + PointMode = 10, + Xfb = 11, + DepthReplacing = 12, + DepthGreater = 14, + DepthLess = 15, + DepthUnchanged = 16, + LocalSize = 17, + LocalSizeHint = 18, + InputPoints = 19, + InputLines = 20, + InputLinesAdjacency = 21, + Triangles = 22, + InputTrianglesAdjacency = 23, + Quads = 24, + Isolines = 25, + OutputVertices = 26, + OutputPoints = 27, + OutputLineStrip = 28, + OutputTriangleStrip = 29, + VecTypeHint = 30, + ContractionOff = 31, + PostDepthCoverage = 4446, + StencilRefReplacingEXT = 5027, + } + + public enum StorageClass + { + UniformConstant = 0, + Input = 1, + Uniform = 2, + Output = 3, + Workgroup = 4, + CrossWorkgroup = 5, + Private = 6, + Function = 7, + Generic = 8, + PushConstant = 9, + AtomicCounter = 10, + Image = 11, + StorageBuffer = 12, + } + + public enum Dim + { + Dim1D = 0, + Dim2D = 1, + Dim3D = 2, + Cube = 3, + Rect = 4, + Buffer = 5, + SubpassData = 6, + } + + public enum 
SamplerAddressingMode + { + None = 0, + ClampToEdge = 1, + Clamp = 2, + Repeat = 3, + RepeatMirrored = 4, + } + + public enum SamplerFilterMode + { + Nearest = 0, + Linear = 1, + } + + public enum ImageFormat + { + Unknown = 0, + Rgba32f = 1, + Rgba16f = 2, + R32f = 3, + Rgba8 = 4, + Rgba8Snorm = 5, + Rg32f = 6, + Rg16f = 7, + R11fG11fB10f = 8, + R16f = 9, + Rgba16 = 10, + Rgb10A2 = 11, + Rg16 = 12, + Rg8 = 13, + R16 = 14, + R8 = 15, + Rgba16Snorm = 16, + Rg16Snorm = 17, + Rg8Snorm = 18, + R16Snorm = 19, + R8Snorm = 20, + Rgba32i = 21, + Rgba16i = 22, + Rgba8i = 23, + R32i = 24, + Rg32i = 25, + Rg16i = 26, + Rg8i = 27, + R16i = 28, + R8i = 29, + Rgba32ui = 30, + Rgba16ui = 31, + Rgba8ui = 32, + R32ui = 33, + Rgb10a2ui = 34, + Rg32ui = 35, + Rg16ui = 36, + Rg8ui = 37, + R16ui = 38, + R8ui = 39, + } + + public enum ImageChannelOrder + { + R = 0, + A = 1, + RG = 2, + RA = 3, + RGB = 4, + RGBA = 5, + BGRA = 6, + ARGB = 7, + Intensity = 8, + Luminance = 9, + Rx = 10, + RGx = 11, + RGBx = 12, + Depth = 13, + DepthStencil = 14, + sRGB = 15, + sRGBx = 16, + sRGBA = 17, + sBGRA = 18, + ABGR = 19, + } + + public enum ImageChannelDataType + { + SnormInt8 = 0, + SnormInt16 = 1, + UnormInt8 = 2, + UnormInt16 = 3, + UnormShort565 = 4, + UnormShort555 = 5, + UnormInt101010 = 6, + SignedInt8 = 7, + SignedInt16 = 8, + SignedInt32 = 9, + UnsignedInt8 = 10, + UnsignedInt16 = 11, + UnsignedInt32 = 12, + HalfFloat = 13, + Float = 14, + UnormInt24 = 15, + UnormInt101010_2 = 16, + } + + public enum ImageOperandsShift + { + Bias = 0, + Lod = 1, + Grad = 2, + ConstOffset = 3, + Offset = 4, + ConstOffsets = 5, + Sample = 6, + MinLod = 7, + } + + public enum ImageOperandsMask + { + MaskNone = 0, + Bias = 0x00000001, + Lod = 0x00000002, + Grad = 0x00000004, + ConstOffset = 0x00000008, + Offset = 0x00000010, + ConstOffsets = 0x00000020, + Sample = 0x00000040, + MinLod = 0x00000080, + } + + public enum FPFastMathModeShift + { + NotNaN = 0, + NotInf = 1, + NSZ = 2, + AllowRecip = 3, + Fast = 4, + } + + public enum FPFastMathModeMask + { + MaskNone = 0, + NotNaN = 0x00000001, + NotInf = 0x00000002, + NSZ = 0x00000004, + AllowRecip = 0x00000008, + Fast = 0x00000010, + } + + public enum FPRoundingMode + { + RTE = 0, + RTZ = 1, + RTP = 2, + RTN = 3, + } + + public enum LinkageType + { + Export = 0, + Import = 1, + } + + public enum AccessQualifier + { + ReadOnly = 0, + WriteOnly = 1, + ReadWrite = 2, + } + + public enum FunctionParameterAttribute + { + Zext = 0, + Sext = 1, + ByVal = 2, + Sret = 3, + NoAlias = 4, + NoCapture = 5, + NoWrite = 6, + NoReadWrite = 7, + } + + public enum Decoration + { + RelaxedPrecision = 0, + SpecId = 1, + Block = 2, + BufferBlock = 3, + RowMajor = 4, + ColMajor = 5, + ArrayStride = 6, + MatrixStride = 7, + GLSLShared = 8, + GLSLPacked = 9, + CPacked = 10, + BuiltIn = 11, + NoPerspective = 13, + Flat = 14, + Patch = 15, + Centroid = 16, + Sample = 17, + Invariant = 18, + Restrict = 19, + Aliased = 20, + Volatile = 21, + Constant = 22, + Coherent = 23, + NonWritable = 24, + NonReadable = 25, + Uniform = 26, + SaturatedConversion = 28, + Stream = 29, + Location = 30, + Component = 31, + Index = 32, + Binding = 33, + DescriptorSet = 34, + Offset = 35, + XfbBuffer = 36, + XfbStride = 37, + FuncParamAttr = 38, + FPRoundingMode = 39, + FPFastMathMode = 40, + LinkageAttributes = 41, + NoContraction = 42, + InputAttachmentIndex = 43, + Alignment = 44, + ExplicitInterpAMD = 4999, + OverrideCoverageNV = 5248, + PassthroughNV = 5250, + ViewportRelativeNV = 5252, + SecondaryViewportRelativeNV = 5256, + 
HlslCounterBufferGOOGLE = 5634, + HlslSemanticGOOGLE = 5635, + } + + public enum BuiltIn + { + Position = 0, + PointSize = 1, + ClipDistance = 3, + CullDistance = 4, + VertexId = 5, + InstanceId = 6, + PrimitiveId = 7, + InvocationId = 8, + Layer = 9, + ViewportIndex = 10, + TessLevelOuter = 11, + TessLevelInner = 12, + TessCoord = 13, + PatchVertices = 14, + FragCoord = 15, + PointCoord = 16, + FrontFacing = 17, + SampleId = 18, + SamplePosition = 19, + SampleMask = 20, + FragDepth = 22, + HelperInvocation = 23, + NumWorkgroups = 24, + WorkgroupSize = 25, + WorkgroupId = 26, + LocalInvocationId = 27, + GlobalInvocationId = 28, + LocalInvocationIndex = 29, + WorkDim = 30, + GlobalSize = 31, + EnqueuedWorkgroupSize = 32, + GlobalOffset = 33, + GlobalLinearId = 34, + SubgroupSize = 36, + SubgroupMaxSize = 37, + NumSubgroups = 38, + NumEnqueuedSubgroups = 39, + SubgroupId = 40, + SubgroupLocalInvocationId = 41, + VertexIndex = 42, + InstanceIndex = 43, + SubgroupEqMaskKHR = 4416, + SubgroupGeMaskKHR = 4417, + SubgroupGtMaskKHR = 4418, + SubgroupLeMaskKHR = 4419, + SubgroupLtMaskKHR = 4420, + BaseVertex = 4424, + BaseInstance = 4425, + DrawIndex = 4426, + DeviceIndex = 4438, + ViewIndex = 4440, + BaryCoordNoPerspAMD = 4992, + BaryCoordNoPerspCentroidAMD = 4993, + BaryCoordNoPerspSampleAMD = 4994, + BaryCoordSmoothAMD = 4995, + BaryCoordSmoothCentroidAMD = 4996, + BaryCoordSmoothSampleAMD = 4997, + BaryCoordPullModelAMD = 4998, + FragStencilRefEXT = 5014, + ViewportMaskNV = 5253, + SecondaryPositionNV = 5257, + SecondaryViewportMaskNV = 5258, + PositionPerViewNV = 5261, + ViewportMaskPerViewNV = 5262, + } + + public enum SelectionControlShift + { + Flatten = 0, + DontFlatten = 1, + } + + public enum SelectionControlMask + { + MaskNone = 0, + Flatten = 0x00000001, + DontFlatten = 0x00000002, + } + + public enum LoopControlShift + { + Unroll = 0, + DontUnroll = 1, + } + + public enum LoopControlMask + { + MaskNone = 0, + Unroll = 0x00000001, + DontUnroll = 0x00000002, + } + + public enum FunctionControlShift + { + Inline = 0, + DontInline = 1, + Pure = 2, + Const = 3, + } + + public enum FunctionControlMask + { + MaskNone = 0, + Inline = 0x00000001, + DontInline = 0x00000002, + Pure = 0x00000004, + Const = 0x00000008, + } + + public enum MemorySemanticsShift + { + Acquire = 1, + Release = 2, + AcquireRelease = 3, + SequentiallyConsistent = 4, + UniformMemory = 6, + SubgroupMemory = 7, + WorkgroupMemory = 8, + CrossWorkgroupMemory = 9, + AtomicCounterMemory = 10, + ImageMemory = 11, + } + + public enum MemorySemanticsMask + { + MaskNone = 0, + Acquire = 0x00000002, + Release = 0x00000004, + AcquireRelease = 0x00000008, + SequentiallyConsistent = 0x00000010, + UniformMemory = 0x00000040, + SubgroupMemory = 0x00000080, + WorkgroupMemory = 0x00000100, + CrossWorkgroupMemory = 0x00000200, + AtomicCounterMemory = 0x00000400, + ImageMemory = 0x00000800, + } + + public enum MemoryAccessShift + { + Volatile = 0, + Aligned = 1, + Nontemporal = 2, + } + + public enum MemoryAccessMask + { + MaskNone = 0, + Volatile = 0x00000001, + Aligned = 0x00000002, + Nontemporal = 0x00000004, + } + + public enum Scope + { + CrossDevice = 0, + Device = 1, + Workgroup = 2, + Subgroup = 3, + Invocation = 4, + } + + public enum GroupOperation + { + Reduce = 0, + InclusiveScan = 1, + ExclusiveScan = 2, + } + + public enum KernelEnqueueFlags + { + NoWait = 0, + WaitKernel = 1, + WaitWorkGroup = 2, + } + + public enum KernelProfilingInfoShift + { + CmdExecTime = 0, + } + + public enum KernelProfilingInfoMask + { + MaskNone = 0, 
+ CmdExecTime = 0x00000001, + } + + public enum Capability + { + Matrix = 0, + Shader = 1, + Geometry = 2, + Tessellation = 3, + Addresses = 4, + Linkage = 5, + Kernel = 6, + Vector16 = 7, + Float16Buffer = 8, + Float16 = 9, + Float64 = 10, + Int64 = 11, + Int64Atomics = 12, + ImageBasic = 13, + ImageReadWrite = 14, + ImageMipmap = 15, + Pipes = 17, + Groups = 18, + DeviceEnqueue = 19, + LiteralSampler = 20, + AtomicStorage = 21, + Int16 = 22, + TessellationPointSize = 23, + GeometryPointSize = 24, + ImageGatherExtended = 25, + StorageImageMultisample = 27, + UniformBufferArrayDynamicIndexing = 28, + SampledImageArrayDynamicIndexing = 29, + StorageBufferArrayDynamicIndexing = 30, + StorageImageArrayDynamicIndexing = 31, + ClipDistance = 32, + CullDistance = 33, + ImageCubeArray = 34, + SampleRateShading = 35, + ImageRect = 36, + SampledRect = 37, + GenericPointer = 38, + Int8 = 39, + InputAttachment = 40, + SparseResidency = 41, + MinLod = 42, + Sampled1D = 43, + Image1D = 44, + SampledCubeArray = 45, + SampledBuffer = 46, + ImageBuffer = 47, + ImageMSArray = 48, + StorageImageExtendedFormats = 49, + ImageQuery = 50, + DerivativeControl = 51, + InterpolationFunction = 52, + TransformFeedback = 53, + GeometryStreams = 54, + StorageImageReadWithoutFormat = 55, + StorageImageWriteWithoutFormat = 56, + MultiViewport = 57, + SubgroupBallotKHR = 4423, + DrawParameters = 4427, + SubgroupVoteKHR = 4431, + StorageBuffer16BitAccess = 4433, + StorageUniformBufferBlock16 = 4433, + StorageUniform16 = 4434, + UniformAndStorageBuffer16BitAccess = 4434, + StoragePushConstant16 = 4435, + StorageInputOutput16 = 4436, + DeviceGroup = 4437, + MultiView = 4439, + VariablePointersStorageBuffer = 4441, + VariablePointers = 4442, + AtomicStorageOps = 4445, + SampleMaskPostDepthCoverage = 4447, + ImageGatherBiasLodAMD = 5009, + FragmentMaskAMD = 5010, + StencilExportEXT = 5013, + ImageReadWriteLodAMD = 5015, + SampleMaskOverrideCoverageNV = 5249, + GeometryShaderPassthroughNV = 5251, + ShaderViewportIndexLayerEXT = 5254, + ShaderViewportIndexLayerNV = 5254, + ShaderViewportMaskNV = 5255, + ShaderStereoViewNV = 5259, + PerViewAttributesNV = 5260, + SubgroupShuffleINTEL = 5568, + SubgroupBufferBlockIOINTEL = 5569, + SubgroupImageBlockIOINTEL = 5570, + } + + public enum Op + { + OpNop = 0, + OpUndef = 1, + OpSourceContinued = 2, + OpSource = 3, + OpSourceExtension = 4, + OpName = 5, + OpMemberName = 6, + OpString = 7, + OpLine = 8, + OpExtension = 10, + OpExtInstImport = 11, + OpExtInst = 12, + OpMemoryModel = 14, + OpEntryPoint = 15, + OpExecutionMode = 16, + OpCapability = 17, + OpTypeVoid = 19, + OpTypeBool = 20, + OpTypeInt = 21, + OpTypeFloat = 22, + OpTypeVector = 23, + OpTypeMatrix = 24, + OpTypeImage = 25, + OpTypeSampler = 26, + OpTypeSampledImage = 27, + OpTypeArray = 28, + OpTypeRuntimeArray = 29, + OpTypeStruct = 30, + OpTypeOpaque = 31, + OpTypePointer = 32, + OpTypeFunction = 33, + OpTypeEvent = 34, + OpTypeDeviceEvent = 35, + OpTypeReserveId = 36, + OpTypeQueue = 37, + OpTypePipe = 38, + OpTypeForwardPointer = 39, + OpConstantTrue = 41, + OpConstantFalse = 42, + OpConstant = 43, + OpConstantComposite = 44, + OpConstantSampler = 45, + OpConstantNull = 46, + OpSpecConstantTrue = 48, + OpSpecConstantFalse = 49, + OpSpecConstant = 50, + OpSpecConstantComposite = 51, + OpSpecConstantOp = 52, + OpFunction = 54, + OpFunctionParameter = 55, + OpFunctionEnd = 56, + OpFunctionCall = 57, + OpVariable = 59, + OpImageTexelPointer = 60, + OpLoad = 61, + OpStore = 62, + OpCopyMemory = 63, + OpCopyMemorySized = 64, + 
OpAccessChain = 65, + OpInBoundsAccessChain = 66, + OpPtrAccessChain = 67, + OpArrayLength = 68, + OpGenericPtrMemSemantics = 69, + OpInBoundsPtrAccessChain = 70, + OpDecorate = 71, + OpMemberDecorate = 72, + OpDecorationGroup = 73, + OpGroupDecorate = 74, + OpGroupMemberDecorate = 75, + OpVectorExtractDynamic = 77, + OpVectorInsertDynamic = 78, + OpVectorShuffle = 79, + OpCompositeConstruct = 80, + OpCompositeExtract = 81, + OpCompositeInsert = 82, + OpCopyObject = 83, + OpTranspose = 84, + OpSampledImage = 86, + OpImageSampleImplicitLod = 87, + OpImageSampleExplicitLod = 88, + OpImageSampleDrefImplicitLod = 89, + OpImageSampleDrefExplicitLod = 90, + OpImageSampleProjImplicitLod = 91, + OpImageSampleProjExplicitLod = 92, + OpImageSampleProjDrefImplicitLod = 93, + OpImageSampleProjDrefExplicitLod = 94, + OpImageFetch = 95, + OpImageGather = 96, + OpImageDrefGather = 97, + OpImageRead = 98, + OpImageWrite = 99, + OpImage = 100, + OpImageQueryFormat = 101, + OpImageQueryOrder = 102, + OpImageQuerySizeLod = 103, + OpImageQuerySize = 104, + OpImageQueryLod = 105, + OpImageQueryLevels = 106, + OpImageQuerySamples = 107, + OpConvertFToU = 109, + OpConvertFToS = 110, + OpConvertSToF = 111, + OpConvertUToF = 112, + OpUConvert = 113, + OpSConvert = 114, + OpFConvert = 115, + OpQuantizeToF16 = 116, + OpConvertPtrToU = 117, + OpSatConvertSToU = 118, + OpSatConvertUToS = 119, + OpConvertUToPtr = 120, + OpPtrCastToGeneric = 121, + OpGenericCastToPtr = 122, + OpGenericCastToPtrExplicit = 123, + OpBitcast = 124, + OpSNegate = 126, + OpFNegate = 127, + OpIAdd = 128, + OpFAdd = 129, + OpISub = 130, + OpFSub = 131, + OpIMul = 132, + OpFMul = 133, + OpUDiv = 134, + OpSDiv = 135, + OpFDiv = 136, + OpUMod = 137, + OpSRem = 138, + OpSMod = 139, + OpFRem = 140, + OpFMod = 141, + OpVectorTimesScalar = 142, + OpMatrixTimesScalar = 143, + OpVectorTimesMatrix = 144, + OpMatrixTimesVector = 145, + OpMatrixTimesMatrix = 146, + OpOuterProduct = 147, + OpDot = 148, + OpIAddCarry = 149, + OpISubBorrow = 150, + OpUMulExtended = 151, + OpSMulExtended = 152, + OpAny = 154, + OpAll = 155, + OpIsNan = 156, + OpIsInf = 157, + OpIsFinite = 158, + OpIsNormal = 159, + OpSignBitSet = 160, + OpLessOrGreater = 161, + OpOrdered = 162, + OpUnordered = 163, + OpLogicalEqual = 164, + OpLogicalNotEqual = 165, + OpLogicalOr = 166, + OpLogicalAnd = 167, + OpLogicalNot = 168, + OpSelect = 169, + OpIEqual = 170, + OpINotEqual = 171, + OpUGreaterThan = 172, + OpSGreaterThan = 173, + OpUGreaterThanEqual = 174, + OpSGreaterThanEqual = 175, + OpULessThan = 176, + OpSLessThan = 177, + OpULessThanEqual = 178, + OpSLessThanEqual = 179, + OpFOrdEqual = 180, + OpFUnordEqual = 181, + OpFOrdNotEqual = 182, + OpFUnordNotEqual = 183, + OpFOrdLessThan = 184, + OpFUnordLessThan = 185, + OpFOrdGreaterThan = 186, + OpFUnordGreaterThan = 187, + OpFOrdLessThanEqual = 188, + OpFUnordLessThanEqual = 189, + OpFOrdGreaterThanEqual = 190, + OpFUnordGreaterThanEqual = 191, + OpShiftRightLogical = 194, + OpShiftRightArithmetic = 195, + OpShiftLeftLogical = 196, + OpBitwiseOr = 197, + OpBitwiseXor = 198, + OpBitwiseAnd = 199, + OpNot = 200, + OpBitFieldInsert = 201, + OpBitFieldSExtract = 202, + OpBitFieldUExtract = 203, + OpBitReverse = 204, + OpBitCount = 205, + OpDPdx = 207, + OpDPdy = 208, + OpFwidth = 209, + OpDPdxFine = 210, + OpDPdyFine = 211, + OpFwidthFine = 212, + OpDPdxCoarse = 213, + OpDPdyCoarse = 214, + OpFwidthCoarse = 215, + OpEmitVertex = 218, + OpEndPrimitive = 219, + OpEmitStreamVertex = 220, + OpEndStreamPrimitive = 221, + OpControlBarrier = 224, + 
OpMemoryBarrier = 225, + OpAtomicLoad = 227, + OpAtomicStore = 228, + OpAtomicExchange = 229, + OpAtomicCompareExchange = 230, + OpAtomicCompareExchangeWeak = 231, + OpAtomicIIncrement = 232, + OpAtomicIDecrement = 233, + OpAtomicIAdd = 234, + OpAtomicISub = 235, + OpAtomicSMin = 236, + OpAtomicUMin = 237, + OpAtomicSMax = 238, + OpAtomicUMax = 239, + OpAtomicAnd = 240, + OpAtomicOr = 241, + OpAtomicXor = 242, + OpPhi = 245, + OpLoopMerge = 246, + OpSelectionMerge = 247, + OpLabel = 248, + OpBranch = 249, + OpBranchConditional = 250, + OpSwitch = 251, + OpKill = 252, + OpReturn = 253, + OpReturnValue = 254, + OpUnreachable = 255, + OpLifetimeStart = 256, + OpLifetimeStop = 257, + OpGroupAsyncCopy = 259, + OpGroupWaitEvents = 260, + OpGroupAll = 261, + OpGroupAny = 262, + OpGroupBroadcast = 263, + OpGroupIAdd = 264, + OpGroupFAdd = 265, + OpGroupFMin = 266, + OpGroupUMin = 267, + OpGroupSMin = 268, + OpGroupFMax = 269, + OpGroupUMax = 270, + OpGroupSMax = 271, + OpReadPipe = 274, + OpWritePipe = 275, + OpReservedReadPipe = 276, + OpReservedWritePipe = 277, + OpReserveReadPipePackets = 278, + OpReserveWritePipePackets = 279, + OpCommitReadPipe = 280, + OpCommitWritePipe = 281, + OpIsValidReserveId = 282, + OpGetNumPipePackets = 283, + OpGetMaxPipePackets = 284, + OpGroupReserveReadPipePackets = 285, + OpGroupReserveWritePipePackets = 286, + OpGroupCommitReadPipe = 287, + OpGroupCommitWritePipe = 288, + OpEnqueueMarker = 291, + OpEnqueueKernel = 292, + OpGetKernelNDrangeSubGroupCount = 293, + OpGetKernelNDrangeMaxSubGroupSize = 294, + OpGetKernelWorkGroupSize = 295, + OpGetKernelPreferredWorkGroupSizeMultiple = 296, + OpRetainEvent = 297, + OpReleaseEvent = 298, + OpCreateUserEvent = 299, + OpIsValidEvent = 300, + OpSetUserEventStatus = 301, + OpCaptureEventProfilingInfo = 302, + OpGetDefaultQueue = 303, + OpBuildNDRange = 304, + OpImageSparseSampleImplicitLod = 305, + OpImageSparseSampleExplicitLod = 306, + OpImageSparseSampleDrefImplicitLod = 307, + OpImageSparseSampleDrefExplicitLod = 308, + OpImageSparseSampleProjImplicitLod = 309, + OpImageSparseSampleProjExplicitLod = 310, + OpImageSparseSampleProjDrefImplicitLod = 311, + OpImageSparseSampleProjDrefExplicitLod = 312, + OpImageSparseFetch = 313, + OpImageSparseGather = 314, + OpImageSparseDrefGather = 315, + OpImageSparseTexelsResident = 316, + OpNoLine = 317, + OpAtomicFlagTestAndSet = 318, + OpAtomicFlagClear = 319, + OpImageSparseRead = 320, + OpDecorateId = 332, + OpSubgroupBallotKHR = 4421, + OpSubgroupFirstInvocationKHR = 4422, + OpSubgroupAllKHR = 4428, + OpSubgroupAnyKHR = 4429, + OpSubgroupAllEqualKHR = 4430, + OpSubgroupReadInvocationKHR = 4432, + OpGroupIAddNonUniformAMD = 5000, + OpGroupFAddNonUniformAMD = 5001, + OpGroupFMinNonUniformAMD = 5002, + OpGroupUMinNonUniformAMD = 5003, + OpGroupSMinNonUniformAMD = 5004, + OpGroupFMaxNonUniformAMD = 5005, + OpGroupUMaxNonUniformAMD = 5006, + OpGroupSMaxNonUniformAMD = 5007, + OpFragmentMaskFetchAMD = 5011, + OpFragmentFetchAMD = 5012, + OpSubgroupShuffleINTEL = 5571, + OpSubgroupShuffleDownINTEL = 5572, + OpSubgroupShuffleUpINTEL = 5573, + OpSubgroupShuffleXorINTEL = 5574, + OpSubgroupBlockReadINTEL = 5575, + OpSubgroupBlockWriteINTEL = 5576, + OpSubgroupImageBlockReadINTEL = 5577, + OpSubgroupImageBlockWriteINTEL = 5578, + OpDecorateStringGOOGLE = 5632, + OpMemberDecorateStringGOOGLE = 5633, + } + } +} + diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/1.1/spirv.cs b/externals/sirit/externals/SPIRV-Headers/include/spirv/1.1/spirv.cs new file mode 100755 index 
000000000..99194e514 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/1.1/spirv.cs @@ -0,0 +1,1015 @@ +// Copyright (c) 2014-2018 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and/or associated documentation files (the "Materials"), +// to deal in the Materials without restriction, including without limitation +// the rights to use, copy, modify, merge, publish, distribute, sublicense, +// and/or sell copies of the Materials, and to permit persons to whom the +// Materials are furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +// IN THE MATERIALS. + +// This header is automatically generated by the same tool that creates +// the Binary Section of the SPIR-V specification. + +// Enumeration tokens for SPIR-V, in various styles: +// C, C++, C++11, JSON, Lua, Python, C# +// +// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL +// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL +// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL +// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL +// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +// - C# will use enum classes in the Specification class located in the "Spv" namespace, e.g.: Spv.Specification.SourceLanguage.GLSL +// +// Some tokens act like mask values, which can be OR'd together, +// while others are mutually exclusive. The mask-like ones have +// "Mask" in their name, and a parallel enum that has the shift +// amount (1 << x) for each corresponding enumerant. 
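As the generated comment above notes, each *Mask enum pairs with a *Shift enum such that every mask bit equals 1 << the corresponding shift enumerant, and the OpCodeMask/WordCountShift constants defined just below describe how the first word of every SPIR-V instruction is packed. A minimal C# sketch of both conventions, assuming the Spv.Specification class this file generates (the helper class and method names are illustrative, not part of the header):

    using System;
    using Spv;

    static class SpecUsageSketch
    {
        // Mask enums are OR-able bit sets; each enumerant is 1 << its *Shift twin,
        // e.g. ImageOperandsMask.Lod (0x2) == 1u << (int)ImageOperandsShift.Lod (1).
        static uint WithBiasAndLod()
        {
            return (uint)Specification.ImageOperandsMask.Bias |
                   (uint)Specification.ImageOperandsMask.Lod;
        }

        // The first word of an instruction packs the word count above the opcode:
        // high 16 bits = word count, low 16 bits = opcode.
        static void DecodeFirstWord(uint firstWord)
        {
            var op = (Specification.Op)(firstWord & Specification.OpCodeMask);
            uint wordCount = firstWord >> (int)Specification.WordCountShift;
            Console.WriteLine($"{op}: {wordCount} words");
        }
    }
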
+ +namespace Spv +{ + + public static class Specification + { + public const uint MagicNumber = 0x07230203; + public const uint Version = 0x00010100; + public const uint Revision = 8; + public const uint OpCodeMask = 0xffff; + public const uint WordCountShift = 16; + + public enum SourceLanguage + { + Unknown = 0, + ESSL = 1, + GLSL = 2, + OpenCL_C = 3, + OpenCL_CPP = 4, + HLSL = 5, + } + + public enum ExecutionModel + { + Vertex = 0, + TessellationControl = 1, + TessellationEvaluation = 2, + Geometry = 3, + Fragment = 4, + GLCompute = 5, + Kernel = 6, + } + + public enum AddressingModel + { + Logical = 0, + Physical32 = 1, + Physical64 = 2, + } + + public enum MemoryModel + { + Simple = 0, + GLSL450 = 1, + OpenCL = 2, + } + + public enum ExecutionMode + { + Invocations = 0, + SpacingEqual = 1, + SpacingFractionalEven = 2, + SpacingFractionalOdd = 3, + VertexOrderCw = 4, + VertexOrderCcw = 5, + PixelCenterInteger = 6, + OriginUpperLeft = 7, + OriginLowerLeft = 8, + EarlyFragmentTests = 9, + PointMode = 10, + Xfb = 11, + DepthReplacing = 12, + DepthGreater = 14, + DepthLess = 15, + DepthUnchanged = 16, + LocalSize = 17, + LocalSizeHint = 18, + InputPoints = 19, + InputLines = 20, + InputLinesAdjacency = 21, + Triangles = 22, + InputTrianglesAdjacency = 23, + Quads = 24, + Isolines = 25, + OutputVertices = 26, + OutputPoints = 27, + OutputLineStrip = 28, + OutputTriangleStrip = 29, + VecTypeHint = 30, + ContractionOff = 31, + Initializer = 33, + Finalizer = 34, + SubgroupSize = 35, + SubgroupsPerWorkgroup = 36, + PostDepthCoverage = 4446, + StencilRefReplacingEXT = 5027, + } + + public enum StorageClass + { + UniformConstant = 0, + Input = 1, + Uniform = 2, + Output = 3, + Workgroup = 4, + CrossWorkgroup = 5, + Private = 6, + Function = 7, + Generic = 8, + PushConstant = 9, + AtomicCounter = 10, + Image = 11, + StorageBuffer = 12, + } + + public enum Dim + { + Dim1D = 0, + Dim2D = 1, + Dim3D = 2, + Cube = 3, + Rect = 4, + Buffer = 5, + SubpassData = 6, + } + + public enum SamplerAddressingMode + { + None = 0, + ClampToEdge = 1, + Clamp = 2, + Repeat = 3, + RepeatMirrored = 4, + } + + public enum SamplerFilterMode + { + Nearest = 0, + Linear = 1, + } + + public enum ImageFormat + { + Unknown = 0, + Rgba32f = 1, + Rgba16f = 2, + R32f = 3, + Rgba8 = 4, + Rgba8Snorm = 5, + Rg32f = 6, + Rg16f = 7, + R11fG11fB10f = 8, + R16f = 9, + Rgba16 = 10, + Rgb10A2 = 11, + Rg16 = 12, + Rg8 = 13, + R16 = 14, + R8 = 15, + Rgba16Snorm = 16, + Rg16Snorm = 17, + Rg8Snorm = 18, + R16Snorm = 19, + R8Snorm = 20, + Rgba32i = 21, + Rgba16i = 22, + Rgba8i = 23, + R32i = 24, + Rg32i = 25, + Rg16i = 26, + Rg8i = 27, + R16i = 28, + R8i = 29, + Rgba32ui = 30, + Rgba16ui = 31, + Rgba8ui = 32, + R32ui = 33, + Rgb10a2ui = 34, + Rg32ui = 35, + Rg16ui = 36, + Rg8ui = 37, + R16ui = 38, + R8ui = 39, + } + + public enum ImageChannelOrder + { + R = 0, + A = 1, + RG = 2, + RA = 3, + RGB = 4, + RGBA = 5, + BGRA = 6, + ARGB = 7, + Intensity = 8, + Luminance = 9, + Rx = 10, + RGx = 11, + RGBx = 12, + Depth = 13, + DepthStencil = 14, + sRGB = 15, + sRGBx = 16, + sRGBA = 17, + sBGRA = 18, + ABGR = 19, + } + + public enum ImageChannelDataType + { + SnormInt8 = 0, + SnormInt16 = 1, + UnormInt8 = 2, + UnormInt16 = 3, + UnormShort565 = 4, + UnormShort555 = 5, + UnormInt101010 = 6, + SignedInt8 = 7, + SignedInt16 = 8, + SignedInt32 = 9, + UnsignedInt8 = 10, + UnsignedInt16 = 11, + UnsignedInt32 = 12, + HalfFloat = 13, + Float = 14, + UnormInt24 = 15, + UnormInt101010_2 = 16, + } + + public enum ImageOperandsShift + { + Bias = 0, + Lod = 1, + 
Grad = 2, + ConstOffset = 3, + Offset = 4, + ConstOffsets = 5, + Sample = 6, + MinLod = 7, + } + + public enum ImageOperandsMask + { + MaskNone = 0, + Bias = 0x00000001, + Lod = 0x00000002, + Grad = 0x00000004, + ConstOffset = 0x00000008, + Offset = 0x00000010, + ConstOffsets = 0x00000020, + Sample = 0x00000040, + MinLod = 0x00000080, + } + + public enum FPFastMathModeShift + { + NotNaN = 0, + NotInf = 1, + NSZ = 2, + AllowRecip = 3, + Fast = 4, + } + + public enum FPFastMathModeMask + { + MaskNone = 0, + NotNaN = 0x00000001, + NotInf = 0x00000002, + NSZ = 0x00000004, + AllowRecip = 0x00000008, + Fast = 0x00000010, + } + + public enum FPRoundingMode + { + RTE = 0, + RTZ = 1, + RTP = 2, + RTN = 3, + } + + public enum LinkageType + { + Export = 0, + Import = 1, + } + + public enum AccessQualifier + { + ReadOnly = 0, + WriteOnly = 1, + ReadWrite = 2, + } + + public enum FunctionParameterAttribute + { + Zext = 0, + Sext = 1, + ByVal = 2, + Sret = 3, + NoAlias = 4, + NoCapture = 5, + NoWrite = 6, + NoReadWrite = 7, + } + + public enum Decoration + { + RelaxedPrecision = 0, + SpecId = 1, + Block = 2, + BufferBlock = 3, + RowMajor = 4, + ColMajor = 5, + ArrayStride = 6, + MatrixStride = 7, + GLSLShared = 8, + GLSLPacked = 9, + CPacked = 10, + BuiltIn = 11, + NoPerspective = 13, + Flat = 14, + Patch = 15, + Centroid = 16, + Sample = 17, + Invariant = 18, + Restrict = 19, + Aliased = 20, + Volatile = 21, + Constant = 22, + Coherent = 23, + NonWritable = 24, + NonReadable = 25, + Uniform = 26, + SaturatedConversion = 28, + Stream = 29, + Location = 30, + Component = 31, + Index = 32, + Binding = 33, + DescriptorSet = 34, + Offset = 35, + XfbBuffer = 36, + XfbStride = 37, + FuncParamAttr = 38, + FPRoundingMode = 39, + FPFastMathMode = 40, + LinkageAttributes = 41, + NoContraction = 42, + InputAttachmentIndex = 43, + Alignment = 44, + MaxByteOffset = 45, + ExplicitInterpAMD = 4999, + OverrideCoverageNV = 5248, + PassthroughNV = 5250, + ViewportRelativeNV = 5252, + SecondaryViewportRelativeNV = 5256, + HlslCounterBufferGOOGLE = 5634, + HlslSemanticGOOGLE = 5635, + } + + public enum BuiltIn + { + Position = 0, + PointSize = 1, + ClipDistance = 3, + CullDistance = 4, + VertexId = 5, + InstanceId = 6, + PrimitiveId = 7, + InvocationId = 8, + Layer = 9, + ViewportIndex = 10, + TessLevelOuter = 11, + TessLevelInner = 12, + TessCoord = 13, + PatchVertices = 14, + FragCoord = 15, + PointCoord = 16, + FrontFacing = 17, + SampleId = 18, + SamplePosition = 19, + SampleMask = 20, + FragDepth = 22, + HelperInvocation = 23, + NumWorkgroups = 24, + WorkgroupSize = 25, + WorkgroupId = 26, + LocalInvocationId = 27, + GlobalInvocationId = 28, + LocalInvocationIndex = 29, + WorkDim = 30, + GlobalSize = 31, + EnqueuedWorkgroupSize = 32, + GlobalOffset = 33, + GlobalLinearId = 34, + SubgroupSize = 36, + SubgroupMaxSize = 37, + NumSubgroups = 38, + NumEnqueuedSubgroups = 39, + SubgroupId = 40, + SubgroupLocalInvocationId = 41, + VertexIndex = 42, + InstanceIndex = 43, + SubgroupEqMaskKHR = 4416, + SubgroupGeMaskKHR = 4417, + SubgroupGtMaskKHR = 4418, + SubgroupLeMaskKHR = 4419, + SubgroupLtMaskKHR = 4420, + BaseVertex = 4424, + BaseInstance = 4425, + DrawIndex = 4426, + DeviceIndex = 4438, + ViewIndex = 4440, + BaryCoordNoPerspAMD = 4992, + BaryCoordNoPerspCentroidAMD = 4993, + BaryCoordNoPerspSampleAMD = 4994, + BaryCoordSmoothAMD = 4995, + BaryCoordSmoothCentroidAMD = 4996, + BaryCoordSmoothSampleAMD = 4997, + BaryCoordPullModelAMD = 4998, + FragStencilRefEXT = 5014, + ViewportMaskNV = 5253, + SecondaryPositionNV = 5257, 
+ SecondaryViewportMaskNV = 5258, + PositionPerViewNV = 5261, + ViewportMaskPerViewNV = 5262, + } + + public enum SelectionControlShift + { + Flatten = 0, + DontFlatten = 1, + } + + public enum SelectionControlMask + { + MaskNone = 0, + Flatten = 0x00000001, + DontFlatten = 0x00000002, + } + + public enum LoopControlShift + { + Unroll = 0, + DontUnroll = 1, + DependencyInfinite = 2, + DependencyLength = 3, + } + + public enum LoopControlMask + { + MaskNone = 0, + Unroll = 0x00000001, + DontUnroll = 0x00000002, + DependencyInfinite = 0x00000004, + DependencyLength = 0x00000008, + } + + public enum FunctionControlShift + { + Inline = 0, + DontInline = 1, + Pure = 2, + Const = 3, + } + + public enum FunctionControlMask + { + MaskNone = 0, + Inline = 0x00000001, + DontInline = 0x00000002, + Pure = 0x00000004, + Const = 0x00000008, + } + + public enum MemorySemanticsShift + { + Acquire = 1, + Release = 2, + AcquireRelease = 3, + SequentiallyConsistent = 4, + UniformMemory = 6, + SubgroupMemory = 7, + WorkgroupMemory = 8, + CrossWorkgroupMemory = 9, + AtomicCounterMemory = 10, + ImageMemory = 11, + } + + public enum MemorySemanticsMask + { + MaskNone = 0, + Acquire = 0x00000002, + Release = 0x00000004, + AcquireRelease = 0x00000008, + SequentiallyConsistent = 0x00000010, + UniformMemory = 0x00000040, + SubgroupMemory = 0x00000080, + WorkgroupMemory = 0x00000100, + CrossWorkgroupMemory = 0x00000200, + AtomicCounterMemory = 0x00000400, + ImageMemory = 0x00000800, + } + + public enum MemoryAccessShift + { + Volatile = 0, + Aligned = 1, + Nontemporal = 2, + } + + public enum MemoryAccessMask + { + MaskNone = 0, + Volatile = 0x00000001, + Aligned = 0x00000002, + Nontemporal = 0x00000004, + } + + public enum Scope + { + CrossDevice = 0, + Device = 1, + Workgroup = 2, + Subgroup = 3, + Invocation = 4, + } + + public enum GroupOperation + { + Reduce = 0, + InclusiveScan = 1, + ExclusiveScan = 2, + } + + public enum KernelEnqueueFlags + { + NoWait = 0, + WaitKernel = 1, + WaitWorkGroup = 2, + } + + public enum KernelProfilingInfoShift + { + CmdExecTime = 0, + } + + public enum KernelProfilingInfoMask + { + MaskNone = 0, + CmdExecTime = 0x00000001, + } + + public enum Capability + { + Matrix = 0, + Shader = 1, + Geometry = 2, + Tessellation = 3, + Addresses = 4, + Linkage = 5, + Kernel = 6, + Vector16 = 7, + Float16Buffer = 8, + Float16 = 9, + Float64 = 10, + Int64 = 11, + Int64Atomics = 12, + ImageBasic = 13, + ImageReadWrite = 14, + ImageMipmap = 15, + Pipes = 17, + Groups = 18, + DeviceEnqueue = 19, + LiteralSampler = 20, + AtomicStorage = 21, + Int16 = 22, + TessellationPointSize = 23, + GeometryPointSize = 24, + ImageGatherExtended = 25, + StorageImageMultisample = 27, + UniformBufferArrayDynamicIndexing = 28, + SampledImageArrayDynamicIndexing = 29, + StorageBufferArrayDynamicIndexing = 30, + StorageImageArrayDynamicIndexing = 31, + ClipDistance = 32, + CullDistance = 33, + ImageCubeArray = 34, + SampleRateShading = 35, + ImageRect = 36, + SampledRect = 37, + GenericPointer = 38, + Int8 = 39, + InputAttachment = 40, + SparseResidency = 41, + MinLod = 42, + Sampled1D = 43, + Image1D = 44, + SampledCubeArray = 45, + SampledBuffer = 46, + ImageBuffer = 47, + ImageMSArray = 48, + StorageImageExtendedFormats = 49, + ImageQuery = 50, + DerivativeControl = 51, + InterpolationFunction = 52, + TransformFeedback = 53, + GeometryStreams = 54, + StorageImageReadWithoutFormat = 55, + StorageImageWriteWithoutFormat = 56, + MultiViewport = 57, + SubgroupDispatch = 58, + NamedBarrier = 59, + PipeStorage = 60, + 
SubgroupBallotKHR = 4423, + DrawParameters = 4427, + SubgroupVoteKHR = 4431, + StorageBuffer16BitAccess = 4433, + StorageUniformBufferBlock16 = 4433, + StorageUniform16 = 4434, + UniformAndStorageBuffer16BitAccess = 4434, + StoragePushConstant16 = 4435, + StorageInputOutput16 = 4436, + DeviceGroup = 4437, + MultiView = 4439, + VariablePointersStorageBuffer = 4441, + VariablePointers = 4442, + AtomicStorageOps = 4445, + SampleMaskPostDepthCoverage = 4447, + ImageGatherBiasLodAMD = 5009, + FragmentMaskAMD = 5010, + StencilExportEXT = 5013, + ImageReadWriteLodAMD = 5015, + SampleMaskOverrideCoverageNV = 5249, + GeometryShaderPassthroughNV = 5251, + ShaderViewportIndexLayerEXT = 5254, + ShaderViewportIndexLayerNV = 5254, + ShaderViewportMaskNV = 5255, + ShaderStereoViewNV = 5259, + PerViewAttributesNV = 5260, + SubgroupShuffleINTEL = 5568, + SubgroupBufferBlockIOINTEL = 5569, + SubgroupImageBlockIOINTEL = 5570, + } + + public enum Op + { + OpNop = 0, + OpUndef = 1, + OpSourceContinued = 2, + OpSource = 3, + OpSourceExtension = 4, + OpName = 5, + OpMemberName = 6, + OpString = 7, + OpLine = 8, + OpExtension = 10, + OpExtInstImport = 11, + OpExtInst = 12, + OpMemoryModel = 14, + OpEntryPoint = 15, + OpExecutionMode = 16, + OpCapability = 17, + OpTypeVoid = 19, + OpTypeBool = 20, + OpTypeInt = 21, + OpTypeFloat = 22, + OpTypeVector = 23, + OpTypeMatrix = 24, + OpTypeImage = 25, + OpTypeSampler = 26, + OpTypeSampledImage = 27, + OpTypeArray = 28, + OpTypeRuntimeArray = 29, + OpTypeStruct = 30, + OpTypeOpaque = 31, + OpTypePointer = 32, + OpTypeFunction = 33, + OpTypeEvent = 34, + OpTypeDeviceEvent = 35, + OpTypeReserveId = 36, + OpTypeQueue = 37, + OpTypePipe = 38, + OpTypeForwardPointer = 39, + OpConstantTrue = 41, + OpConstantFalse = 42, + OpConstant = 43, + OpConstantComposite = 44, + OpConstantSampler = 45, + OpConstantNull = 46, + OpSpecConstantTrue = 48, + OpSpecConstantFalse = 49, + OpSpecConstant = 50, + OpSpecConstantComposite = 51, + OpSpecConstantOp = 52, + OpFunction = 54, + OpFunctionParameter = 55, + OpFunctionEnd = 56, + OpFunctionCall = 57, + OpVariable = 59, + OpImageTexelPointer = 60, + OpLoad = 61, + OpStore = 62, + OpCopyMemory = 63, + OpCopyMemorySized = 64, + OpAccessChain = 65, + OpInBoundsAccessChain = 66, + OpPtrAccessChain = 67, + OpArrayLength = 68, + OpGenericPtrMemSemantics = 69, + OpInBoundsPtrAccessChain = 70, + OpDecorate = 71, + OpMemberDecorate = 72, + OpDecorationGroup = 73, + OpGroupDecorate = 74, + OpGroupMemberDecorate = 75, + OpVectorExtractDynamic = 77, + OpVectorInsertDynamic = 78, + OpVectorShuffle = 79, + OpCompositeConstruct = 80, + OpCompositeExtract = 81, + OpCompositeInsert = 82, + OpCopyObject = 83, + OpTranspose = 84, + OpSampledImage = 86, + OpImageSampleImplicitLod = 87, + OpImageSampleExplicitLod = 88, + OpImageSampleDrefImplicitLod = 89, + OpImageSampleDrefExplicitLod = 90, + OpImageSampleProjImplicitLod = 91, + OpImageSampleProjExplicitLod = 92, + OpImageSampleProjDrefImplicitLod = 93, + OpImageSampleProjDrefExplicitLod = 94, + OpImageFetch = 95, + OpImageGather = 96, + OpImageDrefGather = 97, + OpImageRead = 98, + OpImageWrite = 99, + OpImage = 100, + OpImageQueryFormat = 101, + OpImageQueryOrder = 102, + OpImageQuerySizeLod = 103, + OpImageQuerySize = 104, + OpImageQueryLod = 105, + OpImageQueryLevels = 106, + OpImageQuerySamples = 107, + OpConvertFToU = 109, + OpConvertFToS = 110, + OpConvertSToF = 111, + OpConvertUToF = 112, + OpUConvert = 113, + OpSConvert = 114, + OpFConvert = 115, + OpQuantizeToF16 = 116, + OpConvertPtrToU = 117, + 
OpSatConvertSToU = 118, + OpSatConvertUToS = 119, + OpConvertUToPtr = 120, + OpPtrCastToGeneric = 121, + OpGenericCastToPtr = 122, + OpGenericCastToPtrExplicit = 123, + OpBitcast = 124, + OpSNegate = 126, + OpFNegate = 127, + OpIAdd = 128, + OpFAdd = 129, + OpISub = 130, + OpFSub = 131, + OpIMul = 132, + OpFMul = 133, + OpUDiv = 134, + OpSDiv = 135, + OpFDiv = 136, + OpUMod = 137, + OpSRem = 138, + OpSMod = 139, + OpFRem = 140, + OpFMod = 141, + OpVectorTimesScalar = 142, + OpMatrixTimesScalar = 143, + OpVectorTimesMatrix = 144, + OpMatrixTimesVector = 145, + OpMatrixTimesMatrix = 146, + OpOuterProduct = 147, + OpDot = 148, + OpIAddCarry = 149, + OpISubBorrow = 150, + OpUMulExtended = 151, + OpSMulExtended = 152, + OpAny = 154, + OpAll = 155, + OpIsNan = 156, + OpIsInf = 157, + OpIsFinite = 158, + OpIsNormal = 159, + OpSignBitSet = 160, + OpLessOrGreater = 161, + OpOrdered = 162, + OpUnordered = 163, + OpLogicalEqual = 164, + OpLogicalNotEqual = 165, + OpLogicalOr = 166, + OpLogicalAnd = 167, + OpLogicalNot = 168, + OpSelect = 169, + OpIEqual = 170, + OpINotEqual = 171, + OpUGreaterThan = 172, + OpSGreaterThan = 173, + OpUGreaterThanEqual = 174, + OpSGreaterThanEqual = 175, + OpULessThan = 176, + OpSLessThan = 177, + OpULessThanEqual = 178, + OpSLessThanEqual = 179, + OpFOrdEqual = 180, + OpFUnordEqual = 181, + OpFOrdNotEqual = 182, + OpFUnordNotEqual = 183, + OpFOrdLessThan = 184, + OpFUnordLessThan = 185, + OpFOrdGreaterThan = 186, + OpFUnordGreaterThan = 187, + OpFOrdLessThanEqual = 188, + OpFUnordLessThanEqual = 189, + OpFOrdGreaterThanEqual = 190, + OpFUnordGreaterThanEqual = 191, + OpShiftRightLogical = 194, + OpShiftRightArithmetic = 195, + OpShiftLeftLogical = 196, + OpBitwiseOr = 197, + OpBitwiseXor = 198, + OpBitwiseAnd = 199, + OpNot = 200, + OpBitFieldInsert = 201, + OpBitFieldSExtract = 202, + OpBitFieldUExtract = 203, + OpBitReverse = 204, + OpBitCount = 205, + OpDPdx = 207, + OpDPdy = 208, + OpFwidth = 209, + OpDPdxFine = 210, + OpDPdyFine = 211, + OpFwidthFine = 212, + OpDPdxCoarse = 213, + OpDPdyCoarse = 214, + OpFwidthCoarse = 215, + OpEmitVertex = 218, + OpEndPrimitive = 219, + OpEmitStreamVertex = 220, + OpEndStreamPrimitive = 221, + OpControlBarrier = 224, + OpMemoryBarrier = 225, + OpAtomicLoad = 227, + OpAtomicStore = 228, + OpAtomicExchange = 229, + OpAtomicCompareExchange = 230, + OpAtomicCompareExchangeWeak = 231, + OpAtomicIIncrement = 232, + OpAtomicIDecrement = 233, + OpAtomicIAdd = 234, + OpAtomicISub = 235, + OpAtomicSMin = 236, + OpAtomicUMin = 237, + OpAtomicSMax = 238, + OpAtomicUMax = 239, + OpAtomicAnd = 240, + OpAtomicOr = 241, + OpAtomicXor = 242, + OpPhi = 245, + OpLoopMerge = 246, + OpSelectionMerge = 247, + OpLabel = 248, + OpBranch = 249, + OpBranchConditional = 250, + OpSwitch = 251, + OpKill = 252, + OpReturn = 253, + OpReturnValue = 254, + OpUnreachable = 255, + OpLifetimeStart = 256, + OpLifetimeStop = 257, + OpGroupAsyncCopy = 259, + OpGroupWaitEvents = 260, + OpGroupAll = 261, + OpGroupAny = 262, + OpGroupBroadcast = 263, + OpGroupIAdd = 264, + OpGroupFAdd = 265, + OpGroupFMin = 266, + OpGroupUMin = 267, + OpGroupSMin = 268, + OpGroupFMax = 269, + OpGroupUMax = 270, + OpGroupSMax = 271, + OpReadPipe = 274, + OpWritePipe = 275, + OpReservedReadPipe = 276, + OpReservedWritePipe = 277, + OpReserveReadPipePackets = 278, + OpReserveWritePipePackets = 279, + OpCommitReadPipe = 280, + OpCommitWritePipe = 281, + OpIsValidReserveId = 282, + OpGetNumPipePackets = 283, + OpGetMaxPipePackets = 284, + OpGroupReserveReadPipePackets = 285, + 
OpGroupReserveWritePipePackets = 286, + OpGroupCommitReadPipe = 287, + OpGroupCommitWritePipe = 288, + OpEnqueueMarker = 291, + OpEnqueueKernel = 292, + OpGetKernelNDrangeSubGroupCount = 293, + OpGetKernelNDrangeMaxSubGroupSize = 294, + OpGetKernelWorkGroupSize = 295, + OpGetKernelPreferredWorkGroupSizeMultiple = 296, + OpRetainEvent = 297, + OpReleaseEvent = 298, + OpCreateUserEvent = 299, + OpIsValidEvent = 300, + OpSetUserEventStatus = 301, + OpCaptureEventProfilingInfo = 302, + OpGetDefaultQueue = 303, + OpBuildNDRange = 304, + OpImageSparseSampleImplicitLod = 305, + OpImageSparseSampleExplicitLod = 306, + OpImageSparseSampleDrefImplicitLod = 307, + OpImageSparseSampleDrefExplicitLod = 308, + OpImageSparseSampleProjImplicitLod = 309, + OpImageSparseSampleProjExplicitLod = 310, + OpImageSparseSampleProjDrefImplicitLod = 311, + OpImageSparseSampleProjDrefExplicitLod = 312, + OpImageSparseFetch = 313, + OpImageSparseGather = 314, + OpImageSparseDrefGather = 315, + OpImageSparseTexelsResident = 316, + OpNoLine = 317, + OpAtomicFlagTestAndSet = 318, + OpAtomicFlagClear = 319, + OpImageSparseRead = 320, + OpSizeOf = 321, + OpTypePipeStorage = 322, + OpConstantPipeStorage = 323, + OpCreatePipeFromPipeStorage = 324, + OpGetKernelLocalSizeForSubgroupCount = 325, + OpGetKernelMaxNumSubgroups = 326, + OpTypeNamedBarrier = 327, + OpNamedBarrierInitialize = 328, + OpMemoryNamedBarrier = 329, + OpModuleProcessed = 330, + OpDecorateId = 332, + OpSubgroupBallotKHR = 4421, + OpSubgroupFirstInvocationKHR = 4422, + OpSubgroupAllKHR = 4428, + OpSubgroupAnyKHR = 4429, + OpSubgroupAllEqualKHR = 4430, + OpSubgroupReadInvocationKHR = 4432, + OpGroupIAddNonUniformAMD = 5000, + OpGroupFAddNonUniformAMD = 5001, + OpGroupFMinNonUniformAMD = 5002, + OpGroupUMinNonUniformAMD = 5003, + OpGroupSMinNonUniformAMD = 5004, + OpGroupFMaxNonUniformAMD = 5005, + OpGroupUMaxNonUniformAMD = 5006, + OpGroupSMaxNonUniformAMD = 5007, + OpFragmentMaskFetchAMD = 5011, + OpFragmentFetchAMD = 5012, + OpSubgroupShuffleINTEL = 5571, + OpSubgroupShuffleDownINTEL = 5572, + OpSubgroupShuffleUpINTEL = 5573, + OpSubgroupShuffleXorINTEL = 5574, + OpSubgroupBlockReadINTEL = 5575, + OpSubgroupBlockWriteINTEL = 5576, + OpSubgroupImageBlockReadINTEL = 5577, + OpSubgroupImageBlockWriteINTEL = 5578, + OpDecorateStringGOOGLE = 5632, + OpMemberDecorateStringGOOGLE = 5633, + } + } +} + diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/1.2/spirv.cs b/externals/sirit/externals/SPIRV-Headers/include/spirv/1.2/spirv.cs new file mode 100755 index 000000000..493303d6a --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/1.2/spirv.cs @@ -0,0 +1,1021 @@ +// Copyright (c) 2014-2018 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and/or associated documentation files (the "Materials"), +// to deal in the Materials without restriction, including without limitation +// the rights to use, copy, modify, merge, publish, distribute, sublicense, +// and/or sell copies of the Materials, and to permit persons to whom the +// Materials are furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +// STANDARDS. 
THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +// IN THE MATERIALS. + +// This header is automatically generated by the same tool that creates +// the Binary Section of the SPIR-V specification. + +// Enumeration tokens for SPIR-V, in various styles: +// C, C++, C++11, JSON, Lua, Python, C# +// +// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL +// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL +// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL +// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL +// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +// - C# will use enum classes in the Specification class located in the "Spv" namespace, e.g.: Spv.Specification.SourceLanguage.GLSL +// +// Some tokens act like mask values, which can be OR'd together, +// while others are mutually exclusive. The mask-like ones have +// "Mask" in their name, and a parallel enum that has the shift +// amount (1 << x) for each corresponding enumerant. + +namespace Spv +{ + + public static class Specification + { + public const uint MagicNumber = 0x07230203; + public const uint Version = 0x00010200; + public const uint Revision = 2; + public const uint OpCodeMask = 0xffff; + public const uint WordCountShift = 16; + + public enum SourceLanguage + { + Unknown = 0, + ESSL = 1, + GLSL = 2, + OpenCL_C = 3, + OpenCL_CPP = 4, + HLSL = 5, + } + + public enum ExecutionModel + { + Vertex = 0, + TessellationControl = 1, + TessellationEvaluation = 2, + Geometry = 3, + Fragment = 4, + GLCompute = 5, + Kernel = 6, + } + + public enum AddressingModel + { + Logical = 0, + Physical32 = 1, + Physical64 = 2, + } + + public enum MemoryModel + { + Simple = 0, + GLSL450 = 1, + OpenCL = 2, + } + + public enum ExecutionMode + { + Invocations = 0, + SpacingEqual = 1, + SpacingFractionalEven = 2, + SpacingFractionalOdd = 3, + VertexOrderCw = 4, + VertexOrderCcw = 5, + PixelCenterInteger = 6, + OriginUpperLeft = 7, + OriginLowerLeft = 8, + EarlyFragmentTests = 9, + PointMode = 10, + Xfb = 11, + DepthReplacing = 12, + DepthGreater = 14, + DepthLess = 15, + DepthUnchanged = 16, + LocalSize = 17, + LocalSizeHint = 18, + InputPoints = 19, + InputLines = 20, + InputLinesAdjacency = 21, + Triangles = 22, + InputTrianglesAdjacency = 23, + Quads = 24, + Isolines = 25, + OutputVertices = 26, + OutputPoints = 27, + OutputLineStrip = 28, + OutputTriangleStrip = 29, + VecTypeHint = 30, + ContractionOff = 31, + Initializer = 33, + Finalizer = 34, + SubgroupSize = 35, + SubgroupsPerWorkgroup = 36, + SubgroupsPerWorkgroupId = 37, + LocalSizeId = 38, + LocalSizeHintId = 39, + PostDepthCoverage = 4446, + StencilRefReplacingEXT = 5027, + } + + public enum StorageClass + { + UniformConstant = 0, + Input = 1, + Uniform = 2, + Output = 3, + Workgroup = 4, + CrossWorkgroup = 5, + Private = 6, + Function = 7, + Generic = 8, + PushConstant = 9, + AtomicCounter = 10, + Image = 11, + StorageBuffer = 
12, + } + + public enum Dim + { + Dim1D = 0, + Dim2D = 1, + Dim3D = 2, + Cube = 3, + Rect = 4, + Buffer = 5, + SubpassData = 6, + } + + public enum SamplerAddressingMode + { + None = 0, + ClampToEdge = 1, + Clamp = 2, + Repeat = 3, + RepeatMirrored = 4, + } + + public enum SamplerFilterMode + { + Nearest = 0, + Linear = 1, + } + + public enum ImageFormat + { + Unknown = 0, + Rgba32f = 1, + Rgba16f = 2, + R32f = 3, + Rgba8 = 4, + Rgba8Snorm = 5, + Rg32f = 6, + Rg16f = 7, + R11fG11fB10f = 8, + R16f = 9, + Rgba16 = 10, + Rgb10A2 = 11, + Rg16 = 12, + Rg8 = 13, + R16 = 14, + R8 = 15, + Rgba16Snorm = 16, + Rg16Snorm = 17, + Rg8Snorm = 18, + R16Snorm = 19, + R8Snorm = 20, + Rgba32i = 21, + Rgba16i = 22, + Rgba8i = 23, + R32i = 24, + Rg32i = 25, + Rg16i = 26, + Rg8i = 27, + R16i = 28, + R8i = 29, + Rgba32ui = 30, + Rgba16ui = 31, + Rgba8ui = 32, + R32ui = 33, + Rgb10a2ui = 34, + Rg32ui = 35, + Rg16ui = 36, + Rg8ui = 37, + R16ui = 38, + R8ui = 39, + } + + public enum ImageChannelOrder + { + R = 0, + A = 1, + RG = 2, + RA = 3, + RGB = 4, + RGBA = 5, + BGRA = 6, + ARGB = 7, + Intensity = 8, + Luminance = 9, + Rx = 10, + RGx = 11, + RGBx = 12, + Depth = 13, + DepthStencil = 14, + sRGB = 15, + sRGBx = 16, + sRGBA = 17, + sBGRA = 18, + ABGR = 19, + } + + public enum ImageChannelDataType + { + SnormInt8 = 0, + SnormInt16 = 1, + UnormInt8 = 2, + UnormInt16 = 3, + UnormShort565 = 4, + UnormShort555 = 5, + UnormInt101010 = 6, + SignedInt8 = 7, + SignedInt16 = 8, + SignedInt32 = 9, + UnsignedInt8 = 10, + UnsignedInt16 = 11, + UnsignedInt32 = 12, + HalfFloat = 13, + Float = 14, + UnormInt24 = 15, + UnormInt101010_2 = 16, + } + + public enum ImageOperandsShift + { + Bias = 0, + Lod = 1, + Grad = 2, + ConstOffset = 3, + Offset = 4, + ConstOffsets = 5, + Sample = 6, + MinLod = 7, + } + + public enum ImageOperandsMask + { + MaskNone = 0, + Bias = 0x00000001, + Lod = 0x00000002, + Grad = 0x00000004, + ConstOffset = 0x00000008, + Offset = 0x00000010, + ConstOffsets = 0x00000020, + Sample = 0x00000040, + MinLod = 0x00000080, + } + + public enum FPFastMathModeShift + { + NotNaN = 0, + NotInf = 1, + NSZ = 2, + AllowRecip = 3, + Fast = 4, + } + + public enum FPFastMathModeMask + { + MaskNone = 0, + NotNaN = 0x00000001, + NotInf = 0x00000002, + NSZ = 0x00000004, + AllowRecip = 0x00000008, + Fast = 0x00000010, + } + + public enum FPRoundingMode + { + RTE = 0, + RTZ = 1, + RTP = 2, + RTN = 3, + } + + public enum LinkageType + { + Export = 0, + Import = 1, + } + + public enum AccessQualifier + { + ReadOnly = 0, + WriteOnly = 1, + ReadWrite = 2, + } + + public enum FunctionParameterAttribute + { + Zext = 0, + Sext = 1, + ByVal = 2, + Sret = 3, + NoAlias = 4, + NoCapture = 5, + NoWrite = 6, + NoReadWrite = 7, + } + + public enum Decoration + { + RelaxedPrecision = 0, + SpecId = 1, + Block = 2, + BufferBlock = 3, + RowMajor = 4, + ColMajor = 5, + ArrayStride = 6, + MatrixStride = 7, + GLSLShared = 8, + GLSLPacked = 9, + CPacked = 10, + BuiltIn = 11, + NoPerspective = 13, + Flat = 14, + Patch = 15, + Centroid = 16, + Sample = 17, + Invariant = 18, + Restrict = 19, + Aliased = 20, + Volatile = 21, + Constant = 22, + Coherent = 23, + NonWritable = 24, + NonReadable = 25, + Uniform = 26, + SaturatedConversion = 28, + Stream = 29, + Location = 30, + Component = 31, + Index = 32, + Binding = 33, + DescriptorSet = 34, + Offset = 35, + XfbBuffer = 36, + XfbStride = 37, + FuncParamAttr = 38, + FPRoundingMode = 39, + FPFastMathMode = 40, + LinkageAttributes = 41, + NoContraction = 42, + InputAttachmentIndex = 43, + Alignment = 44, + 
MaxByteOffset = 45, + AlignmentId = 46, + MaxByteOffsetId = 47, + ExplicitInterpAMD = 4999, + OverrideCoverageNV = 5248, + PassthroughNV = 5250, + ViewportRelativeNV = 5252, + SecondaryViewportRelativeNV = 5256, + HlslCounterBufferGOOGLE = 5634, + HlslSemanticGOOGLE = 5635, + } + + public enum BuiltIn + { + Position = 0, + PointSize = 1, + ClipDistance = 3, + CullDistance = 4, + VertexId = 5, + InstanceId = 6, + PrimitiveId = 7, + InvocationId = 8, + Layer = 9, + ViewportIndex = 10, + TessLevelOuter = 11, + TessLevelInner = 12, + TessCoord = 13, + PatchVertices = 14, + FragCoord = 15, + PointCoord = 16, + FrontFacing = 17, + SampleId = 18, + SamplePosition = 19, + SampleMask = 20, + FragDepth = 22, + HelperInvocation = 23, + NumWorkgroups = 24, + WorkgroupSize = 25, + WorkgroupId = 26, + LocalInvocationId = 27, + GlobalInvocationId = 28, + LocalInvocationIndex = 29, + WorkDim = 30, + GlobalSize = 31, + EnqueuedWorkgroupSize = 32, + GlobalOffset = 33, + GlobalLinearId = 34, + SubgroupSize = 36, + SubgroupMaxSize = 37, + NumSubgroups = 38, + NumEnqueuedSubgroups = 39, + SubgroupId = 40, + SubgroupLocalInvocationId = 41, + VertexIndex = 42, + InstanceIndex = 43, + SubgroupEqMaskKHR = 4416, + SubgroupGeMaskKHR = 4417, + SubgroupGtMaskKHR = 4418, + SubgroupLeMaskKHR = 4419, + SubgroupLtMaskKHR = 4420, + BaseVertex = 4424, + BaseInstance = 4425, + DrawIndex = 4426, + DeviceIndex = 4438, + ViewIndex = 4440, + BaryCoordNoPerspAMD = 4992, + BaryCoordNoPerspCentroidAMD = 4993, + BaryCoordNoPerspSampleAMD = 4994, + BaryCoordSmoothAMD = 4995, + BaryCoordSmoothCentroidAMD = 4996, + BaryCoordSmoothSampleAMD = 4997, + BaryCoordPullModelAMD = 4998, + FragStencilRefEXT = 5014, + ViewportMaskNV = 5253, + SecondaryPositionNV = 5257, + SecondaryViewportMaskNV = 5258, + PositionPerViewNV = 5261, + ViewportMaskPerViewNV = 5262, + } + + public enum SelectionControlShift + { + Flatten = 0, + DontFlatten = 1, + } + + public enum SelectionControlMask + { + MaskNone = 0, + Flatten = 0x00000001, + DontFlatten = 0x00000002, + } + + public enum LoopControlShift + { + Unroll = 0, + DontUnroll = 1, + DependencyInfinite = 2, + DependencyLength = 3, + } + + public enum LoopControlMask + { + MaskNone = 0, + Unroll = 0x00000001, + DontUnroll = 0x00000002, + DependencyInfinite = 0x00000004, + DependencyLength = 0x00000008, + } + + public enum FunctionControlShift + { + Inline = 0, + DontInline = 1, + Pure = 2, + Const = 3, + } + + public enum FunctionControlMask + { + MaskNone = 0, + Inline = 0x00000001, + DontInline = 0x00000002, + Pure = 0x00000004, + Const = 0x00000008, + } + + public enum MemorySemanticsShift + { + Acquire = 1, + Release = 2, + AcquireRelease = 3, + SequentiallyConsistent = 4, + UniformMemory = 6, + SubgroupMemory = 7, + WorkgroupMemory = 8, + CrossWorkgroupMemory = 9, + AtomicCounterMemory = 10, + ImageMemory = 11, + } + + public enum MemorySemanticsMask + { + MaskNone = 0, + Acquire = 0x00000002, + Release = 0x00000004, + AcquireRelease = 0x00000008, + SequentiallyConsistent = 0x00000010, + UniformMemory = 0x00000040, + SubgroupMemory = 0x00000080, + WorkgroupMemory = 0x00000100, + CrossWorkgroupMemory = 0x00000200, + AtomicCounterMemory = 0x00000400, + ImageMemory = 0x00000800, + } + + public enum MemoryAccessShift + { + Volatile = 0, + Aligned = 1, + Nontemporal = 2, + } + + public enum MemoryAccessMask + { + MaskNone = 0, + Volatile = 0x00000001, + Aligned = 0x00000002, + Nontemporal = 0x00000004, + } + + public enum Scope + { + CrossDevice = 0, + Device = 1, + Workgroup = 2, + Subgroup = 3, + 
Invocation = 4, + } + + public enum GroupOperation + { + Reduce = 0, + InclusiveScan = 1, + ExclusiveScan = 2, + } + + public enum KernelEnqueueFlags + { + NoWait = 0, + WaitKernel = 1, + WaitWorkGroup = 2, + } + + public enum KernelProfilingInfoShift + { + CmdExecTime = 0, + } + + public enum KernelProfilingInfoMask + { + MaskNone = 0, + CmdExecTime = 0x00000001, + } + + public enum Capability + { + Matrix = 0, + Shader = 1, + Geometry = 2, + Tessellation = 3, + Addresses = 4, + Linkage = 5, + Kernel = 6, + Vector16 = 7, + Float16Buffer = 8, + Float16 = 9, + Float64 = 10, + Int64 = 11, + Int64Atomics = 12, + ImageBasic = 13, + ImageReadWrite = 14, + ImageMipmap = 15, + Pipes = 17, + Groups = 18, + DeviceEnqueue = 19, + LiteralSampler = 20, + AtomicStorage = 21, + Int16 = 22, + TessellationPointSize = 23, + GeometryPointSize = 24, + ImageGatherExtended = 25, + StorageImageMultisample = 27, + UniformBufferArrayDynamicIndexing = 28, + SampledImageArrayDynamicIndexing = 29, + StorageBufferArrayDynamicIndexing = 30, + StorageImageArrayDynamicIndexing = 31, + ClipDistance = 32, + CullDistance = 33, + ImageCubeArray = 34, + SampleRateShading = 35, + ImageRect = 36, + SampledRect = 37, + GenericPointer = 38, + Int8 = 39, + InputAttachment = 40, + SparseResidency = 41, + MinLod = 42, + Sampled1D = 43, + Image1D = 44, + SampledCubeArray = 45, + SampledBuffer = 46, + ImageBuffer = 47, + ImageMSArray = 48, + StorageImageExtendedFormats = 49, + ImageQuery = 50, + DerivativeControl = 51, + InterpolationFunction = 52, + TransformFeedback = 53, + GeometryStreams = 54, + StorageImageReadWithoutFormat = 55, + StorageImageWriteWithoutFormat = 56, + MultiViewport = 57, + SubgroupDispatch = 58, + NamedBarrier = 59, + PipeStorage = 60, + SubgroupBallotKHR = 4423, + DrawParameters = 4427, + SubgroupVoteKHR = 4431, + StorageBuffer16BitAccess = 4433, + StorageUniformBufferBlock16 = 4433, + StorageUniform16 = 4434, + UniformAndStorageBuffer16BitAccess = 4434, + StoragePushConstant16 = 4435, + StorageInputOutput16 = 4436, + DeviceGroup = 4437, + MultiView = 4439, + VariablePointersStorageBuffer = 4441, + VariablePointers = 4442, + AtomicStorageOps = 4445, + SampleMaskPostDepthCoverage = 4447, + ImageGatherBiasLodAMD = 5009, + FragmentMaskAMD = 5010, + StencilExportEXT = 5013, + ImageReadWriteLodAMD = 5015, + SampleMaskOverrideCoverageNV = 5249, + GeometryShaderPassthroughNV = 5251, + ShaderViewportIndexLayerEXT = 5254, + ShaderViewportIndexLayerNV = 5254, + ShaderViewportMaskNV = 5255, + ShaderStereoViewNV = 5259, + PerViewAttributesNV = 5260, + SubgroupShuffleINTEL = 5568, + SubgroupBufferBlockIOINTEL = 5569, + SubgroupImageBlockIOINTEL = 5570, + } + + public enum Op + { + OpNop = 0, + OpUndef = 1, + OpSourceContinued = 2, + OpSource = 3, + OpSourceExtension = 4, + OpName = 5, + OpMemberName = 6, + OpString = 7, + OpLine = 8, + OpExtension = 10, + OpExtInstImport = 11, + OpExtInst = 12, + OpMemoryModel = 14, + OpEntryPoint = 15, + OpExecutionMode = 16, + OpCapability = 17, + OpTypeVoid = 19, + OpTypeBool = 20, + OpTypeInt = 21, + OpTypeFloat = 22, + OpTypeVector = 23, + OpTypeMatrix = 24, + OpTypeImage = 25, + OpTypeSampler = 26, + OpTypeSampledImage = 27, + OpTypeArray = 28, + OpTypeRuntimeArray = 29, + OpTypeStruct = 30, + OpTypeOpaque = 31, + OpTypePointer = 32, + OpTypeFunction = 33, + OpTypeEvent = 34, + OpTypeDeviceEvent = 35, + OpTypeReserveId = 36, + OpTypeQueue = 37, + OpTypePipe = 38, + OpTypeForwardPointer = 39, + OpConstantTrue = 41, + OpConstantFalse = 42, + OpConstant = 43, + OpConstantComposite = 
44, + OpConstantSampler = 45, + OpConstantNull = 46, + OpSpecConstantTrue = 48, + OpSpecConstantFalse = 49, + OpSpecConstant = 50, + OpSpecConstantComposite = 51, + OpSpecConstantOp = 52, + OpFunction = 54, + OpFunctionParameter = 55, + OpFunctionEnd = 56, + OpFunctionCall = 57, + OpVariable = 59, + OpImageTexelPointer = 60, + OpLoad = 61, + OpStore = 62, + OpCopyMemory = 63, + OpCopyMemorySized = 64, + OpAccessChain = 65, + OpInBoundsAccessChain = 66, + OpPtrAccessChain = 67, + OpArrayLength = 68, + OpGenericPtrMemSemantics = 69, + OpInBoundsPtrAccessChain = 70, + OpDecorate = 71, + OpMemberDecorate = 72, + OpDecorationGroup = 73, + OpGroupDecorate = 74, + OpGroupMemberDecorate = 75, + OpVectorExtractDynamic = 77, + OpVectorInsertDynamic = 78, + OpVectorShuffle = 79, + OpCompositeConstruct = 80, + OpCompositeExtract = 81, + OpCompositeInsert = 82, + OpCopyObject = 83, + OpTranspose = 84, + OpSampledImage = 86, + OpImageSampleImplicitLod = 87, + OpImageSampleExplicitLod = 88, + OpImageSampleDrefImplicitLod = 89, + OpImageSampleDrefExplicitLod = 90, + OpImageSampleProjImplicitLod = 91, + OpImageSampleProjExplicitLod = 92, + OpImageSampleProjDrefImplicitLod = 93, + OpImageSampleProjDrefExplicitLod = 94, + OpImageFetch = 95, + OpImageGather = 96, + OpImageDrefGather = 97, + OpImageRead = 98, + OpImageWrite = 99, + OpImage = 100, + OpImageQueryFormat = 101, + OpImageQueryOrder = 102, + OpImageQuerySizeLod = 103, + OpImageQuerySize = 104, + OpImageQueryLod = 105, + OpImageQueryLevels = 106, + OpImageQuerySamples = 107, + OpConvertFToU = 109, + OpConvertFToS = 110, + OpConvertSToF = 111, + OpConvertUToF = 112, + OpUConvert = 113, + OpSConvert = 114, + OpFConvert = 115, + OpQuantizeToF16 = 116, + OpConvertPtrToU = 117, + OpSatConvertSToU = 118, + OpSatConvertUToS = 119, + OpConvertUToPtr = 120, + OpPtrCastToGeneric = 121, + OpGenericCastToPtr = 122, + OpGenericCastToPtrExplicit = 123, + OpBitcast = 124, + OpSNegate = 126, + OpFNegate = 127, + OpIAdd = 128, + OpFAdd = 129, + OpISub = 130, + OpFSub = 131, + OpIMul = 132, + OpFMul = 133, + OpUDiv = 134, + OpSDiv = 135, + OpFDiv = 136, + OpUMod = 137, + OpSRem = 138, + OpSMod = 139, + OpFRem = 140, + OpFMod = 141, + OpVectorTimesScalar = 142, + OpMatrixTimesScalar = 143, + OpVectorTimesMatrix = 144, + OpMatrixTimesVector = 145, + OpMatrixTimesMatrix = 146, + OpOuterProduct = 147, + OpDot = 148, + OpIAddCarry = 149, + OpISubBorrow = 150, + OpUMulExtended = 151, + OpSMulExtended = 152, + OpAny = 154, + OpAll = 155, + OpIsNan = 156, + OpIsInf = 157, + OpIsFinite = 158, + OpIsNormal = 159, + OpSignBitSet = 160, + OpLessOrGreater = 161, + OpOrdered = 162, + OpUnordered = 163, + OpLogicalEqual = 164, + OpLogicalNotEqual = 165, + OpLogicalOr = 166, + OpLogicalAnd = 167, + OpLogicalNot = 168, + OpSelect = 169, + OpIEqual = 170, + OpINotEqual = 171, + OpUGreaterThan = 172, + OpSGreaterThan = 173, + OpUGreaterThanEqual = 174, + OpSGreaterThanEqual = 175, + OpULessThan = 176, + OpSLessThan = 177, + OpULessThanEqual = 178, + OpSLessThanEqual = 179, + OpFOrdEqual = 180, + OpFUnordEqual = 181, + OpFOrdNotEqual = 182, + OpFUnordNotEqual = 183, + OpFOrdLessThan = 184, + OpFUnordLessThan = 185, + OpFOrdGreaterThan = 186, + OpFUnordGreaterThan = 187, + OpFOrdLessThanEqual = 188, + OpFUnordLessThanEqual = 189, + OpFOrdGreaterThanEqual = 190, + OpFUnordGreaterThanEqual = 191, + OpShiftRightLogical = 194, + OpShiftRightArithmetic = 195, + OpShiftLeftLogical = 196, + OpBitwiseOr = 197, + OpBitwiseXor = 198, + OpBitwiseAnd = 199, + OpNot = 200, + OpBitFieldInsert = 201, + 
OpBitFieldSExtract = 202, + OpBitFieldUExtract = 203, + OpBitReverse = 204, + OpBitCount = 205, + OpDPdx = 207, + OpDPdy = 208, + OpFwidth = 209, + OpDPdxFine = 210, + OpDPdyFine = 211, + OpFwidthFine = 212, + OpDPdxCoarse = 213, + OpDPdyCoarse = 214, + OpFwidthCoarse = 215, + OpEmitVertex = 218, + OpEndPrimitive = 219, + OpEmitStreamVertex = 220, + OpEndStreamPrimitive = 221, + OpControlBarrier = 224, + OpMemoryBarrier = 225, + OpAtomicLoad = 227, + OpAtomicStore = 228, + OpAtomicExchange = 229, + OpAtomicCompareExchange = 230, + OpAtomicCompareExchangeWeak = 231, + OpAtomicIIncrement = 232, + OpAtomicIDecrement = 233, + OpAtomicIAdd = 234, + OpAtomicISub = 235, + OpAtomicSMin = 236, + OpAtomicUMin = 237, + OpAtomicSMax = 238, + OpAtomicUMax = 239, + OpAtomicAnd = 240, + OpAtomicOr = 241, + OpAtomicXor = 242, + OpPhi = 245, + OpLoopMerge = 246, + OpSelectionMerge = 247, + OpLabel = 248, + OpBranch = 249, + OpBranchConditional = 250, + OpSwitch = 251, + OpKill = 252, + OpReturn = 253, + OpReturnValue = 254, + OpUnreachable = 255, + OpLifetimeStart = 256, + OpLifetimeStop = 257, + OpGroupAsyncCopy = 259, + OpGroupWaitEvents = 260, + OpGroupAll = 261, + OpGroupAny = 262, + OpGroupBroadcast = 263, + OpGroupIAdd = 264, + OpGroupFAdd = 265, + OpGroupFMin = 266, + OpGroupUMin = 267, + OpGroupSMin = 268, + OpGroupFMax = 269, + OpGroupUMax = 270, + OpGroupSMax = 271, + OpReadPipe = 274, + OpWritePipe = 275, + OpReservedReadPipe = 276, + OpReservedWritePipe = 277, + OpReserveReadPipePackets = 278, + OpReserveWritePipePackets = 279, + OpCommitReadPipe = 280, + OpCommitWritePipe = 281, + OpIsValidReserveId = 282, + OpGetNumPipePackets = 283, + OpGetMaxPipePackets = 284, + OpGroupReserveReadPipePackets = 285, + OpGroupReserveWritePipePackets = 286, + OpGroupCommitReadPipe = 287, + OpGroupCommitWritePipe = 288, + OpEnqueueMarker = 291, + OpEnqueueKernel = 292, + OpGetKernelNDrangeSubGroupCount = 293, + OpGetKernelNDrangeMaxSubGroupSize = 294, + OpGetKernelWorkGroupSize = 295, + OpGetKernelPreferredWorkGroupSizeMultiple = 296, + OpRetainEvent = 297, + OpReleaseEvent = 298, + OpCreateUserEvent = 299, + OpIsValidEvent = 300, + OpSetUserEventStatus = 301, + OpCaptureEventProfilingInfo = 302, + OpGetDefaultQueue = 303, + OpBuildNDRange = 304, + OpImageSparseSampleImplicitLod = 305, + OpImageSparseSampleExplicitLod = 306, + OpImageSparseSampleDrefImplicitLod = 307, + OpImageSparseSampleDrefExplicitLod = 308, + OpImageSparseSampleProjImplicitLod = 309, + OpImageSparseSampleProjExplicitLod = 310, + OpImageSparseSampleProjDrefImplicitLod = 311, + OpImageSparseSampleProjDrefExplicitLod = 312, + OpImageSparseFetch = 313, + OpImageSparseGather = 314, + OpImageSparseDrefGather = 315, + OpImageSparseTexelsResident = 316, + OpNoLine = 317, + OpAtomicFlagTestAndSet = 318, + OpAtomicFlagClear = 319, + OpImageSparseRead = 320, + OpSizeOf = 321, + OpTypePipeStorage = 322, + OpConstantPipeStorage = 323, + OpCreatePipeFromPipeStorage = 324, + OpGetKernelLocalSizeForSubgroupCount = 325, + OpGetKernelMaxNumSubgroups = 326, + OpTypeNamedBarrier = 327, + OpNamedBarrierInitialize = 328, + OpMemoryNamedBarrier = 329, + OpModuleProcessed = 330, + OpExecutionModeId = 331, + OpDecorateId = 332, + OpSubgroupBallotKHR = 4421, + OpSubgroupFirstInvocationKHR = 4422, + OpSubgroupAllKHR = 4428, + OpSubgroupAnyKHR = 4429, + OpSubgroupAllEqualKHR = 4430, + OpSubgroupReadInvocationKHR = 4432, + OpGroupIAddNonUniformAMD = 5000, + OpGroupFAddNonUniformAMD = 5001, + OpGroupFMinNonUniformAMD = 5002, + OpGroupUMinNonUniformAMD = 5003, + 
OpGroupSMinNonUniformAMD = 5004, + OpGroupFMaxNonUniformAMD = 5005, + OpGroupUMaxNonUniformAMD = 5006, + OpGroupSMaxNonUniformAMD = 5007, + OpFragmentMaskFetchAMD = 5011, + OpFragmentFetchAMD = 5012, + OpSubgroupShuffleINTEL = 5571, + OpSubgroupShuffleDownINTEL = 5572, + OpSubgroupShuffleUpINTEL = 5573, + OpSubgroupShuffleXorINTEL = 5574, + OpSubgroupBlockReadINTEL = 5575, + OpSubgroupBlockWriteINTEL = 5576, + OpSubgroupImageBlockReadINTEL = 5577, + OpSubgroupImageBlockWriteINTEL = 5578, + OpDecorateStringGOOGLE = 5632, + OpMemberDecorateStringGOOGLE = 5633, + } + } +} + diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/spir-v.xml b/externals/sirit/externals/SPIRV-Headers/include/spirv/spir-v.xml index b05bfa7c4..f4c8add85 100755 --- a/externals/sirit/externals/SPIRV-Headers/include/spirv/spir-v.xml +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/spir-v.xml @@ -23,10 +23,11 @@ MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. --> [hunk body lost: only bare +/- markers survive where the XML markup was stripped] @@ -52,7 +53,7 @@ [hunk body lost] @@ -69,13 +70,102 @@ [hunk body lost] diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_gcn_shader.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_gcn_shader.h new file mode 100755 index 000000000..80165ae5c --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_gcn_shader.h @@ -0,0 +1,52 @@ +// Copyright (c) 2020 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and/or associated documentation files (the +// "Materials"), to deal in the Materials without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Materials, and to +// permit persons to whom the Materials are furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +// https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+// + +#ifndef SPIRV_UNIFIED1_AMD_gcn_shader_H_ +#define SPIRV_UNIFIED1_AMD_gcn_shader_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + AMD_gcn_shaderRevision = 2, + AMD_gcn_shaderRevision_BitWidthPadding = 0x7fffffff +}; + +enum AMD_gcn_shaderInstructions { + AMD_gcn_shaderCubeFaceIndexAMD = 1, + AMD_gcn_shaderCubeFaceCoordAMD = 2, + AMD_gcn_shaderTimeAMD = 3, + AMD_gcn_shaderInstructionsMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_AMD_gcn_shader_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_ballot.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_ballot.h new file mode 100755 index 000000000..8a8bb6ece --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_ballot.h @@ -0,0 +1,53 @@ +// Copyright (c) 2020 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and/or associated documentation files (the +// "Materials"), to deal in the Materials without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Materials, and to +// permit persons to whom the Materials are furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +// https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +// + +#ifndef SPIRV_UNIFIED1_AMD_shader_ballot_H_ +#define SPIRV_UNIFIED1_AMD_shader_ballot_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + AMD_shader_ballotRevision = 5, + AMD_shader_ballotRevision_BitWidthPadding = 0x7fffffff +}; + +enum AMD_shader_ballotInstructions { + AMD_shader_ballotSwizzleInvocationsAMD = 1, + AMD_shader_ballotSwizzleInvocationsMaskedAMD = 2, + AMD_shader_ballotWriteInvocationAMD = 3, + AMD_shader_ballotMbcntAMD = 4, + AMD_shader_ballotInstructionsMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_AMD_shader_ballot_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_explicit_vertex_parameter.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_explicit_vertex_parameter.h new file mode 100755 index 000000000..12b6480f1 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_explicit_vertex_parameter.h @@ -0,0 +1,50 @@ +// Copyright (c) 2020 The Khronos Group Inc. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and/or associated documentation files (the +// "Materials"), to deal in the Materials without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Materials, and to +// permit persons to whom the Materials are furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +// https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +// + +#ifndef SPIRV_UNIFIED1_AMD_shader_explicit_vertex_parameter_H_ +#define SPIRV_UNIFIED1_AMD_shader_explicit_vertex_parameter_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + AMD_shader_explicit_vertex_parameterRevision = 4, + AMD_shader_explicit_vertex_parameterRevision_BitWidthPadding = 0x7fffffff +}; + +enum AMD_shader_explicit_vertex_parameterInstructions { + AMD_shader_explicit_vertex_parameterInterpolateAtVertexAMD = 1, + AMD_shader_explicit_vertex_parameterInstructionsMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_AMD_shader_explicit_vertex_parameter_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_trinary_minmax.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_trinary_minmax.h new file mode 100755 index 000000000..1b14997d2 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/AMD_shader_trinary_minmax.h @@ -0,0 +1,58 @@ +// Copyright (c) 2020 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and/or associated documentation files (the +// "Materials"), to deal in the Materials without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Materials, and to +// permit persons to whom the Materials are furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +// https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +// + +#ifndef SPIRV_UNIFIED1_AMD_shader_trinary_minmax_H_ +#define SPIRV_UNIFIED1_AMD_shader_trinary_minmax_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + AMD_shader_trinary_minmaxRevision = 4, + AMD_shader_trinary_minmaxRevision_BitWidthPadding = 0x7fffffff +}; + +enum AMD_shader_trinary_minmaxInstructions { + AMD_shader_trinary_minmaxFMin3AMD = 1, + AMD_shader_trinary_minmaxUMin3AMD = 2, + AMD_shader_trinary_minmaxSMin3AMD = 3, + AMD_shader_trinary_minmaxFMax3AMD = 4, + AMD_shader_trinary_minmaxUMax3AMD = 5, + AMD_shader_trinary_minmaxSMax3AMD = 6, + AMD_shader_trinary_minmaxFMid3AMD = 7, + AMD_shader_trinary_minmaxUMid3AMD = 8, + AMD_shader_trinary_minmaxSMid3AMD = 9, + AMD_shader_trinary_minmaxInstructionsMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_AMD_shader_trinary_minmax_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/DebugInfo.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/DebugInfo.h new file mode 100755 index 000000000..4657556bf --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/DebugInfo.h @@ -0,0 +1,144 @@ +// Copyright (c) 2017 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and/or associated documentation files (the "Materials"), +// to deal in the Materials without restriction, including without limitation +// the rights to use, copy, modify, merge, publish, distribute, sublicense, +// and/or sell copies of the Materials, and to permit persons to whom the +// Materials are furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +// IN THE MATERIALS. 
+ +#ifndef SPIRV_UNIFIED1_DebugInfo_H_ +#define SPIRV_UNIFIED1_DebugInfo_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + DebugInfoVersion = 100, + DebugInfoVersion_BitWidthPadding = 0x7fffffff +}; +enum { + DebugInfoRevision = 1, + DebugInfoRevision_BitWidthPadding = 0x7fffffff +}; + +enum DebugInfoInstructions { + DebugInfoDebugInfoNone = 0, + DebugInfoDebugCompilationUnit = 1, + DebugInfoDebugTypeBasic = 2, + DebugInfoDebugTypePointer = 3, + DebugInfoDebugTypeQualifier = 4, + DebugInfoDebugTypeArray = 5, + DebugInfoDebugTypeVector = 6, + DebugInfoDebugTypedef = 7, + DebugInfoDebugTypeFunction = 8, + DebugInfoDebugTypeEnum = 9, + DebugInfoDebugTypeComposite = 10, + DebugInfoDebugTypeMember = 11, + DebugInfoDebugTypeInheritance = 12, + DebugInfoDebugTypePtrToMember = 13, + DebugInfoDebugTypeTemplate = 14, + DebugInfoDebugTypeTemplateParameter = 15, + DebugInfoDebugTypeTemplateTemplateParameter = 16, + DebugInfoDebugTypeTemplateParameterPack = 17, + DebugInfoDebugGlobalVariable = 18, + DebugInfoDebugFunctionDeclaration = 19, + DebugInfoDebugFunction = 20, + DebugInfoDebugLexicalBlock = 21, + DebugInfoDebugLexicalBlockDiscriminator = 22, + DebugInfoDebugScope = 23, + DebugInfoDebugNoScope = 24, + DebugInfoDebugInlinedAt = 25, + DebugInfoDebugLocalVariable = 26, + DebugInfoDebugInlinedVariable = 27, + DebugInfoDebugDeclare = 28, + DebugInfoDebugValue = 29, + DebugInfoDebugOperation = 30, + DebugInfoDebugExpression = 31, + DebugInfoDebugMacroDef = 32, + DebugInfoDebugMacroUndef = 33, + DebugInfoInstructionsMax = 0x7fffffff +}; + + +enum DebugInfoDebugInfoFlags { + DebugInfoNone = 0x0000, + DebugInfoFlagIsProtected = 0x01, + DebugInfoFlagIsPrivate = 0x02, + DebugInfoFlagIsPublic = 0x03, + DebugInfoFlagIsLocal = 0x04, + DebugInfoFlagIsDefinition = 0x08, + DebugInfoFlagFwdDecl = 0x10, + DebugInfoFlagArtificial = 0x20, + DebugInfoFlagExplicit = 0x40, + DebugInfoFlagPrototyped = 0x80, + DebugInfoFlagObjectPointer = 0x100, + DebugInfoFlagStaticMember = 0x200, + DebugInfoFlagIndirectVariable = 0x400, + DebugInfoFlagLValueReference = 0x800, + DebugInfoFlagRValueReference = 0x1000, + DebugInfoFlagIsOptimized = 0x2000, + DebugInfoDebugInfoFlagsMax = 0x7fffffff +}; + +enum DebugInfoDebugBaseTypeAttributeEncoding { + DebugInfoUnspecified = 0, + DebugInfoAddress = 1, + DebugInfoBoolean = 2, + DebugInfoFloat = 4, + DebugInfoSigned = 5, + DebugInfoSignedChar = 6, + DebugInfoUnsigned = 7, + DebugInfoUnsignedChar = 8, + DebugInfoDebugBaseTypeAttributeEncodingMax = 0x7fffffff +}; + +enum DebugInfoDebugCompositeType { + DebugInfoClass = 0, + DebugInfoStructure = 1, + DebugInfoUnion = 2, + DebugInfoDebugCompositeTypeMax = 0x7fffffff +}; + +enum DebugInfoDebugTypeQualifier { + DebugInfoConstType = 0, + DebugInfoVolatileType = 1, + DebugInfoRestrictType = 2, + DebugInfoDebugTypeQualifierMax = 0x7fffffff +}; + +enum DebugInfoDebugOperation { + DebugInfoDeref = 0, + DebugInfoPlus = 1, + DebugInfoMinus = 2, + DebugInfoPlusUconst = 3, + DebugInfoBitPiece = 4, + DebugInfoSwap = 5, + DebugInfoXderef = 6, + DebugInfoStackValue = 7, + DebugInfoConstu = 8, + DebugInfoDebugOperationMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_DebugInfo_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticClspvReflection.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticClspvReflection.h new file mode 100755 index 000000000..fa7061d84 --- /dev/null +++ 
b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticClspvReflection.h @@ -0,0 +1,73 @@ +// Copyright (c) 2020 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and/or associated documentation files (the +// "Materials"), to deal in the Materials without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Materials, and to +// permit persons to whom the Materials are furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +// https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +// + +#ifndef SPIRV_UNIFIED1_NonSemanticClspvReflection_H_ +#define SPIRV_UNIFIED1_NonSemanticClspvReflection_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + NonSemanticClspvReflectionRevision = 1, + NonSemanticClspvReflectionRevision_BitWidthPadding = 0x7fffffff +}; + +enum NonSemanticClspvReflectionInstructions { + NonSemanticClspvReflectionKernel = 1, + NonSemanticClspvReflectionArgumentInfo = 2, + NonSemanticClspvReflectionArgumentStorageBuffer = 3, + NonSemanticClspvReflectionArgumentUniform = 4, + NonSemanticClspvReflectionArgumentPodStorageBuffer = 5, + NonSemanticClspvReflectionArgumentPodUniform = 6, + NonSemanticClspvReflectionArgumentPodPushConstant = 7, + NonSemanticClspvReflectionArgumentSampledImage = 8, + NonSemanticClspvReflectionArgumentStorageImage = 9, + NonSemanticClspvReflectionArgumentSampler = 10, + NonSemanticClspvReflectionArgumentWorkgroup = 11, + NonSemanticClspvReflectionSpecConstantWorkgroupSize = 12, + NonSemanticClspvReflectionSpecConstantGlobalOffset = 13, + NonSemanticClspvReflectionSpecConstantWorkDim = 14, + NonSemanticClspvReflectionPushConstantGlobalOffset = 15, + NonSemanticClspvReflectionPushConstantEnqueuedLocalSize = 16, + NonSemanticClspvReflectionPushConstantGlobalSize = 17, + NonSemanticClspvReflectionPushConstantRegionOffset = 18, + NonSemanticClspvReflectionPushConstantNumWorkgroups = 19, + NonSemanticClspvReflectionPushConstantRegionGroupOffset = 20, + NonSemanticClspvReflectionConstantDataStorageBuffer = 21, + NonSemanticClspvReflectionConstantDataUniform = 22, + NonSemanticClspvReflectionLiteralSampler = 23, + NonSemanticClspvReflectionPropertyRequiredWorkgroupSize = 24, + NonSemanticClspvReflectionInstructionsMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_NonSemanticClspvReflection_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticDebugPrintf.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticDebugPrintf.h new file mode 100755 index 
000000000..83796d75e --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/NonSemanticDebugPrintf.h @@ -0,0 +1,50 @@ +// Copyright (c) 2020 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and/or associated documentation files (the +// "Materials"), to deal in the Materials without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Materials, and to +// permit persons to whom the Materials are furnished to do so, subject to +// the following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +// https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +// + +#ifndef SPIRV_UNIFIED1_NonSemanticDebugPrintf_H_ +#define SPIRV_UNIFIED1_NonSemanticDebugPrintf_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + NonSemanticDebugPrintfRevision = 1, + NonSemanticDebugPrintfRevision_BitWidthPadding = 0x7fffffff +}; + +enum NonSemanticDebugPrintfInstructions { + NonSemanticDebugPrintfDebugPrintf = 1, + NonSemanticDebugPrintfInstructionsMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_NonSemanticDebugPrintf_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCL.std.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCL.std.h index fe759e1bc..2745e30df 100755 --- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCL.std.h +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCL.std.h @@ -1,5 +1,5 @@ /* -** Copyright (c) 2015-2017 The Khronos Group Inc. +** Copyright (c) 2015-2019 The Khronos Group Inc. 
** ** Permission is hereby granted, free of charge, to any person obtaining a copy ** of this software and/or associated documentation files (the "Materials"), @@ -27,6 +27,7 @@ #ifndef OPENCLstd_H #define OPENCLstd_H +#ifdef __cplusplus namespace OpenCLLIB { enum Entrypoints { @@ -212,4 +213,189 @@ enum Entrypoints { } // end namespace OpenCLLIB +#else + +enum OpenCLstd_Entrypoints { + + // Section 2.1: Math extended instructions + OpenCLstd_Acos = 0, + OpenCLstd_Acosh = 1, + OpenCLstd_Acospi = 2, + OpenCLstd_Asin = 3, + OpenCLstd_Asinh = 4, + OpenCLstd_Asinpi = 5, + OpenCLstd_Atan = 6, + OpenCLstd_Atan2 = 7, + OpenCLstd_Atanh = 8, + OpenCLstd_Atanpi = 9, + OpenCLstd_Atan2pi = 10, + OpenCLstd_Cbrt = 11, + OpenCLstd_Ceil = 12, + OpenCLstd_Copysign = 13, + OpenCLstd_Cos = 14, + OpenCLstd_Cosh = 15, + OpenCLstd_Cospi = 16, + OpenCLstd_Erfc = 17, + OpenCLstd_Erf = 18, + OpenCLstd_Exp = 19, + OpenCLstd_Exp2 = 20, + OpenCLstd_Exp10 = 21, + OpenCLstd_Expm1 = 22, + OpenCLstd_Fabs = 23, + OpenCLstd_Fdim = 24, + OpenCLstd_Floor = 25, + OpenCLstd_Fma = 26, + OpenCLstd_Fmax = 27, + OpenCLstd_Fmin = 28, + OpenCLstd_Fmod = 29, + OpenCLstd_Fract = 30, + OpenCLstd_Frexp = 31, + OpenCLstd_Hypot = 32, + OpenCLstd_Ilogb = 33, + OpenCLstd_Ldexp = 34, + OpenCLstd_Lgamma = 35, + OpenCLstd_Lgamma_r = 36, + OpenCLstd_Log = 37, + OpenCLstd_Log2 = 38, + OpenCLstd_Log10 = 39, + OpenCLstd_Log1p = 40, + OpenCLstd_Logb = 41, + OpenCLstd_Mad = 42, + OpenCLstd_Maxmag = 43, + OpenCLstd_Minmag = 44, + OpenCLstd_Modf = 45, + OpenCLstd_Nan = 46, + OpenCLstd_Nextafter = 47, + OpenCLstd_Pow = 48, + OpenCLstd_Pown = 49, + OpenCLstd_Powr = 50, + OpenCLstd_Remainder = 51, + OpenCLstd_Remquo = 52, + OpenCLstd_Rint = 53, + OpenCLstd_Rootn = 54, + OpenCLstd_Round = 55, + OpenCLstd_Rsqrt = 56, + OpenCLstd_Sin = 57, + OpenCLstd_Sincos = 58, + OpenCLstd_Sinh = 59, + OpenCLstd_Sinpi = 60, + OpenCLstd_Sqrt = 61, + OpenCLstd_Tan = 62, + OpenCLstd_Tanh = 63, + OpenCLstd_Tanpi = 64, + OpenCLstd_Tgamma = 65, + OpenCLstd_Trunc = 66, + OpenCLstd_Half_cos = 67, + OpenCLstd_Half_divide = 68, + OpenCLstd_Half_exp = 69, + OpenCLstd_Half_exp2 = 70, + OpenCLstd_Half_exp10 = 71, + OpenCLstd_Half_log = 72, + OpenCLstd_Half_log2 = 73, + OpenCLstd_Half_log10 = 74, + OpenCLstd_Half_powr = 75, + OpenCLstd_Half_recip = 76, + OpenCLstd_Half_rsqrt = 77, + OpenCLstd_Half_sin = 78, + OpenCLstd_Half_sqrt = 79, + OpenCLstd_Half_tan = 80, + OpenCLstd_Native_cos = 81, + OpenCLstd_Native_divide = 82, + OpenCLstd_Native_exp = 83, + OpenCLstd_Native_exp2 = 84, + OpenCLstd_Native_exp10 = 85, + OpenCLstd_Native_log = 86, + OpenCLstd_Native_log2 = 87, + OpenCLstd_Native_log10 = 88, + OpenCLstd_Native_powr = 89, + OpenCLstd_Native_recip = 90, + OpenCLstd_Native_rsqrt = 91, + OpenCLstd_Native_sin = 92, + OpenCLstd_Native_sqrt = 93, + OpenCLstd_Native_tan = 94, + + // Section 2.2: Integer instructions + OpenCLstd_SAbs = 141, + OpenCLstd_SAbs_diff = 142, + OpenCLstd_SAdd_sat = 143, + OpenCLstd_UAdd_sat = 144, + OpenCLstd_SHadd = 145, + OpenCLstd_UHadd = 146, + OpenCLstd_SRhadd = 147, + OpenCLstd_URhadd = 148, + OpenCLstd_SClamp = 149, + OpenCLstd_UClamp = 150, + OpenCLstd_Clz = 151, + OpenCLstd_Ctz = 152, + OpenCLstd_SMad_hi = 153, + OpenCLstd_UMad_sat = 154, + OpenCLstd_SMad_sat = 155, + OpenCLstd_SMax = 156, + OpenCLstd_UMax = 157, + OpenCLstd_SMin = 158, + OpenCLstd_UMin = 159, + OpenCLstd_SMul_hi = 160, + OpenCLstd_Rotate = 161, + OpenCLstd_SSub_sat = 162, + OpenCLstd_USub_sat = 163, + OpenCLstd_U_Upsample = 164, + OpenCLstd_S_Upsample = 165, + OpenCLstd_Popcount = 
166, + OpenCLstd_SMad24 = 167, + OpenCLstd_UMad24 = 168, + OpenCLstd_SMul24 = 169, + OpenCLstd_UMul24 = 170, + OpenCLstd_UAbs = 201, + OpenCLstd_UAbs_diff = 202, + OpenCLstd_UMul_hi = 203, + OpenCLstd_UMad_hi = 204, + + // Section 2.3: Common instructions + OpenCLstd_FClamp = 95, + OpenCLstd_Degrees = 96, + OpenCLstd_FMax_common = 97, + OpenCLstd_FMin_common = 98, + OpenCLstd_Mix = 99, + OpenCLstd_Radians = 100, + OpenCLstd_Step = 101, + OpenCLstd_Smoothstep = 102, + OpenCLstd_Sign = 103, + + // Section 2.4: Geometric instructions + OpenCLstd_Cross = 104, + OpenCLstd_Distance = 105, + OpenCLstd_Length = 106, + OpenCLstd_Normalize = 107, + OpenCLstd_Fast_distance = 108, + OpenCLstd_Fast_length = 109, + OpenCLstd_Fast_normalize = 110, + + // Section 2.5: Relational instructions + OpenCLstd_Bitselect = 186, + OpenCLstd_Select = 187, + + // Section 2.6: Vector Data Load and Store instructions + OpenCLstd_Vloadn = 171, + OpenCLstd_Vstoren = 172, + OpenCLstd_Vload_half = 173, + OpenCLstd_Vload_halfn = 174, + OpenCLstd_Vstore_half = 175, + OpenCLstd_Vstore_half_r = 176, + OpenCLstd_Vstore_halfn = 177, + OpenCLstd_Vstore_halfn_r = 178, + OpenCLstd_Vloada_halfn = 179, + OpenCLstd_Vstorea_halfn = 180, + OpenCLstd_Vstorea_halfn_r = 181, + + // Section 2.7: Miscellaneous Vector instructions + OpenCLstd_Shuffle = 182, + OpenCLstd_Shuffle2 = 183, + + // Section 2.8: Misc instructions + OpenCLstd_Printf = 184, + OpenCLstd_Prefetch = 185, +}; + +#endif + #endif // #ifndef OPENCLstd_H diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCLDebugInfo100.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCLDebugInfo100.h new file mode 100755 index 000000000..7285bd410 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/OpenCLDebugInfo100.h @@ -0,0 +1,157 @@ +// Copyright (c) 2018 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and/or associated documentation files (the "Materials"), +// to deal in the Materials without restriction, including without limitation +// the rights to use, copy, modify, merge, publish, distribute, sublicense, +// and/or sell copies of the Materials, and to permit persons to whom the +// Materials are furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +// IN THE MATERIALS. 
+ +#ifndef SPIRV_UNIFIED1_OpenCLDebugInfo100_H_ +#define SPIRV_UNIFIED1_OpenCLDebugInfo100_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + OpenCLDebugInfo100Version = 200, + OpenCLDebugInfo100Version_BitWidthPadding = 0x7fffffff +}; +enum { + OpenCLDebugInfo100Revision = 2, + OpenCLDebugInfo100Revision_BitWidthPadding = 0x7fffffff +}; + +enum OpenCLDebugInfo100Instructions { + OpenCLDebugInfo100DebugInfoNone = 0, + OpenCLDebugInfo100DebugCompilationUnit = 1, + OpenCLDebugInfo100DebugTypeBasic = 2, + OpenCLDebugInfo100DebugTypePointer = 3, + OpenCLDebugInfo100DebugTypeQualifier = 4, + OpenCLDebugInfo100DebugTypeArray = 5, + OpenCLDebugInfo100DebugTypeVector = 6, + OpenCLDebugInfo100DebugTypedef = 7, + OpenCLDebugInfo100DebugTypeFunction = 8, + OpenCLDebugInfo100DebugTypeEnum = 9, + OpenCLDebugInfo100DebugTypeComposite = 10, + OpenCLDebugInfo100DebugTypeMember = 11, + OpenCLDebugInfo100DebugTypeInheritance = 12, + OpenCLDebugInfo100DebugTypePtrToMember = 13, + OpenCLDebugInfo100DebugTypeTemplate = 14, + OpenCLDebugInfo100DebugTypeTemplateParameter = 15, + OpenCLDebugInfo100DebugTypeTemplateTemplateParameter = 16, + OpenCLDebugInfo100DebugTypeTemplateParameterPack = 17, + OpenCLDebugInfo100DebugGlobalVariable = 18, + OpenCLDebugInfo100DebugFunctionDeclaration = 19, + OpenCLDebugInfo100DebugFunction = 20, + OpenCLDebugInfo100DebugLexicalBlock = 21, + OpenCLDebugInfo100DebugLexicalBlockDiscriminator = 22, + OpenCLDebugInfo100DebugScope = 23, + OpenCLDebugInfo100DebugNoScope = 24, + OpenCLDebugInfo100DebugInlinedAt = 25, + OpenCLDebugInfo100DebugLocalVariable = 26, + OpenCLDebugInfo100DebugInlinedVariable = 27, + OpenCLDebugInfo100DebugDeclare = 28, + OpenCLDebugInfo100DebugValue = 29, + OpenCLDebugInfo100DebugOperation = 30, + OpenCLDebugInfo100DebugExpression = 31, + OpenCLDebugInfo100DebugMacroDef = 32, + OpenCLDebugInfo100DebugMacroUndef = 33, + OpenCLDebugInfo100DebugImportedEntity = 34, + OpenCLDebugInfo100DebugSource = 35, + OpenCLDebugInfo100InstructionsMax = 0x7fffffff +}; + + +enum OpenCLDebugInfo100DebugInfoFlags { + OpenCLDebugInfo100None = 0x0000, + OpenCLDebugInfo100FlagIsProtected = 0x01, + OpenCLDebugInfo100FlagIsPrivate = 0x02, + OpenCLDebugInfo100FlagIsPublic = 0x03, + OpenCLDebugInfo100FlagIsLocal = 0x04, + OpenCLDebugInfo100FlagIsDefinition = 0x08, + OpenCLDebugInfo100FlagFwdDecl = 0x10, + OpenCLDebugInfo100FlagArtificial = 0x20, + OpenCLDebugInfo100FlagExplicit = 0x40, + OpenCLDebugInfo100FlagPrototyped = 0x80, + OpenCLDebugInfo100FlagObjectPointer = 0x100, + OpenCLDebugInfo100FlagStaticMember = 0x200, + OpenCLDebugInfo100FlagIndirectVariable = 0x400, + OpenCLDebugInfo100FlagLValueReference = 0x800, + OpenCLDebugInfo100FlagRValueReference = 0x1000, + OpenCLDebugInfo100FlagIsOptimized = 0x2000, + OpenCLDebugInfo100FlagIsEnumClass = 0x4000, + OpenCLDebugInfo100FlagTypePassByValue = 0x8000, + OpenCLDebugInfo100FlagTypePassByReference = 0x10000, + OpenCLDebugInfo100DebugInfoFlagsMax = 0x7fffffff +}; + +enum OpenCLDebugInfo100DebugBaseTypeAttributeEncoding { + OpenCLDebugInfo100Unspecified = 0, + OpenCLDebugInfo100Address = 1, + OpenCLDebugInfo100Boolean = 2, + OpenCLDebugInfo100Float = 3, + OpenCLDebugInfo100Signed = 4, + OpenCLDebugInfo100SignedChar = 5, + OpenCLDebugInfo100Unsigned = 6, + OpenCLDebugInfo100UnsignedChar = 7, + OpenCLDebugInfo100DebugBaseTypeAttributeEncodingMax = 0x7fffffff +}; + +enum OpenCLDebugInfo100DebugCompositeType { + OpenCLDebugInfo100Class = 0, + OpenCLDebugInfo100Structure = 1, + OpenCLDebugInfo100Union = 2, + 
OpenCLDebugInfo100DebugCompositeTypeMax = 0x7fffffff +}; + +enum OpenCLDebugInfo100DebugTypeQualifier { + OpenCLDebugInfo100ConstType = 0, + OpenCLDebugInfo100VolatileType = 1, + OpenCLDebugInfo100RestrictType = 2, + OpenCLDebugInfo100AtomicType = 3, + OpenCLDebugInfo100DebugTypeQualifierMax = 0x7fffffff +}; + +enum OpenCLDebugInfo100DebugOperation { + OpenCLDebugInfo100Deref = 0, + OpenCLDebugInfo100Plus = 1, + OpenCLDebugInfo100Minus = 2, + OpenCLDebugInfo100PlusUconst = 3, + OpenCLDebugInfo100BitPiece = 4, + OpenCLDebugInfo100Swap = 5, + OpenCLDebugInfo100Xderef = 6, + OpenCLDebugInfo100StackValue = 7, + OpenCLDebugInfo100Constu = 8, + OpenCLDebugInfo100Fragment = 9, + OpenCLDebugInfo100DebugOperationMax = 0x7fffffff +}; + +enum OpenCLDebugInfo100DebugImportedEntity { + OpenCLDebugInfo100ImportedModule = 0, + OpenCLDebugInfo100ImportedDeclaration = 1, + OpenCLDebugInfo100DebugImportedEntityMax = 0x7fffffff +}; + + +#ifdef __cplusplus +} +#endif + +#endif // SPIRV_UNIFIED1_OpenCLDebugInfo100_H_ diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.debuginfo.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.debuginfo.grammar.json new file mode 100755 index 000000000..7d6e8e5b3 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.debuginfo.grammar.json @@ -0,0 +1,572 @@ +{ + "copyright" : [ + "Copyright (c) 2017 The Khronos Group Inc.", + "", + "Permission is hereby granted, free of charge, to any person obtaining a copy", + "of this software and/or associated documentation files (the \"Materials\"),", + "to deal in the Materials without restriction, including without limitation", + "the rights to use, copy, modify, merge, publish, distribute, sublicense,", + "and/or sell copies of the Materials, and to permit persons to whom the", + "Materials are furnished to do so, subject to the following conditions:", + "", + "The above copyright notice and this permission notice shall be included in", + "all copies or substantial portions of the Materials.", + "", + "MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS", + "STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND", + "HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ ", + "", + "THE MATERIALS ARE PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", + "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", + "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", + "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", + "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", + "FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS", + "IN THE MATERIALS." 
+ ], + "version" : 100, + "revision" : 1, + "instructions" : [ + { + "opname" : "DebugInfoNone", + "opcode" : 0 + }, + { + "opname" : "DebugCompilationUnit", + "opcode" : 1, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Version'" }, + { "kind" : "LiteralInteger", "name" : "'DWARF Version'" } + ] + }, + { + "opname" : "DebugTypeBasic", + "opcode" : 2, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugBaseTypeAttributeEncoding", "name" : "'Encoding'" } + ] + }, + { + "opname" : "DebugTypePointer", + "opcode" : 3, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "StorageClass", "name" : "'Storage Class'" }, + { "kind" : "DebugInfoFlags", "name" : "'Literal Flags'" } + ] + }, + { + "opname" : "DebugTypeQualifier", + "opcode" : 4, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "DebugTypeQualifier", "name" : "'Type Qualifier'" } + ] + }, + { + "opname" : "DebugTypeArray", + "opcode" : 5, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "IdRef", "name" : "'Component Counts'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeVector", + "opcode" : 6, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "LiteralInteger", "name" : "'Component Count'" } + ] + }, + { + "opname" : "DebugTypedef", + "opcode" : 7, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" } + ] + }, + { + "opname" : "DebugTypeFunction", + "opcode" : 8, + "operands" : [ + { "kind" : "IdRef", "name" : "'Return Type'" }, + { "kind" : "IdRef", "name" : "'Paramter Types'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeEnum", + "opcode" : 9, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Underlying Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "PairIdRefIdRef", "name" : "'Value, Name, Value, Name, ...'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeComposite", + "opcode" : 10, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "DebugCompositeType", "name" : "'Tag'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "IdRef", "name" : "'Members'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeMember", + "opcode" : 11, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Offset'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", 
"name" : "'Flags'" }, + { "kind" : "IdRef", "name" : "'Value'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugTypeInheritance", + "opcode" : 12, + "operands" : [ + { "kind" : "IdRef", "name" : "'Child'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Offset'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" } + ] + }, + { + "opname" : "DebugTypePtrToMember", + "opcode" : 13, + "operands" : [ + { "kind" : "IdRef", "name" : "'Member Type'" }, + { "kind" : "IdRef", "name" : "'Parent'" } + ] + }, + { + "opname" : "DebugTypeTemplate", + "opcode" : 14, + "operands" : [ + { "kind" : "IdRef", "name" : "'Target'" }, + { "kind" : "IdRef", "name" : "'Parameters'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeTemplateParameter", + "opcode" : 15, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Actual Type'" }, + { "kind" : "IdRef", "name" : "'Value'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" } + ] + }, + { + "opname" : "DebugTypeTemplateTemplateParameter", + "opcode" : 16, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Template Name'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" } + ] + }, + { + "opname" : "DebugTypeTemplateParameterPack", + "opcode" : 17, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Template Parameters'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugGlobalVariable", + "opcode" : 18, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Linkage Name'" }, + { "kind" : "IdRef", "name" : "'Variable'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "IdRef", "name" : "'Static Member Declaration'", "quantifier" : "?" 
} + ] + }, + { + "opname" : "DebugFunctionDeclaration", + "opcode" : 19, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Linkage Name'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" } + ] + }, + { + "opname" : "DebugFunction", + "opcode" : 20, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Linkage Name'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "LiteralInteger", "name" : "'Scope Line'" }, + { "kind" : "IdRef", "name" : "'Function'" }, + { "kind" : "IdRef", "name" : "'Declaration'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugLexicalBlock", + "opcode" : 21, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Name'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugLexicalBlockDiscriminator", + "opcode" : 22, + "operands" : [ + { "kind" : "IdRef", "name" : "'Scope'" }, + { "kind" : "LiteralInteger", "name" : "'Discriminator'" }, + { "kind" : "IdRef", "name" : "'Parent'" } + ] + }, + { + "opname" : "DebugScope", + "opcode" : 23, + "operands" : [ + { "kind" : "IdRef", "name" : "'Scope'" }, + { "kind" : "IdRef", "name" : "'Inlined At'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugNoScope", + "opcode" : 24 + }, + { + "opname" : "DebugInlinedAt", + "opcode" : 25, + "operands" : [ + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "IdRef", "name" : "'Scope'" }, + { "kind" : "IdRef", "name" : "'Inlined'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugLocalVariable", + "opcode" : 26, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "LiteralInteger", "name" : "'Arg Number'", "quantifier" : "?" 
} + ] + }, + { + "opname" : "DebugInlinedVariable", + "opcode" : 27, + "operands" : [ + { "kind" : "IdRef", "name" : "'Variable'" }, + { "kind" : "IdRef", "name" : "'Inlined'" } + ] + }, + { + "opname" : "DebugDeclare", + "opcode" : 28, + "operands" : [ + { "kind" : "IdRef", "name" : "'Local Variable'" }, + { "kind" : "IdRef", "name" : "'Variable'" }, + { "kind" : "IdRef", "name" : "'Expression'" } + ] + }, + { + "opname" : "DebugValue", + "opcode" : 29, + "operands" : [ + { "kind" : "IdRef", "name" : "'Value'" }, + { "kind" : "IdRef", "name" : "'Expression'" }, + { "kind" : "IdRef", "name" : "'Indexes'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugOperation", + "opcode" : 30, + "operands" : [ + { "kind" : "DebugOperation", "name" : "'OpCode'" }, + { "kind" : "LiteralInteger", "name" : "'Operands ...'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugExpression", + "opcode" : 31, + "operands" : [ + { "kind" : "IdRef", "name" : "'Operands ...'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugMacroDef", + "opcode" : 32, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Value'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugMacroUndef", + "opcode" : 33, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "IdRef", "name" : "'Macro'" } + ] + } + ], + "operand_kinds" : [ + { + "category" : "BitEnum", + "kind" : "DebugInfoFlags", + "enumerants" : [ + { + "enumerant" : "None", + "value" : "0x0000" + }, + { + "enumerant" : "FlagIsProtected", + "value" : "0x01" + }, + { + "enumerant" : "FlagIsPrivate", + "value" : "0x02" + }, + { + "enumerant" : "FlagIsPublic", + "value" : "0x03" + }, + { + "enumerant" : "FlagIsLocal", + "value" : "0x04" + }, + { + "enumerant" : "FlagIsDefinition", + "value" : "0x08" + }, + { + "enumerant" : "FlagFwdDecl", + "value" : "0x10" + }, + { + "enumerant" : "FlagArtificial", + "value" : "0x20" + }, + { + "enumerant" : "FlagExplicit", + "value" : "0x40" + }, + { + "enumerant" : "FlagPrototyped", + "value" : "0x80" + }, + { + "enumerant" : "FlagObjectPointer", + "value" : "0x100" + }, + { + "enumerant" : "FlagStaticMember", + "value" : "0x200" + }, + { + "enumerant" : "FlagIndirectVariable", + "value" : "0x400" + }, + { + "enumerant" : "FlagLValueReference", + "value" : "0x800" + }, + { + "enumerant" : "FlagRValueReference", + "value" : "0x1000" + }, + { + "enumerant" : "FlagIsOptimized", + "value" : "0x2000" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugBaseTypeAttributeEncoding", + "enumerants" : [ + { + "enumerant" : "Unspecified", + "value" : "0" + }, + { + "enumerant" : "Address", + "value" : "1" + }, + { + "enumerant" : "Boolean", + "value" : "2" + }, + { + "enumerant" : "Float", + "value" : "4" + }, + { + "enumerant" : "Signed", + "value" : "5" + }, + { + "enumerant" : "SignedChar", + "value" : "6" + }, + { + "enumerant" : "Unsigned", + "value" : "7" + }, + { + "enumerant" : "UnsignedChar", + "value" : "8" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugCompositeType", + "enumerants" : [ + { + "enumerant" : "Class", + "value" : "0" + }, + { + "enumerant" : "Structure", + "value" : "1" + }, + { + "enumerant" : "Union", + "value" : "2" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugTypeQualifier", + "enumerants" : [ + { + "enumerant" : "ConstType", + "value" : "0" + }, + { + "enumerant" : 
"VolatileType", + "value" : "1" + }, + { + "enumerant" : "RestrictType", + "value" : "2" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugOperation", + "enumerants" : [ + { + "enumerant" : "Deref", + "value" : "0" + }, + { + "enumerant" : "Plus", + "value" : "1" + }, + { + "enumerant" : "Minus", + "value" : "2" + }, + { + "enumerant" : "PlusUconst", + "value" : "3", + "parameters" : [ + { "kind" : "LiteralInteger" } + ] + }, + { + "enumerant" : "BitPiece", + "value" : "4", + "parameters" : [ + { "kind" : "LiteralInteger" }, + { "kind" : "LiteralInteger" } + ] + }, + { + "enumerant" : "Swap", + "value" : "5" + }, + { + "enumerant" : "Xderef", + "value" : "6" + }, + { + "enumerant" : "StackValue", + "value" : "7" + }, + { + "enumerant" : "Constu", + "value" : "8", + "parameters" : [ + { "kind" : "LiteralInteger" } + ] + } + ] + } + ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.clspvreflection.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.clspvreflection.grammar.json new file mode 100755 index 000000000..15e56990a --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.clspvreflection.grammar.json @@ -0,0 +1,237 @@ +{ + "revision" : 1, + "instructions" : [ + { + "opname" : "Kernel", + "opcode" : 1, + "operands" : [ + { "kind" : "IdRef", "name" : "Kernel" }, + { "kind" : "IdRef", "name" : "Name" } + ] + }, + { + "opname" : "ArgumentInfo", + "opcode" : 2, + "operands" : [ + { "kind" : "IdRef", "name" : "Name" }, + { "kind" : "IdRef", "name" : "Type Name", "quantifier" : "?" }, + { "kind" : "IdRef", "name" : "Address Qualifier", "quantifier" : "?" }, + { "kind" : "IdRef", "name" : "Access Qualifier", "quantifier" : "?" }, + { "kind" : "IdRef", "name" : "Type Qualifier", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentStorageBuffer", + "opcode" : 3, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentUniform", + "opcode" : 4, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentPodStorageBuffer", + "opcode" : 5, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentPodUniform", + "opcode" : 6, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" 
} + ] + }, + { + "opname" : "ArgumentPodPushConstant", + "opcode" : 7, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentSampledImage", + "opcode" : 8, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentStorageImage", + "opcode" : 9, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentSampler", + "opcode" : 10, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "ArgumentWorkgroup", + "opcode" : 11, + "operands" : [ + { "kind" : "IdRef", "name" : "Decl" }, + { "kind" : "IdRef", "name" : "Ordinal" }, + { "kind" : "IdRef", "name" : "SpecId" }, + { "kind" : "IdRef", "name" : "ElemSize" }, + { "kind" : "IdRef", "name" : "ArgInfo", "quantifier" : "?" } + ] + }, + { + "opname" : "SpecConstantWorkgroupSize", + "opcode" : 12, + "operands" : [ + { "kind" : "IdRef", "name" : "X" }, + { "kind" : "IdRef", "name" : "Y" }, + { "kind" : "IdRef", "name" : "Z" } + ] + }, + { + "opname" : "SpecConstantGlobalOffset", + "opcode" : 13, + "operands" : [ + { "kind" : "IdRef", "name" : "X" }, + { "kind" : "IdRef", "name" : "Y" }, + { "kind" : "IdRef", "name" : "Z" } + ] + }, + { + "opname" : "SpecConstantWorkDim", + "opcode" : 14, + "operands" : [ + { "kind" : "IdRef", "name" : "Dim" } + ] + }, + { + "opname" : "PushConstantGlobalOffset", + "opcode" : 15, + "operands" : [ + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" } + ] + }, + { + "opname" : "PushConstantEnqueuedLocalSize", + "opcode" : 16, + "operands" : [ + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" } + ] + }, + { + "opname" : "PushConstantGlobalSize", + "opcode" : 17, + "operands" : [ + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" } + ] + }, + { + "opname" : "PushConstantRegionOffset", + "opcode" : 18, + "operands" : [ + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" } + ] + }, + { + "opname" : "PushConstantNumWorkgroups", + "opcode" : 19, + "operands" : [ + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" } + ] + }, + { + "opname" : "PushConstantRegionGroupOffset", + "opcode" : 20, + "operands" : [ + { "kind" : "IdRef", "name" : "Offset" }, + { "kind" : "IdRef", "name" : "Size" } + ] + }, + { + "opname" : "ConstantDataStorageBuffer", + "opcode" : 21, + "operands" : [ + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "Data" } + ] + }, + { + "opname" : "ConstantDataUniform", + "opcode" : 22, + "operands" : [ + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : 
"Binding" }, + { "kind" : "IdRef", "name" : "Data" } + ] + }, + { + "opname" : "LiteralSampler", + "opcode" : 23, + "operands" : [ + { "kind" : "IdRef", "name" : "DescriptorSet" }, + { "kind" : "IdRef", "name" : "Binding" }, + { "kind" : "IdRef", "name" : "Mask" } + ] + }, + { + "opname" : "PropertyRequiredWorkgroupSize", + "opcode" : 24, + "operands" : [ + { "kind" : "IdRef", "name" : "Kernel" }, + { "kind" : "IdRef", "name" : "X" }, + { "kind" : "IdRef", "name" : "Y" }, + { "kind" : "IdRef", "name" : "Z" } + ] + } + ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.debugprintf.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.debugprintf.grammar.json new file mode 100755 index 000000000..71fa7112c --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.nonsemantic.debugprintf.grammar.json @@ -0,0 +1,13 @@ +{ + "revision" : 1, + "instructions" : [ + { + "opname" : "DebugPrintf", + "opcode" : 1, + "operands" : [ + { "kind" : "IdRef", "name" : "'Format'" }, + { "kind" : "IdRef", "quantifier" : "*" } + ] + } + ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json new file mode 100755 index 000000000..d4cfab215 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.opencl.debuginfo.100.grammar.json @@ -0,0 +1,636 @@ +{ + "copyright" : [ + "Copyright (c) 2018 The Khronos Group Inc.", + "", + "Permission is hereby granted, free of charge, to any person obtaining a copy", + "of this software and/or associated documentation files (the \"Materials\"),", + "to deal in the Materials without restriction, including without limitation", + "the rights to use, copy, modify, merge, publish, distribute, sublicense,", + "and/or sell copies of the Materials, and to permit persons to whom the", + "Materials are furnished to do so, subject to the following conditions:", + "", + "The above copyright notice and this permission notice shall be included in", + "all copies or substantial portions of the Materials.", + "", + "MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS", + "STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND", + "HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ ", + "", + "THE MATERIALS ARE PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", + "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", + "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", + "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", + "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", + "FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS", + "IN THE MATERIALS." 
+ ], + "version" : 200, + "revision" : 2, + "instructions" : [ + { + "opname" : "DebugInfoNone", + "opcode" : 0 + }, + { + "opname" : "DebugCompilationUnit", + "opcode" : 1, + "operands" : [ + { "kind" : "LiteralInteger", "name" : "'Version'" }, + { "kind" : "LiteralInteger", "name" : "'DWARF Version'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "SourceLanguage", "name" : "'Language'" } + ] + }, + { + "opname" : "DebugTypeBasic", + "opcode" : 2, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugBaseTypeAttributeEncoding", "name" : "'Encoding'" } + ] + }, + { + "opname" : "DebugTypePointer", + "opcode" : 3, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "StorageClass", "name" : "'Storage Class'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" } + ] + }, + { + "opname" : "DebugTypeQualifier", + "opcode" : 4, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "DebugTypeQualifier", "name" : "'Type Qualifier'" } + ] + }, + { + "opname" : "DebugTypeArray", + "opcode" : 5, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "IdRef", "name" : "'Component Counts'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeVector", + "opcode" : 6, + "operands" : [ + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "LiteralInteger", "name" : "'Component Count'" } + ] + }, + { + "opname" : "DebugTypedef", + "opcode" : 7, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Base Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" } + ] + }, + { + "opname" : "DebugTypeFunction", + "opcode" : 8, + "operands" : [ + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "IdRef", "name" : "'Return Type'" }, + { "kind" : "IdRef", "name" : "'Parameter Types'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeEnum", + "opcode" : 9, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Underlying Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "PairIdRefIdRef", "name" : "'Value, Name, Value, Name, ...'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeComposite", + "opcode" : 10, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "DebugCompositeType", "name" : "'Tag'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Linkage Name'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "IdRef", "name" : "'Members'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeMember", + "opcode" : 11, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : 
"IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Offset'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "IdRef", "name" : "'Value'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugTypeInheritance", + "opcode" : 12, + "operands" : [ + { "kind" : "IdRef", "name" : "'Child'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Offset'" }, + { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" } + ] + }, + { + "opname" : "DebugTypePtrToMember", + "opcode" : 13, + "operands" : [ + { "kind" : "IdRef", "name" : "'Member Type'" }, + { "kind" : "IdRef", "name" : "'Parent'" } + ] + }, + { + "opname" : "DebugTypeTemplate", + "opcode" : 14, + "operands" : [ + { "kind" : "IdRef", "name" : "'Target'" }, + { "kind" : "IdRef", "name" : "'Parameters'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugTypeTemplateParameter", + "opcode" : 15, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Actual Type'" }, + { "kind" : "IdRef", "name" : "'Value'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" } + ] + }, + { + "opname" : "DebugTypeTemplateTemplateParameter", + "opcode" : 16, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Template Name'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" } + ] + }, + { + "opname" : "DebugTypeTemplateParameterPack", + "opcode" : 17, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Template Parameters'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugGlobalVariable", + "opcode" : 18, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Linkage Name'" }, + { "kind" : "IdRef", "name" : "'Variable'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "IdRef", "name" : "'Static Member Declaration'", "quantifier" : "?" 
} + ] + }, + { + "opname" : "DebugFunctionDeclaration", + "opcode" : 19, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Linkage Name'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" } + ] + }, + { + "opname" : "DebugFunction", + "opcode" : 20, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Linkage Name'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "LiteralInteger", "name" : "'Scope Line'" }, + { "kind" : "IdRef", "name" : "'Function'" }, + { "kind" : "IdRef", "name" : "'Declaration'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugLexicalBlock", + "opcode" : 21, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "IdRef", "name" : "'Name'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugLexicalBlockDiscriminator", + "opcode" : 22, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Discriminator'" }, + { "kind" : "IdRef", "name" : "'Parent'" } + ] + }, + { + "opname" : "DebugScope", + "opcode" : 23, + "operands" : [ + { "kind" : "IdRef", "name" : "'Scope'" }, + { "kind" : "IdRef", "name" : "'Inlined At'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugNoScope", + "opcode" : 24 + }, + { + "opname" : "DebugInlinedAt", + "opcode" : 25, + "operands" : [ + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "IdRef", "name" : "'Scope'" }, + { "kind" : "IdRef", "name" : "'Inlined'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugLocalVariable", + "opcode" : 26, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Type'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" }, + { "kind" : "DebugInfoFlags", "name" : "'Flags'" }, + { "kind" : "LiteralInteger", "name" : "'Arg Number'", "quantifier" : "?" 
} + ] + }, + { + "opname" : "DebugInlinedVariable", + "opcode" : 27, + "operands" : [ + { "kind" : "IdRef", "name" : "'Variable'" }, + { "kind" : "IdRef", "name" : "'Inlined'" } + ] + }, + { + "opname" : "DebugDeclare", + "opcode" : 28, + "operands" : [ + { "kind" : "IdRef", "name" : "'Local Variable'" }, + { "kind" : "IdRef", "name" : "'Variable'" }, + { "kind" : "IdRef", "name" : "'Expression'" } + ] + }, + { + "opname" : "DebugValue", + "opcode" : 29, + "operands" : [ + { "kind" : "IdRef", "name" : "'Local Variable'" }, + { "kind" : "IdRef", "name" : "'Value'" }, + { "kind" : "IdRef", "name" : "'Expression'" }, + { "kind" : "IdRef", "name" : "'Indexes'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugOperation", + "opcode" : 30, + "operands" : [ + { "kind" : "DebugOperation", "name" : "'OpCode'" }, + { "kind" : "LiteralInteger", "name" : "'Operands ...'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugExpression", + "opcode" : 31, + "operands" : [ + { "kind" : "IdRef", "name" : "'Operands ...'", "quantifier" : "*" } + ] + }, + { + "opname" : "DebugMacroDef", + "opcode" : 32, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "IdRef", "name" : "'Value'", "quantifier" : "?" } + ] + }, + { + "opname" : "DebugMacroUndef", + "opcode" : 33, + "operands" : [ + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "IdRef", "name" : "'Macro'" } + ] + }, + { + "opname" : "DebugImportedEntity", + "opcode" : 34, + "operands" : [ + { "kind" : "IdRef", "name" : "'Name'" }, + { "kind" : "DebugImportedEntity", "name" : "'Tag'" }, + { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "IdRef", "name" : "'Entity'" }, + { "kind" : "LiteralInteger", "name" : "'Line'" }, + { "kind" : "LiteralInteger", "name" : "'Column'" }, + { "kind" : "IdRef", "name" : "'Parent'" } + ] + }, + { + "opname" : "DebugSource", + "opcode" : 35, + "operands" : [ + { "kind" : "IdRef", "name" : "'File'" }, + { "kind" : "IdRef", "name" : "'Text'", "quantifier" : "?" 
} + ] + } + ], + "operand_kinds" : [ + { + "category" : "BitEnum", + "kind" : "DebugInfoFlags", + "enumerants" : [ + { + "enumerant" : "None", + "value" : "0x0000" + }, + { + "enumerant" : "FlagIsProtected", + "value" : "0x01" + }, + { + "enumerant" : "FlagIsPrivate", + "value" : "0x02" + }, + { + "enumerant" : "FlagIsPublic", + "value" : "0x03" + }, + { + "enumerant" : "FlagIsLocal", + "value" : "0x04" + }, + { + "enumerant" : "FlagIsDefinition", + "value" : "0x08" + }, + { + "enumerant" : "FlagFwdDecl", + "value" : "0x10" + }, + { + "enumerant" : "FlagArtificial", + "value" : "0x20" + }, + { + "enumerant" : "FlagExplicit", + "value" : "0x40" + }, + { + "enumerant" : "FlagPrototyped", + "value" : "0x80" + }, + { + "enumerant" : "FlagObjectPointer", + "value" : "0x100" + }, + { + "enumerant" : "FlagStaticMember", + "value" : "0x200" + }, + { + "enumerant" : "FlagIndirectVariable", + "value" : "0x400" + }, + { + "enumerant" : "FlagLValueReference", + "value" : "0x800" + }, + { + "enumerant" : "FlagRValueReference", + "value" : "0x1000" + }, + { + "enumerant" : "FlagIsOptimized", + "value" : "0x2000" + }, + { + "enumerant" : "FlagIsEnumClass", + "value" : "0x4000" + }, + { + "enumerant" : "FlagTypePassByValue", + "value" : "0x8000" + }, + { + "enumerant" : "FlagTypePassByReference", + "value" : "0x10000" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugBaseTypeAttributeEncoding", + "enumerants" : [ + { + "enumerant" : "Unspecified", + "value" : "0" + }, + { + "enumerant" : "Address", + "value" : "1" + }, + { + "enumerant" : "Boolean", + "value" : "2" + }, + { + "enumerant" : "Float", + "value" : "3" + }, + { + "enumerant" : "Signed", + "value" : "4" + }, + { + "enumerant" : "SignedChar", + "value" : "5" + }, + { + "enumerant" : "Unsigned", + "value" : "6" + }, + { + "enumerant" : "UnsignedChar", + "value" : "7" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugCompositeType", + "enumerants" : [ + { + "enumerant" : "Class", + "value" : "0" + }, + { + "enumerant" : "Structure", + "value" : "1" + }, + { + "enumerant" : "Union", + "value" : "2" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugTypeQualifier", + "enumerants" : [ + { + "enumerant" : "ConstType", + "value" : "0" + }, + { + "enumerant" : "VolatileType", + "value" : "1" + }, + { + "enumerant" : "RestrictType", + "value" : "2" + }, + { + "enumerant" : "AtomicType", + "value" : "3" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugOperation", + "enumerants" : [ + { + "enumerant" : "Deref", + "value" : "0" + }, + { + "enumerant" : "Plus", + "value" : "1" + }, + { + "enumerant" : "Minus", + "value" : "2" + }, + { + "enumerant" : "PlusUconst", + "value" : "3", + "parameters" : [ + { "kind" : "LiteralInteger" } + ] + }, + { + "enumerant" : "BitPiece", + "value" : "4", + "parameters" : [ + { "kind" : "LiteralInteger" }, + { "kind" : "LiteralInteger" } + ] + }, + { + "enumerant" : "Swap", + "value" : "5" + }, + { + "enumerant" : "Xderef", + "value" : "6" + }, + { + "enumerant" : "StackValue", + "value" : "7" + }, + { + "enumerant" : "Constu", + "value" : "8", + "parameters" : [ + { "kind" : "LiteralInteger" } + ] + }, + { + "enumerant" : "Fragment", + "value" : "9", + "parameters" : [ + { "kind" : "LiteralInteger" }, + { "kind" : "LiteralInteger" } + ] + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "DebugImportedEntity", + "enumerants" : [ + { + "enumerant" : "ImportedModule", + "value" : "0" + }, + { + "enumerant" : "ImportedDeclaration", + "value" : "1" + } + ] + } + ] +} diff --git 
a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-gcn-shader.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-gcn-shader.grammar.json new file mode 100755 index 000000000..e18251bba --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-gcn-shader.grammar.json @@ -0,0 +1,26 @@ +{ + "revision" : 2, + "instructions" : [ + { + "opname" : "CubeFaceIndexAMD", + "opcode" : 1, + "operands" : [ + { "kind" : "IdRef", "name" : "'P'" } + ], + "extensions" : [ "SPV_AMD_gcn_shader" ] + }, + { + "opname" : "CubeFaceCoordAMD", + "opcode" : 2, + "operands" : [ + { "kind" : "IdRef", "name" : "'P'" } + ], + "extensions" : [ "SPV_AMD_gcn_shader" ] + }, + { + "opname" : "TimeAMD", + "opcode" : 3, + "extensions" : [ "SPV_AMD_gcn_shader" ] + } + ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-ballot.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-ballot.grammar.json new file mode 100755 index 000000000..62a470eeb --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-ballot.grammar.json @@ -0,0 +1,41 @@ +{ + "revision" : 5, + "instructions" : [ + { + "opname" : "SwizzleInvocationsAMD", + "opcode" : 1, + "operands" : [ + { "kind" : "IdRef", "name" : "'data'" }, + { "kind" : "IdRef", "name" : "'offset'" } + ], + "extensions" : [ "SPV_AMD_shader_ballot" ] + }, + { + "opname" : "SwizzleInvocationsMaskedAMD", + "opcode" : 2, + "operands" : [ + { "kind" : "IdRef", "name" : "'data'" }, + { "kind" : "IdRef", "name" : "'mask'" } + ], + "extensions" : [ "SPV_AMD_shader_ballot" ] + }, + { + "opname" : "WriteInvocationAMD", + "opcode" : 3, + "operands" : [ + { "kind" : "IdRef", "name" : "'inputValue'" }, + { "kind" : "IdRef", "name" : "'writeValue'" }, + { "kind" : "IdRef", "name" : "'invocationIndex'" } + ], + "extensions" : [ "SPV_AMD_shader_ballot" ] + }, + { + "opname" : "MbcntAMD", + "opcode" : 4, + "operands" : [ + { "kind" : "IdRef", "name" : "'mask'" } + ], + "extensions" : [ "SPV_AMD_shader_ballot" ] + } + ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-explicit-vertex-parameter.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-explicit-vertex-parameter.grammar.json new file mode 100755 index 000000000..e156b1b6f --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-explicit-vertex-parameter.grammar.json @@ -0,0 +1,14 @@ +{ + "revision" : 4, + "instructions" : [ + { + "opname" : "InterpolateAtVertexAMD", + "opcode" : 1, + "operands" : [ + { "kind" : "IdRef", "name" : "'interpolant'" }, + { "kind" : "IdRef", "name" : "'vertexIdx'" } + ], + "extensions" : [ "SPV_AMD_shader_explicit_vertex_parameter" ] + } + ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-trinary-minmax.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-trinary-minmax.grammar.json new file mode 100755 index 000000000..c681976fe --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/extinst.spv-amd-shader-trinary-minmax.grammar.json @@ -0,0 +1,95 @@ +{ + "revision" : 4, + "instructions" : [ + { + "opname" : "FMin3AMD", + "opcode" : 1, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" 
}, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "UMin3AMD", + "opcode" : 2, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "SMin3AMD", + "opcode" : 3, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "FMax3AMD", + "opcode" : 4, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "UMax3AMD", + "opcode" : 5, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "SMax3AMD", + "opcode" : 6, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "FMid3AMD", + "opcode" : 7, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "UMid3AMD", + "opcode" : 8, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + }, + { + "opname" : "SMid3AMD", + "opcode" : 9, + "operands" : [ + { "kind" : "IdRef", "name" : "'x'" }, + { "kind" : "IdRef", "name" : "'y'" }, + { "kind" : "IdRef", "name" : "'z'" } + ], + "extensions" : [ "SPV_AMD_shader_trinary_minmax" ] + } + ] +} diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.core.grammar.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.core.grammar.json index cb641420d..49f1bbc59 100755 --- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.core.grammar.json +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.core.grammar.json @@ -1,6 +1,6 @@ { "copyright" : [ - "Copyright (c) 2014-2016 The Khronos Group Inc.", + "Copyright (c) 2014-2020 The Khronos Group Inc.", "", "Permission is hereby granted, free of charge, to any person obtaining a copy", "of this software and/or associated documentation files (the \"Materials\"),", @@ -26,15 +26,122 @@ ], "magic_number" : "0x07230203", "major_version" : 1, - "minor_version" : 3, - "revision" : 1, + "minor_version" : 5, + "revision" : 4, + "instruction_printing_class" : [ + { + "tag" : "@exclude" + }, + { + "tag" : "Miscellaneous", + "heading" : "Miscellaneous Instructions" + }, + { + "tag" : "Debug", + "heading" : "Debug Instructions" + }, + { + "tag" : "Annotation", + "heading" : "Annotation Instructions" + }, + { + "tag" : "Extension", + "heading" : "Extension Instructions" + }, + { + "tag" : "Mode-Setting", + "heading" : "Mode-Setting Instructions" + }, + { + "tag" : "Type-Declaration", + "heading" : "Type-Declaration Instructions" + }, + { + "tag" : "Constant-Creation", + "heading" : "Constant-Creation 
Instructions" + }, + { + "tag" : "Memory", + "heading" : "Memory Instructions" + }, + { + "tag" : "Function", + "heading" : "Function Instructions" + }, + { + "tag" : "Image", + "heading" : "Image Instructions" + }, + { + "tag" : "Conversion", + "heading" : "Conversion Instructions" + }, + { + "tag" : "Composite", + "heading" : "Composite Instructions" + }, + { + "tag" : "Arithmetic", + "heading" : "Arithmetic Instructions" + }, + { + "tag" : "Bit", + "heading" : "Bit Instructions" + }, + { + "tag" : "Relational_and_Logical", + "heading" : "Relational and Logical Instructions" + }, + { + "tag" : "Derivative", + "heading" : "Derivative Instructions" + }, + { + "tag" : "Control-Flow", + "heading" : "Control-Flow Instructions" + }, + { + "tag" : "Atomic", + "heading" : "Atomic Instructions" + }, + { + "tag" : "Primitive", + "heading" : "Primitive Instructions" + }, + { + "tag" : "Barrier", + "heading" : "Barrier Instructions" + }, + { + "tag" : "Group", + "heading" : "Group and Subgroup Instructions" + }, + { + "tag" : "Device-Side_Enqueue", + "heading" : "Device-Side Enqueue Instructions" + }, + { + "tag" : "Pipe", + "heading" : "Pipe Instructions" + }, + { + "tag" : "Non-Uniform", + "heading" : "Non-Uniform Instructions" + }, + { + "tag" : "Reserved", + "heading" : "Reserved Instructions" + } + ], "instructions" : [ { "opname" : "OpNop", + "class" : "Miscellaneous", "opcode" : 0 }, { "opname" : "OpUndef", + "class" : "Miscellaneous", "opcode" : 1, "operands" : [ { "kind" : "IdResultType" }, @@ -43,6 +150,7 @@ }, { "opname" : "OpSourceContinued", + "class" : "Debug", "opcode" : 2, "operands" : [ { "kind" : "LiteralString", "name" : "'Continued Source'" } @@ -50,6 +158,7 @@ }, { "opname" : "OpSource", + "class" : "Debug", "opcode" : 3, "operands" : [ { "kind" : "SourceLanguage" }, @@ -60,6 +169,7 @@ }, { "opname" : "OpSourceExtension", + "class" : "Debug", "opcode" : 4, "operands" : [ { "kind" : "LiteralString", "name" : "'Extension'" } @@ -67,6 +177,7 @@ }, { "opname" : "OpName", + "class" : "Debug", "opcode" : 5, "operands" : [ { "kind" : "IdRef", "name" : "'Target'" }, @@ -75,6 +186,7 @@ }, { "opname" : "OpMemberName", + "class" : "Debug", "opcode" : 6, "operands" : [ { "kind" : "IdRef", "name" : "'Type'" }, @@ -84,6 +196,7 @@ }, { "opname" : "OpString", + "class" : "Debug", "opcode" : 7, "operands" : [ { "kind" : "IdResult" }, @@ -92,6 +205,7 @@ }, { "opname" : "OpLine", + "class" : "Debug", "opcode" : 8, "operands" : [ { "kind" : "IdRef", "name" : "'File'" }, @@ -101,6 +215,7 @@ }, { "opname" : "OpExtension", + "class" : "Extension", "opcode" : 10, "operands" : [ { "kind" : "LiteralString", "name" : "'Name'" } @@ -108,6 +223,7 @@ }, { "opname" : "OpExtInstImport", + "class" : "Extension", "opcode" : 11, "operands" : [ { "kind" : "IdResult" }, @@ -116,6 +232,7 @@ }, { "opname" : "OpExtInst", + "class" : "Extension", "opcode" : 12, "operands" : [ { "kind" : "IdResultType" }, @@ -127,6 +244,7 @@ }, { "opname" : "OpMemoryModel", + "class" : "Mode-Setting", "opcode" : 14, "operands" : [ { "kind" : "AddressingModel" }, @@ -135,6 +253,7 @@ }, { "opname" : "OpEntryPoint", + "class" : "Mode-Setting", "opcode" : 15, "operands" : [ { "kind" : "ExecutionModel" }, @@ -145,6 +264,7 @@ }, { "opname" : "OpExecutionMode", + "class" : "Mode-Setting", "opcode" : 16, "operands" : [ { "kind" : "IdRef", "name" : "'Entry Point'" }, @@ -153,6 +273,7 @@ }, { "opname" : "OpCapability", + "class" : "Mode-Setting", "opcode" : 17, "operands" : [ { "kind" : "Capability", "name" : "'Capability'" } @@ -160,6 +281,7 @@ 
}, { "opname" : "OpTypeVoid", + "class" : "Type-Declaration", "opcode" : 19, "operands" : [ { "kind" : "IdResult" } @@ -167,6 +289,7 @@ }, { "opname" : "OpTypeBool", + "class" : "Type-Declaration", "opcode" : 20, "operands" : [ { "kind" : "IdResult" } @@ -174,6 +297,7 @@ }, { "opname" : "OpTypeInt", + "class" : "Type-Declaration", "opcode" : 21, "operands" : [ { "kind" : "IdResult" }, @@ -183,6 +307,7 @@ }, { "opname" : "OpTypeFloat", + "class" : "Type-Declaration", "opcode" : 22, "operands" : [ { "kind" : "IdResult" }, @@ -191,6 +316,7 @@ }, { "opname" : "OpTypeVector", + "class" : "Type-Declaration", "opcode" : 23, "operands" : [ { "kind" : "IdResult" }, @@ -200,6 +326,7 @@ }, { "opname" : "OpTypeMatrix", + "class" : "Type-Declaration", "opcode" : 24, "operands" : [ { "kind" : "IdResult" }, @@ -210,6 +337,7 @@ }, { "opname" : "OpTypeImage", + "class" : "Type-Declaration", "opcode" : 25, "operands" : [ { "kind" : "IdResult" }, @@ -225,6 +353,7 @@ }, { "opname" : "OpTypeSampler", + "class" : "Type-Declaration", "opcode" : 26, "operands" : [ { "kind" : "IdResult" } @@ -232,6 +361,7 @@ }, { "opname" : "OpTypeSampledImage", + "class" : "Type-Declaration", "opcode" : 27, "operands" : [ { "kind" : "IdResult" }, @@ -240,6 +370,7 @@ }, { "opname" : "OpTypeArray", + "class" : "Type-Declaration", "opcode" : 28, "operands" : [ { "kind" : "IdResult" }, @@ -249,6 +380,7 @@ }, { "opname" : "OpTypeRuntimeArray", + "class" : "Type-Declaration", "opcode" : 29, "operands" : [ { "kind" : "IdResult" }, @@ -258,6 +390,7 @@ }, { "opname" : "OpTypeStruct", + "class" : "Type-Declaration", "opcode" : 30, "operands" : [ { "kind" : "IdResult" }, @@ -266,6 +399,7 @@ }, { "opname" : "OpTypeOpaque", + "class" : "Type-Declaration", "opcode" : 31, "operands" : [ { "kind" : "IdResult" }, @@ -275,6 +409,7 @@ }, { "opname" : "OpTypePointer", + "class" : "Type-Declaration", "opcode" : 32, "operands" : [ { "kind" : "IdResult" }, @@ -284,6 +419,7 @@ }, { "opname" : "OpTypeFunction", + "class" : "Type-Declaration", "opcode" : 33, "operands" : [ { "kind" : "IdResult" }, @@ -293,6 +429,7 @@ }, { "opname" : "OpTypeEvent", + "class" : "Type-Declaration", "opcode" : 34, "operands" : [ { "kind" : "IdResult" } @@ -301,6 +438,7 @@ }, { "opname" : "OpTypeDeviceEvent", + "class" : "Type-Declaration", "opcode" : 35, "operands" : [ { "kind" : "IdResult" } @@ -309,6 +447,7 @@ }, { "opname" : "OpTypeReserveId", + "class" : "Type-Declaration", "opcode" : 36, "operands" : [ { "kind" : "IdResult" } @@ -317,6 +456,7 @@ }, { "opname" : "OpTypeQueue", + "class" : "Type-Declaration", "opcode" : 37, "operands" : [ { "kind" : "IdResult" } @@ -325,6 +465,7 @@ }, { "opname" : "OpTypePipe", + "class" : "Type-Declaration", "opcode" : 38, "operands" : [ { "kind" : "IdResult" }, @@ -334,15 +475,20 @@ }, { "opname" : "OpTypeForwardPointer", + "class" : "Type-Declaration", "opcode" : 39, "operands" : [ { "kind" : "IdRef", "name" : "'Pointer Type'" }, { "kind" : "StorageClass" } ], - "capabilities" : [ "Addresses" ] + "capabilities" : [ + "Addresses", + "PhysicalStorageBufferAddresses" + ] }, { "opname" : "OpConstantTrue", + "class" : "Constant-Creation", "opcode" : 41, "operands" : [ { "kind" : "IdResultType" }, @@ -351,6 +497,7 @@ }, { "opname" : "OpConstantFalse", + "class" : "Constant-Creation", "opcode" : 42, "operands" : [ { "kind" : "IdResultType" }, @@ -359,6 +506,7 @@ }, { "opname" : "OpConstant", + "class" : "Constant-Creation", "opcode" : 43, "operands" : [ { "kind" : "IdResultType" }, @@ -368,6 +516,7 @@ }, { "opname" : "OpConstantComposite", + 
"class" : "Constant-Creation", "opcode" : 44, "operands" : [ { "kind" : "IdResultType" }, @@ -377,6 +526,7 @@ }, { "opname" : "OpConstantSampler", + "class" : "Constant-Creation", "opcode" : 45, "operands" : [ { "kind" : "IdResultType" }, @@ -389,6 +539,7 @@ }, { "opname" : "OpConstantNull", + "class" : "Constant-Creation", "opcode" : 46, "operands" : [ { "kind" : "IdResultType" }, @@ -397,6 +548,7 @@ }, { "opname" : "OpSpecConstantTrue", + "class" : "Constant-Creation", "opcode" : 48, "operands" : [ { "kind" : "IdResultType" }, @@ -405,6 +557,7 @@ }, { "opname" : "OpSpecConstantFalse", + "class" : "Constant-Creation", "opcode" : 49, "operands" : [ { "kind" : "IdResultType" }, @@ -413,6 +566,7 @@ }, { "opname" : "OpSpecConstant", + "class" : "Constant-Creation", "opcode" : 50, "operands" : [ { "kind" : "IdResultType" }, @@ -422,6 +576,7 @@ }, { "opname" : "OpSpecConstantComposite", + "class" : "Constant-Creation", "opcode" : 51, "operands" : [ { "kind" : "IdResultType" }, @@ -431,6 +586,7 @@ }, { "opname" : "OpSpecConstantOp", + "class" : "Constant-Creation", "opcode" : 52, "operands" : [ { "kind" : "IdResultType" }, @@ -440,6 +596,7 @@ }, { "opname" : "OpFunction", + "class" : "Function", "opcode" : 54, "operands" : [ { "kind" : "IdResultType" }, @@ -450,6 +607,7 @@ }, { "opname" : "OpFunctionParameter", + "class" : "Function", "opcode" : 55, "operands" : [ { "kind" : "IdResultType" }, @@ -458,10 +616,12 @@ }, { "opname" : "OpFunctionEnd", + "class" : "Function", "opcode" : 56 }, { "opname" : "OpFunctionCall", + "class" : "Function", "opcode" : 57, "operands" : [ { "kind" : "IdResultType" }, @@ -472,6 +632,7 @@ }, { "opname" : "OpVariable", + "class" : "Memory", "opcode" : 59, "operands" : [ { "kind" : "IdResultType" }, @@ -482,6 +643,7 @@ }, { "opname" : "OpImageTexelPointer", + "class" : "Memory", "opcode" : 60, "operands" : [ { "kind" : "IdResultType" }, @@ -493,6 +655,7 @@ }, { "opname" : "OpLoad", + "class" : "Memory", "opcode" : 61, "operands" : [ { "kind" : "IdResultType" }, @@ -503,6 +666,7 @@ }, { "opname" : "OpStore", + "class" : "Memory", "opcode" : 62, "operands" : [ { "kind" : "IdRef", "name" : "'Pointer'" }, @@ -512,26 +676,31 @@ }, { "opname" : "OpCopyMemory", + "class" : "Memory", "opcode" : 63, "operands" : [ { "kind" : "IdRef", "name" : "'Target'" }, { "kind" : "IdRef", "name" : "'Source'" }, + { "kind" : "MemoryAccess", "quantifier" : "?" }, { "kind" : "MemoryAccess", "quantifier" : "?" } ] }, { "opname" : "OpCopyMemorySized", + "class" : "Memory", "opcode" : 64, "operands" : [ { "kind" : "IdRef", "name" : "'Target'" }, { "kind" : "IdRef", "name" : "'Source'" }, { "kind" : "IdRef", "name" : "'Size'" }, + { "kind" : "MemoryAccess", "quantifier" : "?" }, { "kind" : "MemoryAccess", "quantifier" : "?" 
} ], "capabilities" : [ "Addresses" ] }, { "opname" : "OpAccessChain", + "class" : "Memory", "opcode" : 65, "operands" : [ { "kind" : "IdResultType" }, @@ -542,6 +711,7 @@ }, { "opname" : "OpInBoundsAccessChain", + "class" : "Memory", "opcode" : 66, "operands" : [ { "kind" : "IdResultType" }, @@ -552,6 +722,7 @@ }, { "opname" : "OpPtrAccessChain", + "class" : "Memory", "opcode" : 67, "operands" : [ { "kind" : "IdResultType" }, @@ -563,11 +734,13 @@ "capabilities" : [ "Addresses", "VariablePointers", - "VariablePointersStorageBuffer" + "VariablePointersStorageBuffer", + "PhysicalStorageBufferAddresses" ] }, { "opname" : "OpArrayLength", + "class" : "Memory", "opcode" : 68, "operands" : [ { "kind" : "IdResultType" }, @@ -579,6 +752,7 @@ }, { "opname" : "OpGenericPtrMemSemantics", + "class" : "Memory", "opcode" : 69, "operands" : [ { "kind" : "IdResultType" }, @@ -589,6 +763,7 @@ }, { "opname" : "OpInBoundsPtrAccessChain", + "class" : "Memory", "opcode" : 70, "operands" : [ { "kind" : "IdResultType" }, @@ -601,6 +776,7 @@ }, { "opname" : "OpDecorate", + "class" : "Annotation", "opcode" : 71, "operands" : [ { "kind" : "IdRef", "name" : "'Target'" }, @@ -609,6 +785,7 @@ }, { "opname" : "OpMemberDecorate", + "class" : "Annotation", "opcode" : 72, "operands" : [ { "kind" : "IdRef", "name" : "'Structure Type'" }, @@ -618,6 +795,7 @@ }, { "opname" : "OpDecorationGroup", + "class" : "Annotation", "opcode" : 73, "operands" : [ { "kind" : "IdResult" } @@ -625,6 +803,7 @@ }, { "opname" : "OpGroupDecorate", + "class" : "Annotation", "opcode" : 74, "operands" : [ { "kind" : "IdRef", "name" : "'Decoration Group'" }, @@ -633,6 +812,7 @@ }, { "opname" : "OpGroupMemberDecorate", + "class" : "Annotation", "opcode" : 75, "operands" : [ { "kind" : "IdRef", "name" : "'Decoration Group'" }, @@ -641,6 +821,7 @@ }, { "opname" : "OpVectorExtractDynamic", + "class" : "Composite", "opcode" : 77, "operands" : [ { "kind" : "IdResultType" }, @@ -651,6 +832,7 @@ }, { "opname" : "OpVectorInsertDynamic", + "class" : "Composite", "opcode" : 78, "operands" : [ { "kind" : "IdResultType" }, @@ -662,6 +844,7 @@ }, { "opname" : "OpVectorShuffle", + "class" : "Composite", "opcode" : 79, "operands" : [ { "kind" : "IdResultType" }, @@ -673,6 +856,7 @@ }, { "opname" : "OpCompositeConstruct", + "class" : "Composite", "opcode" : 80, "operands" : [ { "kind" : "IdResultType" }, @@ -682,6 +866,7 @@ }, { "opname" : "OpCompositeExtract", + "class" : "Composite", "opcode" : 81, "operands" : [ { "kind" : "IdResultType" }, @@ -692,6 +877,7 @@ }, { "opname" : "OpCompositeInsert", + "class" : "Composite", "opcode" : 82, "operands" : [ { "kind" : "IdResultType" }, @@ -703,6 +889,7 @@ }, { "opname" : "OpCopyObject", + "class" : "Composite", "opcode" : 83, "operands" : [ { "kind" : "IdResultType" }, @@ -712,6 +899,7 @@ }, { "opname" : "OpTranspose", + "class" : "Composite", "opcode" : 84, "operands" : [ { "kind" : "IdResultType" }, @@ -722,6 +910,7 @@ }, { "opname" : "OpSampledImage", + "class" : "Image", "opcode" : 86, "operands" : [ { "kind" : "IdResultType" }, @@ -732,6 +921,7 @@ }, { "opname" : "OpImageSampleImplicitLod", + "class" : "Image", "opcode" : 87, "operands" : [ { "kind" : "IdResultType" }, @@ -744,6 +934,7 @@ }, { "opname" : "OpImageSampleExplicitLod", + "class" : "Image", "opcode" : 88, "operands" : [ { "kind" : "IdResultType" }, @@ -755,6 +946,7 @@ }, { "opname" : "OpImageSampleDrefImplicitLod", + "class" : "Image", "opcode" : 89, "operands" : [ { "kind" : "IdResultType" }, @@ -768,6 +960,7 @@ }, { "opname" : 
"OpImageSampleDrefExplicitLod", + "class" : "Image", "opcode" : 90, "operands" : [ { "kind" : "IdResultType" }, @@ -781,6 +974,7 @@ }, { "opname" : "OpImageSampleProjImplicitLod", + "class" : "Image", "opcode" : 91, "operands" : [ { "kind" : "IdResultType" }, @@ -793,6 +987,7 @@ }, { "opname" : "OpImageSampleProjExplicitLod", + "class" : "Image", "opcode" : 92, "operands" : [ { "kind" : "IdResultType" }, @@ -805,6 +1000,7 @@ }, { "opname" : "OpImageSampleProjDrefImplicitLod", + "class" : "Image", "opcode" : 93, "operands" : [ { "kind" : "IdResultType" }, @@ -818,6 +1014,7 @@ }, { "opname" : "OpImageSampleProjDrefExplicitLod", + "class" : "Image", "opcode" : 94, "operands" : [ { "kind" : "IdResultType" }, @@ -831,6 +1028,7 @@ }, { "opname" : "OpImageFetch", + "class" : "Image", "opcode" : 95, "operands" : [ { "kind" : "IdResultType" }, @@ -842,6 +1040,7 @@ }, { "opname" : "OpImageGather", + "class" : "Image", "opcode" : 96, "operands" : [ { "kind" : "IdResultType" }, @@ -855,6 +1054,7 @@ }, { "opname" : "OpImageDrefGather", + "class" : "Image", "opcode" : 97, "operands" : [ { "kind" : "IdResultType" }, @@ -868,6 +1068,7 @@ }, { "opname" : "OpImageRead", + "class" : "Image", "opcode" : 98, "operands" : [ { "kind" : "IdResultType" }, @@ -879,6 +1080,7 @@ }, { "opname" : "OpImageWrite", + "class" : "Image", "opcode" : 99, "operands" : [ { "kind" : "IdRef", "name" : "'Image'" }, @@ -889,6 +1091,7 @@ }, { "opname" : "OpImage", + "class" : "Image", "opcode" : 100, "operands" : [ { "kind" : "IdResultType" }, @@ -898,6 +1101,7 @@ }, { "opname" : "OpImageQueryFormat", + "class" : "Image", "opcode" : 101, "operands" : [ { "kind" : "IdResultType" }, @@ -908,6 +1112,7 @@ }, { "opname" : "OpImageQueryOrder", + "class" : "Image", "opcode" : 102, "operands" : [ { "kind" : "IdResultType" }, @@ -918,6 +1123,7 @@ }, { "opname" : "OpImageQuerySizeLod", + "class" : "Image", "opcode" : 103, "operands" : [ { "kind" : "IdResultType" }, @@ -929,6 +1135,7 @@ }, { "opname" : "OpImageQuerySize", + "class" : "Image", "opcode" : 104, "operands" : [ { "kind" : "IdResultType" }, @@ -939,6 +1146,7 @@ }, { "opname" : "OpImageQueryLod", + "class" : "Image", "opcode" : 105, "operands" : [ { "kind" : "IdResultType" }, @@ -950,6 +1158,7 @@ }, { "opname" : "OpImageQueryLevels", + "class" : "Image", "opcode" : 106, "operands" : [ { "kind" : "IdResultType" }, @@ -960,6 +1169,7 @@ }, { "opname" : "OpImageQuerySamples", + "class" : "Image", "opcode" : 107, "operands" : [ { "kind" : "IdResultType" }, @@ -970,6 +1180,7 @@ }, { "opname" : "OpConvertFToU", + "class" : "Conversion", "opcode" : 109, "operands" : [ { "kind" : "IdResultType" }, @@ -979,6 +1190,7 @@ }, { "opname" : "OpConvertFToS", + "class" : "Conversion", "opcode" : 110, "operands" : [ { "kind" : "IdResultType" }, @@ -988,6 +1200,7 @@ }, { "opname" : "OpConvertSToF", + "class" : "Conversion", "opcode" : 111, "operands" : [ { "kind" : "IdResultType" }, @@ -997,6 +1210,7 @@ }, { "opname" : "OpConvertUToF", + "class" : "Conversion", "opcode" : 112, "operands" : [ { "kind" : "IdResultType" }, @@ -1006,6 +1220,7 @@ }, { "opname" : "OpUConvert", + "class" : "Conversion", "opcode" : 113, "operands" : [ { "kind" : "IdResultType" }, @@ -1015,6 +1230,7 @@ }, { "opname" : "OpSConvert", + "class" : "Conversion", "opcode" : 114, "operands" : [ { "kind" : "IdResultType" }, @@ -1024,6 +1240,7 @@ }, { "opname" : "OpFConvert", + "class" : "Conversion", "opcode" : 115, "operands" : [ { "kind" : "IdResultType" }, @@ -1033,6 +1250,7 @@ }, { "opname" : "OpQuantizeToF16", + "class" : 
"Conversion", "opcode" : 116, "operands" : [ { "kind" : "IdResultType" }, @@ -1042,16 +1260,21 @@ }, { "opname" : "OpConvertPtrToU", + "class" : "Conversion", "opcode" : 117, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" } ], - "capabilities" : [ "Addresses" ] + "capabilities" : [ + "Addresses", + "PhysicalStorageBufferAddresses" + ] }, { "opname" : "OpSatConvertSToU", + "class" : "Conversion", "opcode" : 118, "operands" : [ { "kind" : "IdResultType" }, @@ -1062,6 +1285,7 @@ }, { "opname" : "OpSatConvertUToS", + "class" : "Conversion", "opcode" : 119, "operands" : [ { "kind" : "IdResultType" }, @@ -1072,16 +1296,21 @@ }, { "opname" : "OpConvertUToPtr", + "class" : "Conversion", "opcode" : 120, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Integer Value'" } ], - "capabilities" : [ "Addresses" ] + "capabilities" : [ + "Addresses", + "PhysicalStorageBufferAddresses" + ] }, { "opname" : "OpPtrCastToGeneric", + "class" : "Conversion", "opcode" : 121, "operands" : [ { "kind" : "IdResultType" }, @@ -1092,6 +1321,7 @@ }, { "opname" : "OpGenericCastToPtr", + "class" : "Conversion", "opcode" : 122, "operands" : [ { "kind" : "IdResultType" }, @@ -1102,6 +1332,7 @@ }, { "opname" : "OpGenericCastToPtrExplicit", + "class" : "Conversion", "opcode" : 123, "operands" : [ { "kind" : "IdResultType" }, @@ -1113,6 +1344,7 @@ }, { "opname" : "OpBitcast", + "class" : "Conversion", "opcode" : 124, "operands" : [ { "kind" : "IdResultType" }, @@ -1122,6 +1354,7 @@ }, { "opname" : "OpSNegate", + "class" : "Arithmetic", "opcode" : 126, "operands" : [ { "kind" : "IdResultType" }, @@ -1131,6 +1364,7 @@ }, { "opname" : "OpFNegate", + "class" : "Arithmetic", "opcode" : 127, "operands" : [ { "kind" : "IdResultType" }, @@ -1140,6 +1374,7 @@ }, { "opname" : "OpIAdd", + "class" : "Arithmetic", "opcode" : 128, "operands" : [ { "kind" : "IdResultType" }, @@ -1150,6 +1385,7 @@ }, { "opname" : "OpFAdd", + "class" : "Arithmetic", "opcode" : 129, "operands" : [ { "kind" : "IdResultType" }, @@ -1160,6 +1396,7 @@ }, { "opname" : "OpISub", + "class" : "Arithmetic", "opcode" : 130, "operands" : [ { "kind" : "IdResultType" }, @@ -1170,6 +1407,7 @@ }, { "opname" : "OpFSub", + "class" : "Arithmetic", "opcode" : 131, "operands" : [ { "kind" : "IdResultType" }, @@ -1180,6 +1418,7 @@ }, { "opname" : "OpIMul", + "class" : "Arithmetic", "opcode" : 132, "operands" : [ { "kind" : "IdResultType" }, @@ -1190,6 +1429,7 @@ }, { "opname" : "OpFMul", + "class" : "Arithmetic", "opcode" : 133, "operands" : [ { "kind" : "IdResultType" }, @@ -1200,6 +1440,7 @@ }, { "opname" : "OpUDiv", + "class" : "Arithmetic", "opcode" : 134, "operands" : [ { "kind" : "IdResultType" }, @@ -1210,6 +1451,7 @@ }, { "opname" : "OpSDiv", + "class" : "Arithmetic", "opcode" : 135, "operands" : [ { "kind" : "IdResultType" }, @@ -1220,6 +1462,7 @@ }, { "opname" : "OpFDiv", + "class" : "Arithmetic", "opcode" : 136, "operands" : [ { "kind" : "IdResultType" }, @@ -1230,6 +1473,7 @@ }, { "opname" : "OpUMod", + "class" : "Arithmetic", "opcode" : 137, "operands" : [ { "kind" : "IdResultType" }, @@ -1240,6 +1484,7 @@ }, { "opname" : "OpSRem", + "class" : "Arithmetic", "opcode" : 138, "operands" : [ { "kind" : "IdResultType" }, @@ -1250,6 +1495,7 @@ }, { "opname" : "OpSMod", + "class" : "Arithmetic", "opcode" : 139, "operands" : [ { "kind" : "IdResultType" }, @@ -1260,6 +1506,7 @@ }, { "opname" : "OpFRem", + "class" : "Arithmetic", "opcode" : 140, "operands" : [ { "kind" 
: "IdResultType" }, @@ -1270,6 +1517,7 @@ }, { "opname" : "OpFMod", + "class" : "Arithmetic", "opcode" : 141, "operands" : [ { "kind" : "IdResultType" }, @@ -1280,6 +1528,7 @@ }, { "opname" : "OpVectorTimesScalar", + "class" : "Arithmetic", "opcode" : 142, "operands" : [ { "kind" : "IdResultType" }, @@ -1290,6 +1539,7 @@ }, { "opname" : "OpMatrixTimesScalar", + "class" : "Arithmetic", "opcode" : 143, "operands" : [ { "kind" : "IdResultType" }, @@ -1301,6 +1551,7 @@ }, { "opname" : "OpVectorTimesMatrix", + "class" : "Arithmetic", "opcode" : 144, "operands" : [ { "kind" : "IdResultType" }, @@ -1312,6 +1563,7 @@ }, { "opname" : "OpMatrixTimesVector", + "class" : "Arithmetic", "opcode" : 145, "operands" : [ { "kind" : "IdResultType" }, @@ -1323,6 +1575,7 @@ }, { "opname" : "OpMatrixTimesMatrix", + "class" : "Arithmetic", "opcode" : 146, "operands" : [ { "kind" : "IdResultType" }, @@ -1334,6 +1587,7 @@ }, { "opname" : "OpOuterProduct", + "class" : "Arithmetic", "opcode" : 147, "operands" : [ { "kind" : "IdResultType" }, @@ -1345,6 +1599,7 @@ }, { "opname" : "OpDot", + "class" : "Arithmetic", "opcode" : 148, "operands" : [ { "kind" : "IdResultType" }, @@ -1355,6 +1610,7 @@ }, { "opname" : "OpIAddCarry", + "class" : "Arithmetic", "opcode" : 149, "operands" : [ { "kind" : "IdResultType" }, @@ -1365,6 +1621,7 @@ }, { "opname" : "OpISubBorrow", + "class" : "Arithmetic", "opcode" : 150, "operands" : [ { "kind" : "IdResultType" }, @@ -1375,6 +1632,7 @@ }, { "opname" : "OpUMulExtended", + "class" : "Arithmetic", "opcode" : 151, "operands" : [ { "kind" : "IdResultType" }, @@ -1385,6 +1643,7 @@ }, { "opname" : "OpSMulExtended", + "class" : "Arithmetic", "opcode" : 152, "operands" : [ { "kind" : "IdResultType" }, @@ -1395,6 +1654,7 @@ }, { "opname" : "OpAny", + "class" : "Relational_and_Logical", "opcode" : 154, "operands" : [ { "kind" : "IdResultType" }, @@ -1404,6 +1664,7 @@ }, { "opname" : "OpAll", + "class" : "Relational_and_Logical", "opcode" : 155, "operands" : [ { "kind" : "IdResultType" }, @@ -1413,6 +1674,7 @@ }, { "opname" : "OpIsNan", + "class" : "Relational_and_Logical", "opcode" : 156, "operands" : [ { "kind" : "IdResultType" }, @@ -1422,6 +1684,7 @@ }, { "opname" : "OpIsInf", + "class" : "Relational_and_Logical", "opcode" : 157, "operands" : [ { "kind" : "IdResultType" }, @@ -1431,6 +1694,7 @@ }, { "opname" : "OpIsFinite", + "class" : "Relational_and_Logical", "opcode" : 158, "operands" : [ { "kind" : "IdResultType" }, @@ -1441,6 +1705,7 @@ }, { "opname" : "OpIsNormal", + "class" : "Relational_and_Logical", "opcode" : 159, "operands" : [ { "kind" : "IdResultType" }, @@ -1451,6 +1716,7 @@ }, { "opname" : "OpSignBitSet", + "class" : "Relational_and_Logical", "opcode" : 160, "operands" : [ { "kind" : "IdResultType" }, @@ -1461,6 +1727,7 @@ }, { "opname" : "OpLessOrGreater", + "class" : "Relational_and_Logical", "opcode" : 161, "operands" : [ { "kind" : "IdResultType" }, @@ -1472,6 +1739,7 @@ }, { "opname" : "OpOrdered", + "class" : "Relational_and_Logical", "opcode" : 162, "operands" : [ { "kind" : "IdResultType" }, @@ -1483,6 +1751,7 @@ }, { "opname" : "OpUnordered", + "class" : "Relational_and_Logical", "opcode" : 163, "operands" : [ { "kind" : "IdResultType" }, @@ -1494,6 +1763,7 @@ }, { "opname" : "OpLogicalEqual", + "class" : "Relational_and_Logical", "opcode" : 164, "operands" : [ { "kind" : "IdResultType" }, @@ -1504,6 +1774,7 @@ }, { "opname" : "OpLogicalNotEqual", + "class" : "Relational_and_Logical", "opcode" : 165, "operands" : [ { "kind" : "IdResultType" }, @@ -1514,6 +1785,7 @@ }, 
{ "opname" : "OpLogicalOr", + "class" : "Relational_and_Logical", "opcode" : 166, "operands" : [ { "kind" : "IdResultType" }, @@ -1524,6 +1796,7 @@ }, { "opname" : "OpLogicalAnd", + "class" : "Relational_and_Logical", "opcode" : 167, "operands" : [ { "kind" : "IdResultType" }, @@ -1534,6 +1807,7 @@ }, { "opname" : "OpLogicalNot", + "class" : "Relational_and_Logical", "opcode" : 168, "operands" : [ { "kind" : "IdResultType" }, @@ -1543,6 +1817,7 @@ }, { "opname" : "OpSelect", + "class" : "Relational_and_Logical", "opcode" : 169, "operands" : [ { "kind" : "IdResultType" }, @@ -1554,6 +1829,7 @@ }, { "opname" : "OpIEqual", + "class" : "Relational_and_Logical", "opcode" : 170, "operands" : [ { "kind" : "IdResultType" }, @@ -1564,6 +1840,7 @@ }, { "opname" : "OpINotEqual", + "class" : "Relational_and_Logical", "opcode" : 171, "operands" : [ { "kind" : "IdResultType" }, @@ -1574,6 +1851,7 @@ }, { "opname" : "OpUGreaterThan", + "class" : "Relational_and_Logical", "opcode" : 172, "operands" : [ { "kind" : "IdResultType" }, @@ -1584,6 +1862,7 @@ }, { "opname" : "OpSGreaterThan", + "class" : "Relational_and_Logical", "opcode" : 173, "operands" : [ { "kind" : "IdResultType" }, @@ -1594,6 +1873,7 @@ }, { "opname" : "OpUGreaterThanEqual", + "class" : "Relational_and_Logical", "opcode" : 174, "operands" : [ { "kind" : "IdResultType" }, @@ -1604,6 +1884,7 @@ }, { "opname" : "OpSGreaterThanEqual", + "class" : "Relational_and_Logical", "opcode" : 175, "operands" : [ { "kind" : "IdResultType" }, @@ -1614,6 +1895,7 @@ }, { "opname" : "OpULessThan", + "class" : "Relational_and_Logical", "opcode" : 176, "operands" : [ { "kind" : "IdResultType" }, @@ -1624,6 +1906,7 @@ }, { "opname" : "OpSLessThan", + "class" : "Relational_and_Logical", "opcode" : 177, "operands" : [ { "kind" : "IdResultType" }, @@ -1634,6 +1917,7 @@ }, { "opname" : "OpULessThanEqual", + "class" : "Relational_and_Logical", "opcode" : 178, "operands" : [ { "kind" : "IdResultType" }, @@ -1644,6 +1928,7 @@ }, { "opname" : "OpSLessThanEqual", + "class" : "Relational_and_Logical", "opcode" : 179, "operands" : [ { "kind" : "IdResultType" }, @@ -1654,6 +1939,7 @@ }, { "opname" : "OpFOrdEqual", + "class" : "Relational_and_Logical", "opcode" : 180, "operands" : [ { "kind" : "IdResultType" }, @@ -1664,6 +1950,7 @@ }, { "opname" : "OpFUnordEqual", + "class" : "Relational_and_Logical", "opcode" : 181, "operands" : [ { "kind" : "IdResultType" }, @@ -1674,6 +1961,7 @@ }, { "opname" : "OpFOrdNotEqual", + "class" : "Relational_and_Logical", "opcode" : 182, "operands" : [ { "kind" : "IdResultType" }, @@ -1684,6 +1972,7 @@ }, { "opname" : "OpFUnordNotEqual", + "class" : "Relational_and_Logical", "opcode" : 183, "operands" : [ { "kind" : "IdResultType" }, @@ -1694,6 +1983,7 @@ }, { "opname" : "OpFOrdLessThan", + "class" : "Relational_and_Logical", "opcode" : 184, "operands" : [ { "kind" : "IdResultType" }, @@ -1704,6 +1994,7 @@ }, { "opname" : "OpFUnordLessThan", + "class" : "Relational_and_Logical", "opcode" : 185, "operands" : [ { "kind" : "IdResultType" }, @@ -1714,6 +2005,7 @@ }, { "opname" : "OpFOrdGreaterThan", + "class" : "Relational_and_Logical", "opcode" : 186, "operands" : [ { "kind" : "IdResultType" }, @@ -1724,6 +2016,7 @@ }, { "opname" : "OpFUnordGreaterThan", + "class" : "Relational_and_Logical", "opcode" : 187, "operands" : [ { "kind" : "IdResultType" }, @@ -1734,6 +2027,7 @@ }, { "opname" : "OpFOrdLessThanEqual", + "class" : "Relational_and_Logical", "opcode" : 188, "operands" : [ { "kind" : "IdResultType" }, @@ -1744,6 +2038,7 @@ }, { "opname" : 
"OpFUnordLessThanEqual", + "class" : "Relational_and_Logical", "opcode" : 189, "operands" : [ { "kind" : "IdResultType" }, @@ -1754,6 +2049,7 @@ }, { "opname" : "OpFOrdGreaterThanEqual", + "class" : "Relational_and_Logical", "opcode" : 190, "operands" : [ { "kind" : "IdResultType" }, @@ -1764,6 +2060,7 @@ }, { "opname" : "OpFUnordGreaterThanEqual", + "class" : "Relational_and_Logical", "opcode" : 191, "operands" : [ { "kind" : "IdResultType" }, @@ -1774,6 +2071,7 @@ }, { "opname" : "OpShiftRightLogical", + "class" : "Bit", "opcode" : 194, "operands" : [ { "kind" : "IdResultType" }, @@ -1784,6 +2082,7 @@ }, { "opname" : "OpShiftRightArithmetic", + "class" : "Bit", "opcode" : 195, "operands" : [ { "kind" : "IdResultType" }, @@ -1794,6 +2093,7 @@ }, { "opname" : "OpShiftLeftLogical", + "class" : "Bit", "opcode" : 196, "operands" : [ { "kind" : "IdResultType" }, @@ -1804,6 +2104,7 @@ }, { "opname" : "OpBitwiseOr", + "class" : "Bit", "opcode" : 197, "operands" : [ { "kind" : "IdResultType" }, @@ -1814,6 +2115,7 @@ }, { "opname" : "OpBitwiseXor", + "class" : "Bit", "opcode" : 198, "operands" : [ { "kind" : "IdResultType" }, @@ -1824,6 +2126,7 @@ }, { "opname" : "OpBitwiseAnd", + "class" : "Bit", "opcode" : 199, "operands" : [ { "kind" : "IdResultType" }, @@ -1834,6 +2137,7 @@ }, { "opname" : "OpNot", + "class" : "Bit", "opcode" : 200, "operands" : [ { "kind" : "IdResultType" }, @@ -1843,6 +2147,7 @@ }, { "opname" : "OpBitFieldInsert", + "class" : "Bit", "opcode" : 201, "operands" : [ { "kind" : "IdResultType" }, @@ -1856,6 +2161,7 @@ }, { "opname" : "OpBitFieldSExtract", + "class" : "Bit", "opcode" : 202, "operands" : [ { "kind" : "IdResultType" }, @@ -1868,6 +2174,7 @@ }, { "opname" : "OpBitFieldUExtract", + "class" : "Bit", "opcode" : 203, "operands" : [ { "kind" : "IdResultType" }, @@ -1880,6 +2187,7 @@ }, { "opname" : "OpBitReverse", + "class" : "Bit", "opcode" : 204, "operands" : [ { "kind" : "IdResultType" }, @@ -1890,6 +2198,7 @@ }, { "opname" : "OpBitCount", + "class" : "Bit", "opcode" : 205, "operands" : [ { "kind" : "IdResultType" }, @@ -1899,6 +2208,7 @@ }, { "opname" : "OpDPdx", + "class" : "Derivative", "opcode" : 207, "operands" : [ { "kind" : "IdResultType" }, @@ -1909,6 +2219,7 @@ }, { "opname" : "OpDPdy", + "class" : "Derivative", "opcode" : 208, "operands" : [ { "kind" : "IdResultType" }, @@ -1919,6 +2230,7 @@ }, { "opname" : "OpFwidth", + "class" : "Derivative", "opcode" : 209, "operands" : [ { "kind" : "IdResultType" }, @@ -1929,6 +2241,7 @@ }, { "opname" : "OpDPdxFine", + "class" : "Derivative", "opcode" : 210, "operands" : [ { "kind" : "IdResultType" }, @@ -1939,6 +2252,7 @@ }, { "opname" : "OpDPdyFine", + "class" : "Derivative", "opcode" : 211, "operands" : [ { "kind" : "IdResultType" }, @@ -1949,6 +2263,7 @@ }, { "opname" : "OpFwidthFine", + "class" : "Derivative", "opcode" : 212, "operands" : [ { "kind" : "IdResultType" }, @@ -1959,6 +2274,7 @@ }, { "opname" : "OpDPdxCoarse", + "class" : "Derivative", "opcode" : 213, "operands" : [ { "kind" : "IdResultType" }, @@ -1969,6 +2285,7 @@ }, { "opname" : "OpDPdyCoarse", + "class" : "Derivative", "opcode" : 214, "operands" : [ { "kind" : "IdResultType" }, @@ -1979,6 +2296,7 @@ }, { "opname" : "OpFwidthCoarse", + "class" : "Derivative", "opcode" : 215, "operands" : [ { "kind" : "IdResultType" }, @@ -1989,16 +2307,19 @@ }, { "opname" : "OpEmitVertex", + "class" : "Primitive", "opcode" : 218, "capabilities" : [ "Geometry" ] }, { "opname" : "OpEndPrimitive", + "class" : "Primitive", "opcode" : 219, "capabilities" : [ "Geometry" ] }, 
{ "opname" : "OpEmitStreamVertex", + "class" : "Primitive", "opcode" : 220, "operands" : [ { "kind" : "IdRef", "name" : "'Stream'" } @@ -2007,6 +2328,7 @@ }, { "opname" : "OpEndStreamPrimitive", + "class" : "Primitive", "opcode" : 221, "operands" : [ { "kind" : "IdRef", "name" : "'Stream'" } @@ -2015,6 +2337,7 @@ }, { "opname" : "OpControlBarrier", + "class" : "Barrier", "opcode" : 224, "operands" : [ { "kind" : "IdScope", "name" : "'Execution'" }, @@ -2024,6 +2347,7 @@ }, { "opname" : "OpMemoryBarrier", + "class" : "Barrier", "opcode" : 225, "operands" : [ { "kind" : "IdScope", "name" : "'Memory'" }, @@ -2032,45 +2356,49 @@ }, { "opname" : "OpAtomicLoad", + "class" : "Atomic", "opcode" : 227, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" } ] }, { "opname" : "OpAtomicStore", + "class" : "Atomic", "opcode" : 228, "operands" : [ { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicExchange", + "class" : "Atomic", "opcode" : 229, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicCompareExchange", + "class" : "Atomic", "opcode" : 230, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Equal'" }, { "kind" : "IdMemorySemantics", "name" : "'Unequal'" }, { "kind" : "IdRef", "name" : "'Value'" }, @@ -2079,151 +2407,165 @@ }, { "opname" : "OpAtomicCompareExchangeWeak", + "class" : "Atomic", "opcode" : 231, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Equal'" }, { "kind" : "IdMemorySemantics", "name" : "'Unequal'" }, { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'Comparator'" } ], - "capabilities" : [ "Kernel" ] + "capabilities" : [ "Kernel" ], + "lastVersion" : "1.3" }, { "opname" : "OpAtomicIIncrement", + "class" : "Atomic", "opcode" : 232, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" } ] }, { "opname" : "OpAtomicIDecrement", + "class" : "Atomic", "opcode" : 233, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" } ] }, { "opname" : "OpAtomicIAdd", + "class" : "Atomic", "opcode" : 234, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : 
"IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicISub", + "class" : "Atomic", "opcode" : 235, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicSMin", + "class" : "Atomic", "opcode" : 236, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicUMin", + "class" : "Atomic", "opcode" : 237, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicSMax", + "class" : "Atomic", "opcode" : 238, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicUMax", + "class" : "Atomic", "opcode" : 239, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicAnd", + "class" : "Atomic", "opcode" : 240, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicOr", + "class" : "Atomic", "opcode" : 241, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpAtomicXor", + "class" : "Atomic", "opcode" : 242, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, { "kind" : "IdRef", "name" : "'Value'" } ] }, { "opname" : "OpPhi", + "class" : "Control-Flow", "opcode" : 245, "operands" : [ { "kind" : "IdResultType" }, @@ -2233,6 +2575,7 @@ }, { "opname" : "OpLoopMerge", + "class" : "Control-Flow", "opcode" : 246, "operands" : [ { "kind" : "IdRef", "name" : "'Merge Block'" }, @@ -2242,6 +2585,7 @@ }, { "opname" : 
"OpSelectionMerge", + "class" : "Control-Flow", "opcode" : 247, "operands" : [ { "kind" : "IdRef", "name" : "'Merge Block'" }, @@ -2250,6 +2594,7 @@ }, { "opname" : "OpLabel", + "class" : "Control-Flow", "opcode" : 248, "operands" : [ { "kind" : "IdResult" } @@ -2257,6 +2602,7 @@ }, { "opname" : "OpBranch", + "class" : "Control-Flow", "opcode" : 249, "operands" : [ { "kind" : "IdRef", "name" : "'Target Label'" } @@ -2264,6 +2610,7 @@ }, { "opname" : "OpBranchConditional", + "class" : "Control-Flow", "opcode" : 250, "operands" : [ { "kind" : "IdRef", "name" : "'Condition'" }, @@ -2274,6 +2621,7 @@ }, { "opname" : "OpSwitch", + "class" : "Control-Flow", "opcode" : 251, "operands" : [ { "kind" : "IdRef", "name" : "'Selector'" }, @@ -2283,15 +2631,18 @@ }, { "opname" : "OpKill", + "class" : "Control-Flow", "opcode" : 252, "capabilities" : [ "Shader" ] }, { "opname" : "OpReturn", + "class" : "Control-Flow", "opcode" : 253 }, { "opname" : "OpReturnValue", + "class" : "Control-Flow", "opcode" : 254, "operands" : [ { "kind" : "IdRef", "name" : "'Value'" } @@ -2299,10 +2650,12 @@ }, { "opname" : "OpUnreachable", + "class" : "Control-Flow", "opcode" : 255 }, { "opname" : "OpLifetimeStart", + "class" : "Control-Flow", "opcode" : 256, "operands" : [ { "kind" : "IdRef", "name" : "'Pointer'" }, @@ -2312,6 +2665,7 @@ }, { "opname" : "OpLifetimeStop", + "class" : "Control-Flow", "opcode" : 257, "operands" : [ { "kind" : "IdRef", "name" : "'Pointer'" }, @@ -2321,6 +2675,7 @@ }, { "opname" : "OpGroupAsyncCopy", + "class" : "Group", "opcode" : 259, "operands" : [ { "kind" : "IdResultType" }, @@ -2336,6 +2691,7 @@ }, { "opname" : "OpGroupWaitEvents", + "class" : "Group", "opcode" : 260, "operands" : [ { "kind" : "IdScope", "name" : "'Execution'" }, @@ -2346,6 +2702,7 @@ }, { "opname" : "OpGroupAll", + "class" : "Group", "opcode" : 261, "operands" : [ { "kind" : "IdResultType" }, @@ -2357,6 +2714,7 @@ }, { "opname" : "OpGroupAny", + "class" : "Group", "opcode" : 262, "operands" : [ { "kind" : "IdResultType" }, @@ -2368,6 +2726,7 @@ }, { "opname" : "OpGroupBroadcast", + "class" : "Group", "opcode" : 263, "operands" : [ { "kind" : "IdResultType" }, @@ -2380,6 +2739,7 @@ }, { "opname" : "OpGroupIAdd", + "class" : "Group", "opcode" : 264, "operands" : [ { "kind" : "IdResultType" }, @@ -2392,6 +2752,7 @@ }, { "opname" : "OpGroupFAdd", + "class" : "Group", "opcode" : 265, "operands" : [ { "kind" : "IdResultType" }, @@ -2404,6 +2765,7 @@ }, { "opname" : "OpGroupFMin", + "class" : "Group", "opcode" : 266, "operands" : [ { "kind" : "IdResultType" }, @@ -2416,6 +2778,7 @@ }, { "opname" : "OpGroupUMin", + "class" : "Group", "opcode" : 267, "operands" : [ { "kind" : "IdResultType" }, @@ -2428,6 +2791,7 @@ }, { "opname" : "OpGroupSMin", + "class" : "Group", "opcode" : 268, "operands" : [ { "kind" : "IdResultType" }, @@ -2440,6 +2804,7 @@ }, { "opname" : "OpGroupFMax", + "class" : "Group", "opcode" : 269, "operands" : [ { "kind" : "IdResultType" }, @@ -2452,6 +2817,7 @@ }, { "opname" : "OpGroupUMax", + "class" : "Group", "opcode" : 270, "operands" : [ { "kind" : "IdResultType" }, @@ -2464,6 +2830,7 @@ }, { "opname" : "OpGroupSMax", + "class" : "Group", "opcode" : 271, "operands" : [ { "kind" : "IdResultType" }, @@ -2476,6 +2843,7 @@ }, { "opname" : "OpReadPipe", + "class" : "Pipe", "opcode" : 274, "operands" : [ { "kind" : "IdResultType" }, @@ -2489,6 +2857,7 @@ }, { "opname" : "OpWritePipe", + "class" : "Pipe", "opcode" : 275, "operands" : [ { "kind" : "IdResultType" }, @@ -2502,6 +2871,7 @@ }, { "opname" : 
"OpReservedReadPipe", + "class" : "Pipe", "opcode" : 276, "operands" : [ { "kind" : "IdResultType" }, @@ -2517,6 +2887,7 @@ }, { "opname" : "OpReservedWritePipe", + "class" : "Pipe", "opcode" : 277, "operands" : [ { "kind" : "IdResultType" }, @@ -2532,6 +2903,7 @@ }, { "opname" : "OpReserveReadPipePackets", + "class" : "Pipe", "opcode" : 278, "operands" : [ { "kind" : "IdResultType" }, @@ -2545,6 +2917,7 @@ }, { "opname" : "OpReserveWritePipePackets", + "class" : "Pipe", "opcode" : 279, "operands" : [ { "kind" : "IdResultType" }, @@ -2558,6 +2931,7 @@ }, { "opname" : "OpCommitReadPipe", + "class" : "Pipe", "opcode" : 280, "operands" : [ { "kind" : "IdRef", "name" : "'Pipe'" }, @@ -2569,6 +2943,7 @@ }, { "opname" : "OpCommitWritePipe", + "class" : "Pipe", "opcode" : 281, "operands" : [ { "kind" : "IdRef", "name" : "'Pipe'" }, @@ -2580,6 +2955,7 @@ }, { "opname" : "OpIsValidReserveId", + "class" : "Pipe", "opcode" : 282, "operands" : [ { "kind" : "IdResultType" }, @@ -2590,6 +2966,7 @@ }, { "opname" : "OpGetNumPipePackets", + "class" : "Pipe", "opcode" : 283, "operands" : [ { "kind" : "IdResultType" }, @@ -2602,6 +2979,7 @@ }, { "opname" : "OpGetMaxPipePackets", + "class" : "Pipe", "opcode" : 284, "operands" : [ { "kind" : "IdResultType" }, @@ -2614,6 +2992,7 @@ }, { "opname" : "OpGroupReserveReadPipePackets", + "class" : "Pipe", "opcode" : 285, "operands" : [ { "kind" : "IdResultType" }, @@ -2628,6 +3007,7 @@ }, { "opname" : "OpGroupReserveWritePipePackets", + "class" : "Pipe", "opcode" : 286, "operands" : [ { "kind" : "IdResultType" }, @@ -2642,6 +3022,7 @@ }, { "opname" : "OpGroupCommitReadPipe", + "class" : "Pipe", "opcode" : 287, "operands" : [ { "kind" : "IdScope", "name" : "'Execution'" }, @@ -2654,6 +3035,7 @@ }, { "opname" : "OpGroupCommitWritePipe", + "class" : "Pipe", "opcode" : 288, "operands" : [ { "kind" : "IdScope", "name" : "'Execution'" }, @@ -2666,6 +3048,7 @@ }, { "opname" : "OpEnqueueMarker", + "class" : "Device-Side_Enqueue", "opcode" : 291, "operands" : [ { "kind" : "IdResultType" }, @@ -2679,6 +3062,7 @@ }, { "opname" : "OpEnqueueKernel", + "class" : "Device-Side_Enqueue", "opcode" : 292, "operands" : [ { "kind" : "IdResultType" }, @@ -2699,6 +3083,7 @@ }, { "opname" : "OpGetKernelNDrangeSubGroupCount", + "class" : "Device-Side_Enqueue", "opcode" : 293, "operands" : [ { "kind" : "IdResultType" }, @@ -2713,6 +3098,7 @@ }, { "opname" : "OpGetKernelNDrangeMaxSubGroupSize", + "class" : "Device-Side_Enqueue", "opcode" : 294, "operands" : [ { "kind" : "IdResultType" }, @@ -2727,6 +3113,7 @@ }, { "opname" : "OpGetKernelWorkGroupSize", + "class" : "Device-Side_Enqueue", "opcode" : 295, "operands" : [ { "kind" : "IdResultType" }, @@ -2740,6 +3127,7 @@ }, { "opname" : "OpGetKernelPreferredWorkGroupSizeMultiple", + "class" : "Device-Side_Enqueue", "opcode" : 296, "operands" : [ { "kind" : "IdResultType" }, @@ -2753,6 +3141,7 @@ }, { "opname" : "OpRetainEvent", + "class" : "Device-Side_Enqueue", "opcode" : 297, "operands" : [ { "kind" : "IdRef", "name" : "'Event'" } @@ -2761,6 +3150,7 @@ }, { "opname" : "OpReleaseEvent", + "class" : "Device-Side_Enqueue", "opcode" : 298, "operands" : [ { "kind" : "IdRef", "name" : "'Event'" } @@ -2769,6 +3159,7 @@ }, { "opname" : "OpCreateUserEvent", + "class" : "Device-Side_Enqueue", "opcode" : 299, "operands" : [ { "kind" : "IdResultType" }, @@ -2778,6 +3169,7 @@ }, { "opname" : "OpIsValidEvent", + "class" : "Device-Side_Enqueue", "opcode" : 300, "operands" : [ { "kind" : "IdResultType" }, @@ -2788,6 +3180,7 @@ }, { "opname" : 
"OpSetUserEventStatus", + "class" : "Device-Side_Enqueue", "opcode" : 301, "operands" : [ { "kind" : "IdRef", "name" : "'Event'" }, @@ -2797,6 +3190,7 @@ }, { "opname" : "OpCaptureEventProfilingInfo", + "class" : "Device-Side_Enqueue", "opcode" : 302, "operands" : [ { "kind" : "IdRef", "name" : "'Event'" }, @@ -2807,6 +3201,7 @@ }, { "opname" : "OpGetDefaultQueue", + "class" : "Device-Side_Enqueue", "opcode" : 303, "operands" : [ { "kind" : "IdResultType" }, @@ -2816,6 +3211,7 @@ }, { "opname" : "OpBuildNDRange", + "class" : "Device-Side_Enqueue", "opcode" : 304, "operands" : [ { "kind" : "IdResultType" }, @@ -2828,6 +3224,7 @@ }, { "opname" : "OpImageSparseSampleImplicitLod", + "class" : "Image", "opcode" : 305, "operands" : [ { "kind" : "IdResultType" }, @@ -2840,6 +3237,7 @@ }, { "opname" : "OpImageSparseSampleExplicitLod", + "class" : "Image", "opcode" : 306, "operands" : [ { "kind" : "IdResultType" }, @@ -2852,6 +3250,7 @@ }, { "opname" : "OpImageSparseSampleDrefImplicitLod", + "class" : "Image", "opcode" : 307, "operands" : [ { "kind" : "IdResultType" }, @@ -2865,6 +3264,7 @@ }, { "opname" : "OpImageSparseSampleDrefExplicitLod", + "class" : "Image", "opcode" : 308, "operands" : [ { "kind" : "IdResultType" }, @@ -2878,6 +3278,7 @@ }, { "opname" : "OpImageSparseSampleProjImplicitLod", + "class" : "Image", "opcode" : 309, "operands" : [ { "kind" : "IdResultType" }, @@ -2891,6 +3292,7 @@ }, { "opname" : "OpImageSparseSampleProjExplicitLod", + "class" : "Image", "opcode" : 310, "operands" : [ { "kind" : "IdResultType" }, @@ -2904,6 +3306,7 @@ }, { "opname" : "OpImageSparseSampleProjDrefImplicitLod", + "class" : "Image", "opcode" : 311, "operands" : [ { "kind" : "IdResultType" }, @@ -2918,6 +3321,7 @@ }, { "opname" : "OpImageSparseSampleProjDrefExplicitLod", + "class" : "Image", "opcode" : 312, "operands" : [ { "kind" : "IdResultType" }, @@ -2932,6 +3336,7 @@ }, { "opname" : "OpImageSparseFetch", + "class" : "Image", "opcode" : 313, "operands" : [ { "kind" : "IdResultType" }, @@ -2944,6 +3349,7 @@ }, { "opname" : "OpImageSparseGather", + "class" : "Image", "opcode" : 314, "operands" : [ { "kind" : "IdResultType" }, @@ -2957,6 +3363,7 @@ }, { "opname" : "OpImageSparseDrefGather", + "class" : "Image", "opcode" : 315, "operands" : [ { "kind" : "IdResultType" }, @@ -2970,6 +3377,7 @@ }, { "opname" : "OpImageSparseTexelsResident", + "class" : "Image", "opcode" : 316, "operands" : [ { "kind" : "IdResultType" }, @@ -2980,32 +3388,36 @@ }, { "opname" : "OpNoLine", + "class" : "Debug", "opcode" : 317 }, { "opname" : "OpAtomicFlagTestAndSet", + "class" : "Atomic", "opcode" : 318, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" } ], "capabilities" : [ "Kernel" ] }, { "opname" : "OpAtomicFlagClear", + "class" : "Atomic", "opcode" : 319, "operands" : [ { "kind" : "IdRef", "name" : "'Pointer'" }, - { "kind" : "IdScope", "name" : "'Scope'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, { "kind" : "IdMemorySemantics", "name" : "'Semantics'" } ], "capabilities" : [ "Kernel" ] }, { "opname" : "OpImageSparseRead", + "class" : "Image", "opcode" : 320, "operands" : [ { "kind" : "IdResultType" }, @@ -3018,6 +3430,7 @@ }, { "opname" : "OpSizeOf", + "class" : "Miscellaneous", "opcode" : 321, "operands" : [ { "kind" : "IdResultType" }, @@ -3029,6 +3442,7 @@ }, { "opname" : "OpTypePipeStorage", + 
"class" : "Type-Declaration", "opcode" : 322, "operands" : [ { "kind" : "IdResult" } @@ -3038,6 +3452,7 @@ }, { "opname" : "OpConstantPipeStorage", + "class" : "Pipe", "opcode" : 323, "operands" : [ { "kind" : "IdResultType" }, @@ -3051,6 +3466,7 @@ }, { "opname" : "OpCreatePipeFromPipeStorage", + "class" : "Pipe", "opcode" : 324, "operands" : [ { "kind" : "IdResultType" }, @@ -3062,6 +3478,7 @@ }, { "opname" : "OpGetKernelLocalSizeForSubgroupCount", + "class" : "Device-Side_Enqueue", "opcode" : 325, "operands" : [ { "kind" : "IdResultType" }, @@ -3077,6 +3494,7 @@ }, { "opname" : "OpGetKernelMaxNumSubgroups", + "class" : "Device-Side_Enqueue", "opcode" : 326, "operands" : [ { "kind" : "IdResultType" }, @@ -3091,6 +3509,7 @@ }, { "opname" : "OpTypeNamedBarrier", + "class" : "Type-Declaration", "opcode" : 327, "operands" : [ { "kind" : "IdResult" } @@ -3100,6 +3519,7 @@ }, { "opname" : "OpNamedBarrierInitialize", + "class" : "Barrier", "opcode" : 328, "operands" : [ { "kind" : "IdResultType" }, @@ -3111,6 +3531,7 @@ }, { "opname" : "OpMemoryNamedBarrier", + "class" : "Barrier", "opcode" : 329, "operands" : [ { "kind" : "IdRef", "name" : "'Named Barrier'" }, @@ -3122,6 +3543,7 @@ }, { "opname" : "OpModuleProcessed", + "class" : "Debug", "opcode" : 330, "operands" : [ { "kind" : "LiteralString", "name" : "'Process'" } @@ -3130,6 +3552,7 @@ }, { "opname" : "OpExecutionModeId", + "class" : "Mode-Setting", "opcode" : 331, "operands" : [ { "kind" : "IdRef", "name" : "'Entry Point'" }, @@ -3139,6 +3562,7 @@ }, { "opname" : "OpDecorateId", + "class" : "Annotation", "opcode" : 332, "operands" : [ { "kind" : "IdRef", "name" : "'Target'" }, @@ -3149,6 +3573,7 @@ }, { "opname" : "OpGroupNonUniformElect", + "class" : "Non-Uniform", "opcode" : 333, "operands" : [ { "kind" : "IdResultType" }, @@ -3160,6 +3585,7 @@ }, { "opname" : "OpGroupNonUniformAll", + "class" : "Non-Uniform", "opcode" : 334, "operands" : [ { "kind" : "IdResultType" }, @@ -3172,6 +3598,7 @@ }, { "opname" : "OpGroupNonUniformAny", + "class" : "Non-Uniform", "opcode" : 335, "operands" : [ { "kind" : "IdResultType" }, @@ -3184,6 +3611,7 @@ }, { "opname" : "OpGroupNonUniformAllEqual", + "class" : "Non-Uniform", "opcode" : 336, "operands" : [ { "kind" : "IdResultType" }, @@ -3196,6 +3624,7 @@ }, { "opname" : "OpGroupNonUniformBroadcast", + "class" : "Non-Uniform", "opcode" : 337, "operands" : [ { "kind" : "IdResultType" }, @@ -3209,6 +3638,7 @@ }, { "opname" : "OpGroupNonUniformBroadcastFirst", + "class" : "Non-Uniform", "opcode" : 338, "operands" : [ { "kind" : "IdResultType" }, @@ -3221,6 +3651,7 @@ }, { "opname" : "OpGroupNonUniformBallot", + "class" : "Non-Uniform", "opcode" : 339, "operands" : [ { "kind" : "IdResultType" }, @@ -3233,6 +3664,7 @@ }, { "opname" : "OpGroupNonUniformInverseBallot", + "class" : "Non-Uniform", "opcode" : 340, "operands" : [ { "kind" : "IdResultType" }, @@ -3245,6 +3677,7 @@ }, { "opname" : "OpGroupNonUniformBallotBitExtract", + "class" : "Non-Uniform", "opcode" : 341, "operands" : [ { "kind" : "IdResultType" }, @@ -3258,6 +3691,7 @@ }, { "opname" : "OpGroupNonUniformBallotBitCount", + "class" : "Non-Uniform", "opcode" : 342, "operands" : [ { "kind" : "IdResultType" }, @@ -3271,6 +3705,7 @@ }, { "opname" : "OpGroupNonUniformBallotFindLSB", + "class" : "Non-Uniform", "opcode" : 343, "operands" : [ { "kind" : "IdResultType" }, @@ -3283,6 +3718,7 @@ }, { "opname" : "OpGroupNonUniformBallotFindMSB", + "class" : "Non-Uniform", "opcode" : 344, "operands" : [ { "kind" : "IdResultType" }, @@ -3295,6 +3731,7 @@ }, { 
"opname" : "OpGroupNonUniformShuffle", + "class" : "Non-Uniform", "opcode" : 345, "operands" : [ { "kind" : "IdResultType" }, @@ -3308,6 +3745,7 @@ }, { "opname" : "OpGroupNonUniformShuffleXor", + "class" : "Non-Uniform", "opcode" : 346, "operands" : [ { "kind" : "IdResultType" }, @@ -3321,6 +3759,7 @@ }, { "opname" : "OpGroupNonUniformShuffleUp", + "class" : "Non-Uniform", "opcode" : 347, "operands" : [ { "kind" : "IdResultType" }, @@ -3334,6 +3773,7 @@ }, { "opname" : "OpGroupNonUniformShuffleDown", + "class" : "Non-Uniform", "opcode" : 348, "operands" : [ { "kind" : "IdResultType" }, @@ -3347,6 +3787,7 @@ }, { "opname" : "OpGroupNonUniformIAdd", + "class" : "Non-Uniform", "opcode" : 349, "operands" : [ { "kind" : "IdResultType" }, @@ -3356,11 +3797,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformFAdd", + "class" : "Non-Uniform", "opcode" : 350, "operands" : [ { "kind" : "IdResultType" }, @@ -3370,11 +3812,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformIMul", + "class" : "Non-Uniform", "opcode" : 351, "operands" : [ { "kind" : "IdResultType" }, @@ -3384,11 +3827,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformFMul", + "class" : "Non-Uniform", "opcode" : 352, "operands" : [ { "kind" : "IdResultType" }, @@ -3398,11 +3842,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformSMin", + "class" : "Non-Uniform", "opcode" : 353, "operands" : [ { "kind" : "IdResultType" }, @@ -3412,11 +3857,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformUMin", + "class" : "Non-Uniform", "opcode" : 354, "operands" : [ { "kind" : "IdResultType" }, @@ -3426,11 +3872,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" 
} ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformFMin", + "class" : "Non-Uniform", "opcode" : 355, "operands" : [ { "kind" : "IdResultType" }, @@ -3440,11 +3887,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformSMax", + "class" : "Non-Uniform", "opcode" : 356, "operands" : [ { "kind" : "IdResultType" }, @@ -3454,11 +3902,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformUMax", + "class" : "Non-Uniform", "opcode" : 357, "operands" : [ { "kind" : "IdResultType" }, @@ -3468,11 +3917,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformFMax", + "class" : "Non-Uniform", "opcode" : 358, "operands" : [ { "kind" : "IdResultType" }, @@ -3482,11 +3932,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformBitwiseAnd", + "class" : "Non-Uniform", "opcode" : 359, "operands" : [ { "kind" : "IdResultType" }, @@ -3496,11 +3947,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformBitwiseOr", + "class" : "Non-Uniform", "opcode" : 360, "operands" : [ { "kind" : "IdResultType" }, @@ -3510,11 +3962,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformBitwiseXor", + "class" : "Non-Uniform", "opcode" : 361, "operands" : [ { "kind" : "IdResultType" }, @@ -3524,11 +3977,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" 
} ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformLogicalAnd", + "class" : "Non-Uniform", "opcode" : 362, "operands" : [ { "kind" : "IdResultType" }, @@ -3538,11 +3992,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformLogicalOr", + "class" : "Non-Uniform", "opcode" : 363, "operands" : [ { "kind" : "IdResultType" }, @@ -3552,11 +4007,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformLogicalXor", + "class" : "Non-Uniform", "opcode" : 364, "operands" : [ { "kind" : "IdResultType" }, @@ -3566,11 +4022,12 @@ { "kind" : "IdRef", "name" : "'Value'" }, { "kind" : "IdRef", "name" : "'ClusterSize'", "quantifier" : "?" } ], - "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered" ], + "capabilities" : [ "GroupNonUniformArithmetic", "GroupNonUniformClustered", "GroupNonUniformPartitionedNV" ], "version" : "1.3" }, { "opname" : "OpGroupNonUniformQuadBroadcast", + "class" : "Non-Uniform", "opcode" : 365, "operands" : [ { "kind" : "IdResultType" }, @@ -3584,6 +4041,7 @@ }, { "opname" : "OpGroupNonUniformQuadSwap", + "class" : "Non-Uniform", "opcode" : 366, "operands" : [ { "kind" : "IdResultType" }, @@ -3595,8 +4053,67 @@ "capabilities" : [ "GroupNonUniformQuad" ], "version" : "1.3" }, + { + "opname" : "OpCopyLogical", + "class" : "Composite", + "opcode" : 400, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand'" } + ], + "version" : "1.4" + }, + { + "opname" : "OpPtrEqual", + "class" : "Memory", + "opcode" : 401, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "version" : "1.4" + }, + { + "opname" : "OpPtrNotEqual", + "class" : "Memory", + "opcode" : 402, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "version" : "1.4" + }, + { + "opname" : "OpPtrDiff", + "class" : "Memory", + "opcode" : 403, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "Addresses", "VariablePointers", "VariablePointersStorageBuffer" ], + "version" : "1.4" + }, + { + "opname" : "OpTerminateInvocation", + "class" : "Control-Flow", + "opcode" : 4416, + "extensions" : [ + "SPV_KHR_terminate_invocation" + ], + "capabilities" : [ "Shader" ], + "version" : "None" + }, { "opname" : "OpSubgroupBallotKHR", + "class" : "Group", "opcode" : 4421, "operands" : [ { "kind" : "IdResultType" }, @@ -3609,6 +4126,7 @@ }, { "opname" : 
"OpSubgroupFirstInvocationKHR", + "class" : "Group", "opcode" : 4422, "operands" : [ { "kind" : "IdResultType" }, @@ -3621,6 +4139,7 @@ }, { "opname" : "OpSubgroupAllKHR", + "class" : "Group", "opcode" : 4428, "operands" : [ { "kind" : "IdResultType" }, @@ -3635,6 +4154,7 @@ }, { "opname" : "OpSubgroupAnyKHR", + "class" : "Group", "opcode" : 4429, "operands" : [ { "kind" : "IdResultType" }, @@ -3649,6 +4169,7 @@ }, { "opname" : "OpSubgroupAllEqualKHR", + "class" : "Group", "opcode" : 4430, "operands" : [ { "kind" : "IdResultType" }, @@ -3663,6 +4184,7 @@ }, { "opname" : "OpSubgroupReadInvocationKHR", + "class" : "Group", "opcode" : 4432, "operands" : [ { "kind" : "IdResultType" }, @@ -3674,8 +4196,209 @@ "extensions" : [ "SPV_KHR_shader_ballot" ], "version" : "None" }, + { + "opname" : "OpTraceRayKHR", + "class" : "Reserved", + "opcode" : 4445, + "operands" : [ + + { "kind" : "IdRef", "name" : "'Accel'" }, + { "kind" : "IdRef", "name" : "'Ray Flags'" }, + { "kind" : "IdRef", "name" : "'Cull Mask'" }, + { "kind" : "IdRef", "name" : "'SBT Offset'" }, + { "kind" : "IdRef", "name" : "'SBT Stride'" }, + { "kind" : "IdRef", "name" : "'Miss Index'" }, + { "kind" : "IdRef", "name" : "'Ray Origin'" }, + { "kind" : "IdRef", "name" : "'Ray Tmin'" }, + { "kind" : "IdRef", "name" : "'Ray Direction'" }, + { "kind" : "IdRef", "name" : "'Ray Tmax'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "RayTracingKHR" ], + "extensions" : [ "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpExecuteCallableKHR", + "class" : "Reserved", + "opcode" : 4446, + "operands" : [ + + { "kind" : "IdRef", "name" : "'SBT Index'" }, + { "kind" : "IdRef", "name" : "'Callable Data'" } + ], + "capabilities" : [ "RayTracingKHR" ], + "extensions" : [ "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpConvertUToAccelerationStructureKHR", + "class" : "Reserved", + "opcode" : 4447, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Accel'" } + ], + "capabilities" : [ "RayTracingKHR", "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_tracing", "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpIgnoreIntersectionKHR", + "class" : "Reserved", + "opcode" : 4448, + "capabilities" : [ "RayTracingKHR" ], + "extensions" : [ "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpTerminateRayKHR", + "class" : "Reserved", + "opcode" : 4449, + "capabilities" : [ "RayTracingKHR" ], + "extensions" : [ "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpTypeRayQueryKHR", + "class" : "Reserved", + "opcode" : 4472, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryInitializeKHR", + "class" : "Reserved", + "opcode" : 4473, + "operands" : [ + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Accel'" + }, + { + "kind" : "IdRef", + "name" : "'RayFlags'" + }, + { + "kind" : "IdRef", + "name" : "'CullMask'" + }, + { + "kind" : "IdRef", + "name" : "'RayOrigin'" + }, + { + "kind" : "IdRef", + "name" : "'RayTMin'" + }, + { + "kind" : "IdRef", + "name" : "'RayDirection'" + }, + { + "kind" : "IdRef", + "name" : "'RayTMax'" + } + + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryTerminateKHR", + "class" : "Reserved", + "opcode" : 4474, + 
"operands" : [ + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGenerateIntersectionKHR", + "class" : "Reserved", + "opcode" : 4475, + "operands" : [ + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'HitT'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryConfirmIntersectionKHR", + "class" : "Reserved", + "opcode" : 4476, + "operands" : [ + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryProceedKHR", + "class" : "Reserved", + "opcode" : 4477, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionTypeKHR", + "class" : "Reserved", + "opcode" : 4479, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, { "opname" : "OpGroupIAddNonUniformAMD", + "class" : "Group", "opcode" : 5000, "operands" : [ { "kind" : "IdResultType" }, @@ -3690,6 +4413,7 @@ }, { "opname" : "OpGroupFAddNonUniformAMD", + "class" : "Group", "opcode" : 5001, "operands" : [ { "kind" : "IdResultType" }, @@ -3704,6 +4428,7 @@ }, { "opname" : "OpGroupFMinNonUniformAMD", + "class" : "Group", "opcode" : 5002, "operands" : [ { "kind" : "IdResultType" }, @@ -3718,6 +4443,7 @@ }, { "opname" : "OpGroupUMinNonUniformAMD", + "class" : "Group", "opcode" : 5003, "operands" : [ { "kind" : "IdResultType" }, @@ -3732,6 +4458,7 @@ }, { "opname" : "OpGroupSMinNonUniformAMD", + "class" : "Group", "opcode" : 5004, "operands" : [ { "kind" : "IdResultType" }, @@ -3746,6 +4473,7 @@ }, { "opname" : "OpGroupFMaxNonUniformAMD", + "class" : "Group", "opcode" : 5005, "operands" : [ { "kind" : "IdResultType" }, @@ -3760,6 +4488,7 @@ }, { "opname" : "OpGroupUMaxNonUniformAMD", + "class" : "Group", "opcode" : 5006, "operands" : [ { "kind" : "IdResultType" }, @@ -3774,6 +4503,7 @@ }, { "opname" : "OpGroupSMaxNonUniformAMD", + "class" : "Group", "opcode" : 5007, "operands" : [ { "kind" : "IdResultType" }, @@ -3788,6 +4518,7 @@ }, { "opname" : "OpFragmentMaskFetchAMD", + "class" : "Reserved", "opcode" : 5011, "operands" : [ { "kind" : "IdResultType" }, @@ -3801,6 +4532,7 @@ }, { "opname" : "OpFragmentFetchAMD", + "class" : "Reserved", "opcode" : 5012, "operands" : [ { "kind" : "IdResultType" }, @@ -3813,8 +4545,275 @@ "extensions" : [ "SPV_AMD_shader_fragment_mask" ], "version" : "None" }, + { + "opname" : "OpReadClockKHR", + "class" : "Reserved", + "opcode" : 5056, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdScope", "name" : "'Execution'" } + ], + "capabilities" : [ "ShaderClockKHR" ], + "extensions" : [ "SPV_KHR_shader_clock" ], + "version" : "None" + }, + { + "opname" : "OpImageSampleFootprintNV", + "class" : "Image", + "opcode" : 5283, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : 
"'Sampled Image'" }, + { "kind" : "IdRef", "name" : "'Coordinate'" }, + { "kind" : "IdRef", "name" : "'Granularity'" }, + { "kind" : "IdRef", "name" : "'Coarse'" }, + { "kind" : "ImageOperands", "quantifier" : "?" } + ], + "capabilities" : [ "ImageFootprintNV" ], + "extensions" : [ "SPV_NV_shader_image_footprint" ], + "version" : "None" + }, + { + "opname" : "OpGroupNonUniformPartitionNV", + "class" : "Non-Uniform", + "opcode" : 5296, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Value'" } + ], + "capabilities" : [ "GroupNonUniformPartitionedNV" ], + "extensions" : [ "SPV_NV_shader_subgroup_partitioned" ], + "version" : "None" + }, + { + "opname" : "OpWritePackedPrimitiveIndices4x8NV", + "class" : "Reserved", + "opcode" : 5299, + "operands" : [ + { "kind" : "IdRef", "name" : "'Index Offset'" }, + { "kind" : "IdRef", "name" : "'Packed Indices'" } + ], + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "opname" : "OpReportIntersectionNV", + "class" : "Reserved", + "opcode" : 5334, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Hit'" }, + { "kind" : "IdRef", "name" : "'HitKind'" } + ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpReportIntersectionKHR", + "class" : "Reserved", + "opcode" : 5334, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Hit'" }, + { "kind" : "IdRef", "name" : "'HitKind'" } + ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpIgnoreIntersectionNV", + "class" : "Reserved", + "opcode" : 5335, + "capabilities" : [ "RayTracingNV" ], + "extensions" : [ "SPV_NV_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpTerminateRayNV", + "class" : "Reserved", + "opcode" : 5336, + "capabilities" : [ "RayTracingNV" ], + "extensions" : [ "SPV_NV_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpTraceNV", + "class" : "Reserved", + "opcode" : 5337, + "operands" : [ + + { "kind" : "IdRef", "name" : "'Accel'" }, + { "kind" : "IdRef", "name" : "'Ray Flags'" }, + { "kind" : "IdRef", "name" : "'Cull Mask'" }, + { "kind" : "IdRef", "name" : "'SBT Offset'" }, + { "kind" : "IdRef", "name" : "'SBT Stride'" }, + { "kind" : "IdRef", "name" : "'Miss Index'" }, + { "kind" : "IdRef", "name" : "'Ray Origin'" }, + { "kind" : "IdRef", "name" : "'Ray Tmin'" }, + { "kind" : "IdRef", "name" : "'Ray Direction'" }, + { "kind" : "IdRef", "name" : "'Ray Tmax'" }, + { "kind" : "IdRef", "name" : "'PayloadId'" } + ], + "capabilities" : [ "RayTracingNV" ], + "extensions" : [ "SPV_NV_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpTypeAccelerationStructureNV", + "class" : "Reserved", + "opcode" : 5341, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR", "RayQueryKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing", "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpTypeAccelerationStructureKHR", + "class" : "Reserved", + "opcode" : 5341, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR", "RayQueryKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , 
"SPV_KHR_ray_tracing", "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpExecuteCallableNV", + "class" : "Reserved", + "opcode" : 5344, + "operands" : [ + + { "kind" : "IdRef", "name" : "'SBT Index'" }, + { "kind" : "IdRef", "name" : "'Callable DataId'" } + ], + "capabilities" : [ "RayTracingNV" ], + "extensions" : [ "SPV_NV_ray_tracing" ], + "version" : "None" + }, + { + "opname" : "OpTypeCooperativeMatrixNV", + "class" : "Reserved", + "opcode" : 5358, + "operands" : [ + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Component Type'" }, + { "kind" : "IdScope", "name" : "'Execution'" }, + { "kind" : "IdRef", "name" : "'Rows'" }, + { "kind" : "IdRef", "name" : "'Columns'" } + ], + "capabilities" : [ "CooperativeMatrixNV" ], + "extensions" : [ "SPV_NV_cooperative_matrix" ], + "version" : "None" + }, + { + "opname" : "OpCooperativeMatrixLoadNV", + "class" : "Reserved", + "opcode" : 5359, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Pointer'" }, + { "kind" : "IdRef", "name" : "'Stride'" }, + { "kind" : "IdRef", "name" : "'Column Major'" }, + { "kind" : "MemoryAccess", "quantifier" : "?" } + ], + "capabilities" : [ "CooperativeMatrixNV" ], + "extensions" : [ "SPV_NV_cooperative_matrix" ], + "version" : "None" + }, + { + "opname" : "OpCooperativeMatrixStoreNV", + "class" : "Reserved", + "opcode" : 5360, + "operands" : [ + { "kind" : "IdRef", "name" : "'Pointer'" }, + { "kind" : "IdRef", "name" : "'Object'" }, + { "kind" : "IdRef", "name" : "'Stride'" }, + { "kind" : "IdRef", "name" : "'Column Major'" }, + { "kind" : "MemoryAccess", "quantifier" : "?" } + ], + "capabilities" : [ "CooperativeMatrixNV" ], + "extensions" : [ "SPV_NV_cooperative_matrix" ], + "version" : "None" + }, + { + "opname" : "OpCooperativeMatrixMulAddNV", + "class" : "Reserved", + "opcode" : 5361, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'A'" }, + { "kind" : "IdRef", "name" : "'B'" }, + { "kind" : "IdRef", "name" : "'C'" } + ], + "capabilities" : [ "CooperativeMatrixNV" ], + "extensions" : [ "SPV_NV_cooperative_matrix" ], + "version" : "None" + }, + { + "opname" : "OpCooperativeMatrixLengthNV", + "class" : "Reserved", + "opcode" : 5362, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Type'" } + ], + "capabilities" : [ "CooperativeMatrixNV" ], + "extensions" : [ "SPV_NV_cooperative_matrix" ], + "version" : "None" + }, + { + "opname" : "OpBeginInvocationInterlockEXT", + "class" : "Reserved", + "opcode" : 5364, + "capabilities" : [ "FragmentShaderSampleInterlockEXT", "FragmentShaderPixelInterlockEXT", "FragmentShaderShadingRateInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "opname" : "OpEndInvocationInterlockEXT", + "class" : "Reserved", + "opcode" : 5365, + "capabilities" : [ "FragmentShaderSampleInterlockEXT", "FragmentShaderPixelInterlockEXT", "FragmentShaderShadingRateInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "opname" : "OpDemoteToHelperInvocationEXT", + "class" : "Reserved", + "opcode" : 5380, + "capabilities" : [ "DemoteToHelperInvocationEXT" ], + "extensions" : [ "SPV_EXT_demote_to_helper_invocation" ], + "version" : "None" + }, + { + "opname" : "OpIsHelperInvocationEXT", + "class" : "Reserved", + "opcode" : 5381, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" 
} + ], + "capabilities" : [ "DemoteToHelperInvocationEXT" ], + "extensions" : [ "SPV_EXT_demote_to_helper_invocation" ], + "version" : "None" + }, { "opname" : "OpSubgroupShuffleINTEL", + "class" : "Group", "opcode" : 5571, "operands" : [ { "kind" : "IdResultType" }, @@ -3827,6 +4826,7 @@ }, { "opname" : "OpSubgroupShuffleDownINTEL", + "class" : "Group", "opcode" : 5572, "operands" : [ { "kind" : "IdResultType" }, @@ -3840,6 +4840,7 @@ }, { "opname" : "OpSubgroupShuffleUpINTEL", + "class" : "Group", "opcode" : 5573, "operands" : [ { "kind" : "IdResultType" }, @@ -3853,6 +4854,7 @@ }, { "opname" : "OpSubgroupShuffleXorINTEL", + "class" : "Group", "opcode" : 5574, "operands" : [ { "kind" : "IdResultType" }, @@ -3865,6 +4867,7 @@ }, { "opname" : "OpSubgroupBlockReadINTEL", + "class" : "Group", "opcode" : 5575, "operands" : [ { "kind" : "IdResultType" }, @@ -3876,6 +4879,7 @@ }, { "opname" : "OpSubgroupBlockWriteINTEL", + "class" : "Group", "opcode" : 5576, "operands" : [ { "kind" : "IdRef", "name" : "'Ptr'" }, @@ -3886,6 +4890,7 @@ }, { "opname" : "OpSubgroupImageBlockReadINTEL", + "class" : "Group", "opcode" : 5577, "operands" : [ { "kind" : "IdResultType" }, @@ -3898,6 +4903,7 @@ }, { "opname" : "OpSubgroupImageBlockWriteINTEL", + "class" : "Group", "opcode" : 5578, "operands" : [ { "kind" : "IdRef", "name" : "'Image'" }, @@ -3908,17 +4914,335 @@ "version" : "None" }, { - "opname" : "OpDecorateStringGOOGLE", + "opname" : "OpSubgroupImageMediaBlockReadINTEL", + "class" : "Group", + "opcode" : 5580, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Image'" }, + { "kind" : "IdRef", "name" : "'Coordinate'" }, + { "kind" : "IdRef", "name" : "'Width'" }, + { "kind" : "IdRef", "name" : "'Height'" } + ], + "capabilities" : [ "SubgroupImageMediaBlockIOINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupImageMediaBlockWriteINTEL", + "class" : "Group", + "opcode" : 5581, + "operands" : [ + { "kind" : "IdRef", "name" : "'Image'" }, + { "kind" : "IdRef", "name" : "'Coordinate'" }, + { "kind" : "IdRef", "name" : "'Width'" }, + { "kind" : "IdRef", "name" : "'Height'" }, + { "kind" : "IdRef", "name" : "'Data'" } + ], + "capabilities" : [ "SubgroupImageMediaBlockIOINTEL" ], + "version" : "None" + }, + { + "opname" : "OpUCountLeadingZerosINTEL", + "class" : "Reserved", + "opcode" : 5585, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpUCountTrailingZerosINTEL", + "class" : "Reserved", + "opcode" : 5586, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpAbsISubINTEL", + "class" : "Reserved", + "opcode" : 5587, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpAbsUSubINTEL", + "class" : "Reserved", + "opcode" : 5588, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : 
"OpIAddSatINTEL", + "class" : "Reserved", + "opcode" : 5589, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpUAddSatINTEL", + "class" : "Reserved", + "opcode" : 5590, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpIAverageINTEL", + "class" : "Reserved", + "opcode" : 5591, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpUAverageINTEL", + "class" : "Reserved", + "opcode" : 5592, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpIAverageRoundedINTEL", + "class" : "Reserved", + "opcode" : 5593, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpUAverageRoundedINTEL", + "class" : "Reserved", + "opcode" : 5594, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpISubSatINTEL", + "class" : "Reserved", + "opcode" : 5595, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpUSubSatINTEL", + "class" : "Reserved", + "opcode" : 5596, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpIMul32x16INTEL", + "class" : "Reserved", + "opcode" : 5597, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpUMul32x16INTEL", + "class" : "Reserved", + "opcode" : 5598, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Operand 1'" }, + { "kind" : "IdRef", "name" : "'Operand 2'" } + ], + "capabilities" : [ "IntegerFunctions2INTEL" ], + "version" : "None" + }, + { + "opname" : "OpConstFunctionPointerINTEL", + "class" : "@exclude", + "opcode" : 5600, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Function'" } + ], + "capabilities" : [ "FunctionPointersINTEL" ], + 
"extensions" : [ "SPV_INTEL_function_pointers" ], + "version" : "None" + }, + { + "opname" : "OpFunctionPointerCallINTEL", + "class" : "@exclude", + "opcode" : 5601, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "quantifier" : "*", "name" : "'Operand 1'" } + ], + "capabilities" : [ "FunctionPointersINTEL" ], + "extensions" : [ "SPV_INTEL_function_pointers" ], + "version" : "None" + }, + { + "opname" : "OpAsmTargetINTEL", + "class" : "@exclude", + "opcode" : 5609, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "LiteralString", "name" : "'Asm target'" } + ], + "capabilities" : [ "AsmINTEL" ], + "version" : "None" + }, + { + "opname" : "OpAsmINTEL", + "class" : "@exclude", + "opcode" : 5610, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Asm type'" }, + { "kind" : "IdRef", "name" : "'Target'" }, + { "kind" : "LiteralString", "name" : "'Asm instructions'" }, + { "kind" : "LiteralString", "name" : "'Constraints'" } + ], + "capabilities" : [ "AsmINTEL" ], + "version" : "None" + }, + { + "opname" : "OpAsmCallINTEL", + "class" : "@exclude", + "opcode" : 5611, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Asm'" }, + { "kind" : "IdRef", "quantifier" : "*", "name" : "'Argument 0'" } + ], + "capabilities" : [ "AsmINTEL" ], + "version" : "None" + }, + { + "opname" : "OpAtomicFMinEXT", + "class" : "Atomic", + "opcode" : 5614, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Pointer'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, + { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, + { "kind" : "IdRef", "name" : "'Value'" } + ], + "capabilities" : [ "AtomicFloat16MinMaxEXT", "AtomicFloat32MinMaxEXT", "AtomicFloat64MinMaxEXT" ], + "version" : "None" + }, + { + "opname" : "OpAtomicFMaxEXT", + "class" : "Atomic", + "opcode" : 5615, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Pointer'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, + { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, + { "kind" : "IdRef", "name" : "'Value'" } + ], + "capabilities" : [ "AtomicFloat16MinMaxEXT", "AtomicFloat32MinMaxEXT", "AtomicFloat64MinMaxEXT" ], + "version" : "None" + }, + { + "opname" : "OpDecorateString", + "class" : "Annotation", "opcode" : 5632, "operands" : [ { "kind" : "IdRef", "name" : "'Target'" }, { "kind" : "Decoration" } ], "extensions" : [ "SPV_GOOGLE_decorate_string", "SPV_GOOGLE_hlsl_functionality1" ], - "version" : "None" + "version" : "1.4" }, { - "opname" : "OpMemberDecorateStringGOOGLE", + "opname" : "OpDecorateStringGOOGLE", + "class" : "Annotation", + "opcode" : 5632, + "operands" : [ + { "kind" : "IdRef", "name" : "'Target'" }, + { "kind" : "Decoration" } + ], + "extensions" : [ "SPV_GOOGLE_decorate_string", "SPV_GOOGLE_hlsl_functionality1" ], + "version" : "1.4" + }, + { + "opname" : "OpMemberDecorateString", + "class" : "Annotation", "opcode" : 5633, "operands" : [ { "kind" : "IdRef", "name" : "'Struct Type'" }, @@ -3926,18 +5250,2021 @@ { "kind" : "Decoration" } ], "extensions" : [ "SPV_GOOGLE_decorate_string", "SPV_GOOGLE_hlsl_functionality1" ], - "version" : "None" + "version" : "1.4" }, { - "opname" : "OpGroupNonUniformPartitionNV", - "opcode" : 5296, + "opname" : "OpMemberDecorateStringGOOGLE", + "class" : "Annotation", + "opcode" : 5633, + 
"operands" : [ + { "kind" : "IdRef", "name" : "'Struct Type'" }, + { "kind" : "LiteralInteger", "name" : "'Member'" }, + { "kind" : "Decoration" } + ], + "extensions" : [ "SPV_GOOGLE_decorate_string", "SPV_GOOGLE_hlsl_functionality1" ], + "version" : "1.4" + }, + { + "opname" : "OpVmeImageINTEL", + "class" : "@exclude", + "opcode" : 5699, "operands" : [ { "kind" : "IdResultType" }, { "kind" : "IdResult" }, - { "kind" : "IdRef", "name" : "'Value'" } + { "kind" : "IdRef", "name" : "'Image Type'" }, + { "kind" : "IdRef", "name" : "'Sampler'" } ], - "capabilities" : [ "GroupNonUniformPartitionedNV" ], - "extensions" : [ "SPV_NV_shader_subgroup_partitioned" ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeVmeImageINTEL", + "class" : "@exclude", + "opcode" : 5700, + "operands" : [ + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Image Type'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcImePayloadINTEL", + "class" : "@exclude", + "opcode" : 5701, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcRefPayloadINTEL", + "class" : "@exclude", + "opcode" : 5702, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcSicPayloadINTEL", + "class" : "@exclude", + "opcode" : 5703, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcMcePayloadINTEL", + "class" : "@exclude", + "opcode" : 5704, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcMceResultINTEL", + "class" : "@exclude", + "opcode" : 5705, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcImeResultINTEL", + "class" : "@exclude", + "opcode" : 5706, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcImeResultSingleReferenceStreamoutINTEL", + "class" : "@exclude", + "opcode" : 5707, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcImeResultDualReferenceStreamoutINTEL", + "class" : "@exclude", + "opcode" : 5708, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcImeSingleReferenceStreaminINTEL", + "class" : "@exclude", + "opcode" : 5709, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcImeDualReferenceStreaminINTEL", + "class" : "@exclude", + "opcode" : 5710, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeAvcRefResultINTEL", + "class" : "@exclude", + "opcode" : 5711, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : 
"OpTypeAvcSicResultINTEL", + "class" : "@exclude", + "opcode" : 5712, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5713, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Slice Type'" }, + { "kind" : "IdRef", "name" : "'Qp'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5714, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Reference Base Penalty'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5715, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Slice Type'" }, + { "kind" : "IdRef", "name" : "'Qp'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetInterShapePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5716, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packed Shape Penalty'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL", + "class" : "@exclude", + "opcode" : 5717, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Slice Type'" }, + { "kind" : "IdRef", "name" : "'Qp'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetInterDirectionPenaltyINTEL", + "class" : "@exclude", + "opcode" : 5718, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Direction Cost'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5719, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Slice Type'" }, + { "kind" : "IdRef", "name" : "'Qp'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL", + "class" : "@exclude", + "opcode" : 5720, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Slice Type'" }, + { "kind" : "IdRef", "name" : "'Qp'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL", + "class" : "@exclude", + "opcode" : 5721, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : 
"OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL", + "class" : "@exclude", + "opcode" : 5722, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL", + "class" : "@exclude", + "opcode" : 5723, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL", + "class" : "@exclude", + "opcode" : 5724, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packed Cost Center Delta'" }, + { "kind" : "IdRef", "name" : "'Packed Cost Table'" }, + { "kind" : "IdRef", "name" : "'Cost Precision'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5725, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Slice Type'" }, + { "kind" : "IdRef", "name" : "'Qp'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL", + "class" : "@exclude", + "opcode" : 5726, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5727, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationChromaINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetAcOnlyHaarINTEL", + "class" : "@exclude", + "opcode" : 5728, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL", + "class" : "@exclude", + "opcode" : 5729, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Source Field Polarity'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL", + "class" : "@exclude", + "opcode" : 5730, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Reference Field Polarity'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL", + "class" : "@exclude", + "opcode" : 5731, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Forward Reference Field Polarity'" }, + { "kind" : "IdRef", "name" : "'Backward Reference Field Polarity'" }, + { "kind" : "IdRef", 
"name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceConvertToImePayloadINTEL", + "class" : "@exclude", + "opcode" : 5732, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceConvertToImeResultINTEL", + "class" : "@exclude", + "opcode" : 5733, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceConvertToRefPayloadINTEL", + "class" : "@exclude", + "opcode" : 5734, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceConvertToRefResultINTEL", + "class" : "@exclude", + "opcode" : 5735, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceConvertToSicPayloadINTEL", + "class" : "@exclude", + "opcode" : 5736, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceConvertToSicResultINTEL", + "class" : "@exclude", + "opcode" : 5737, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetMotionVectorsINTEL", + "class" : "@exclude", + "opcode" : 5738, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetInterDistortionsINTEL", + "class" : "@exclude", + "opcode" : 5739, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetBestInterDistortionsINTEL", + "class" : "@exclude", + "opcode" : 5740, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetInterMajorShapeINTEL", + "class" : "@exclude", + "opcode" : 5741, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetInterMinorShapeINTEL", + "class" : "@exclude", + "opcode" : 5742, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : 
"None" + }, + { + "opname" : "OpSubgroupAvcMceGetInterDirectionsINTEL", + "class" : "@exclude", + "opcode" : 5743, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetInterMotionVectorCountINTEL", + "class" : "@exclude", + "opcode" : 5744, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetInterReferenceIdsINTEL", + "class" : "@exclude", + "opcode" : 5745, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL", + "class" : "@exclude", + "opcode" : 5746, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packed Reference Ids'" }, + { "kind" : "IdRef", "name" : "'Packed Reference Parameter Field Polarities'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeInitializeINTEL", + "class" : "@exclude", + "opcode" : 5747, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Coord'" }, + { "kind" : "IdRef", "name" : "'Partition Mask'" }, + { "kind" : "IdRef", "name" : "'SAD Adjustment'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeSetSingleReferenceINTEL", + "class" : "@exclude", + "opcode" : 5748, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Ref Offset'" }, + { "kind" : "IdRef", "name" : "'Search Window Config'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeSetDualReferenceINTEL", + "class" : "@exclude", + "opcode" : 5749, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Fwd Ref Offset'" }, + { "kind" : "IdRef", "name" : "'Bwd Ref Offset'" }, + { "kind" : "IdRef", "name" : "'id> Search Window Config'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeRefWindowSizeINTEL", + "class" : "@exclude", + "opcode" : 5750, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Search Window Config'" }, + { "kind" : "IdRef", "name" : "'Dual Ref'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeAdjustRefOffsetINTEL", + "class" : "@exclude", + "opcode" : 5751, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Ref Offset'" }, + { "kind" : "IdRef", "name" : "'Src Coord'" }, + { "kind" : "IdRef", "name" : "'Ref Window Size'" }, + { "kind" : "IdRef", "name" : "'Image Size'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" 
], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeConvertToMcePayloadINTEL", + "class" : "@exclude", + "opcode" : 5752, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeSetMaxMotionVectorCountINTEL", + "class" : "@exclude", + "opcode" : 5753, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Max Motion Vector Count'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL", + "class" : "@exclude", + "opcode" : 5754, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL", + "class" : "@exclude", + "opcode" : 5755, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Threshold'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeSetWeightedSadINTEL", + "class" : "@exclude", + "opcode" : 5756, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packed Sad Weights'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL", + "class" : "@exclude", + "opcode" : 5757, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithDualReferenceINTEL", + "class" : "@exclude", + "opcode" : 5758, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Fwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Bwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL", + "class" : "@exclude", + "opcode" : 5759, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Streamin Components'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL", + "class" : "@exclude", + "opcode" : 5760, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Fwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Bwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + 
{ "kind" : "IdRef", "name" : "'Streamin Components'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL", + "class" : "@exclude", + "opcode" : 5761, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL", + "class" : "@exclude", + "opcode" : 5762, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Fwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Bwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL", + "class" : "@exclude", + "opcode" : 5763, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Streamin Components'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL", + "class" : "@exclude", + "opcode" : 5764, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Fwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Bwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Streamin Components'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeConvertToMceResultINTEL", + "class" : "@exclude", + "opcode" : 5765, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetSingleReferenceStreaminINTEL", + "class" : "@exclude", + "opcode" : 5766, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetDualReferenceStreaminINTEL", + "class" : "@exclude", + "opcode" : 5767, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL", + "class" : "@exclude", + "opcode" : 5768, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeStripDualReferenceStreamoutINTEL", + "class" : "@exclude", + "opcode" : 5769, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { 
"kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL", + "class" : "@exclude", + "opcode" : 5770, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Major Shape'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL", + "class" : "@exclude", + "opcode" : 5771, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Major Shape'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL", + "class" : "@exclude", + "opcode" : 5772, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Major Shape'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL", + "class" : "@exclude", + "opcode" : 5773, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Major Shape'" }, + { "kind" : "IdRef", "name" : "'Direction'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL", + "class" : "@exclude", + "opcode" : 5774, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Major Shape'" }, + { "kind" : "IdRef", "name" : "'Direction'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL", + "class" : "@exclude", + "opcode" : 5775, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" }, + { "kind" : "IdRef", "name" : "'Major Shape'" }, + { "kind" : "IdRef", "name" : "'Direction'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetBorderReachedINTEL", + "class" : "@exclude", + "opcode" : 5776, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Image Select'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL", + "class" : "@exclude", + "opcode" : 5777, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL", + "class" : "@exclude", + "opcode" : 5778, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : 
"'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL", + "class" : "@exclude", + "opcode" : 5779, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL", + "class" : "@exclude", + "opcode" : 5780, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcFmeInitializeINTEL", + "class" : "@exclude", + "opcode" : 5781, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Coord'" }, + { "kind" : "IdRef", "name" : "'Motion Vectors'" }, + { "kind" : "IdRef", "name" : "'Major Shapes'" }, + { "kind" : "IdRef", "name" : "'Minor Shapes'" }, + { "kind" : "IdRef", "name" : "'Direction'" }, + { "kind" : "IdRef", "name" : "'Pixel Resolution'" }, + { "kind" : "IdRef", "name" : "'Sad Adjustment'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcBmeInitializeINTEL", + "class" : "@exclude", + "opcode" : 5782, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Coord'" }, + { "kind" : "IdRef", "name" : "'Motion Vectors'" }, + { "kind" : "IdRef", "name" : "'Major Shapes'" }, + { "kind" : "IdRef", "name" : "'Minor Shapes'" }, + { "kind" : "IdRef", "name" : "'Direction'" }, + { "kind" : "IdRef", "name" : "'Pixel Resolution'" }, + { "kind" : "IdRef", "name" : "'Bidirectional Weight'" }, + { "kind" : "IdRef", "name" : "'Sad Adjustment'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefConvertToMcePayloadINTEL", + "class" : "@exclude", + "opcode" : 5783, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefSetBidirectionalMixDisableINTEL", + "class" : "@exclude", + "opcode" : 5784, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefSetBilinearFilterEnableINTEL", + "class" : "@exclude", + "opcode" : 5785, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL", + "class" : "@exclude", + "opcode" : 5786, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefEvaluateWithDualReferenceINTEL", + "class" : "@exclude", + "opcode" : 5787, + 
"operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Fwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Bwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL", + "class" : "@exclude", + "opcode" : 5788, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Packed Reference Ids'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL", + "class" : "@exclude", + "opcode" : 5789, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Packed Reference Ids'" }, + { "kind" : "IdRef", "name" : "'Packed Reference Field Polarities'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcRefConvertToMceResultINTEL", + "class" : "@exclude", + "opcode" : 5790, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicInitializeINTEL", + "class" : "@exclude", + "opcode" : 5791, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Coord'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicConfigureSkcINTEL", + "class" : "@exclude", + "opcode" : 5792, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Skip Block Partition Type'" }, + { "kind" : "IdRef", "name" : "'Skip Motion Vector Mask'" }, + { "kind" : "IdRef", "name" : "'Motion Vectors'" }, + { "kind" : "IdRef", "name" : "'Bidirectional Weight'" }, + { "kind" : "IdRef", "name" : "'Sad Adjustment'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicConfigureIpeLumaINTEL", + "class" : "@exclude", + "opcode" : 5793, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Luma Intra Partition Mask'" }, + { "kind" : "IdRef", "name" : "'Intra Neighbour Availabilty'" }, + { "kind" : "IdRef", "name" : "'Left Edge Luma Pixels'" }, + { "kind" : "IdRef", "name" : "'Upper Left Corner Luma Pixel'" }, + { "kind" : "IdRef", "name" : "'Upper Edge Luma Pixels'" }, + { "kind" : "IdRef", "name" : "'Upper Right Edge Luma Pixels'" }, + { "kind" : "IdRef", "name" : "'Sad Adjustment'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicConfigureIpeLumaChromaINTEL", + "class" : "@exclude", + "opcode" : 5794, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Luma Intra Partition Mask'" }, + 
{ "kind" : "IdRef", "name" : "'Intra Neighbour Availabilty'" }, + { "kind" : "IdRef", "name" : "'Left Edge Luma Pixels'" }, + { "kind" : "IdRef", "name" : "'Upper Left Corner Luma Pixel'" }, + { "kind" : "IdRef", "name" : "'Upper Edge Luma Pixels'" }, + { "kind" : "IdRef", "name" : "'Upper Right Edge Luma Pixels'" }, + { "kind" : "IdRef", "name" : "'Left Edge Chroma Pixels'" }, + { "kind" : "IdRef", "name" : "'Upper Left Corner Chroma Pixel'" }, + { "kind" : "IdRef", "name" : "'Upper Edge Chroma Pixels'" }, + { "kind" : "IdRef", "name" : "'Sad Adjustment'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationChromaINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetMotionVectorMaskINTEL", + "class" : "@exclude", + "opcode" : 5795, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Skip Block Partition Type'" }, + { "kind" : "IdRef", "name" : "'Direction'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicConvertToMcePayloadINTEL", + "class" : "@exclude", + "opcode" : 5796, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL", + "class" : "@exclude", + "opcode" : 5797, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packed Shape Penalty'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL", + "class" : "@exclude", + "opcode" : 5798, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Luma Mode Penalty'" }, + { "kind" : "IdRef", "name" : "'Luma Packed Neighbor Modes'" }, + { "kind" : "IdRef", "name" : "'Luma Packed Non Dc Penalty'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL", + "class" : "@exclude", + "opcode" : 5799, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Chroma Mode Base Penalty'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationChromaINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicSetBilinearFilterEnableINTEL", + "class" : "@exclude", + "opcode" : 5800, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL", + "class" : "@exclude", + "opcode" : 5801, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packed Sad Coefficients'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : 
"OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL", + "class" : "@exclude", + "opcode" : 5802, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Block Based Skip Type'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicEvaluateIpeINTEL", + "class" : "@exclude", + "opcode" : 5803, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL", + "class" : "@exclude", + "opcode" : 5804, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicEvaluateWithDualReferenceINTEL", + "class" : "@exclude", + "opcode" : 5805, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Fwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Bwd Ref Image'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL", + "class" : "@exclude", + "opcode" : 5806, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Packed Reference Ids'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL", + "class" : "@exclude", + "opcode" : 5807, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Src Image'" }, + { "kind" : "IdRef", "name" : "'Packed Reference Ids'" }, + { "kind" : "IdRef", "name" : "'Packed Reference Field Polarities'" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicConvertToMceResultINTEL", + "class" : "@exclude", + "opcode" : 5808, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetIpeLumaShapeINTEL", + "class" : "@exclude", + "opcode" : 5809, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL", + "class" : "@exclude", + "opcode" : 5810, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", 
"SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL", + "class" : "@exclude", + "opcode" : 5811, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetPackedIpeLumaModesINTEL", + "class" : "@exclude", + "opcode" : 5812, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetIpeChromaModeINTEL", + "class" : "@exclude", + "opcode" : 5813, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationChromaINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL", + "class" : "@exclude", + "opcode" : 5814, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL", + "class" : "@exclude", + "opcode" : 5815, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL", "SubgroupAvcMotionEstimationIntraINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSubgroupAvcSicGetInterRawSadsINTEL", + "class" : "@exclude", + "opcode" : 5816, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Payload'" } + ], + "capabilities" : [ "SubgroupAvcMotionEstimationINTEL" ], + "version" : "None" + }, + { + "opname" : "OpVariableLengthArrayINTEL", + "class" : "@exclude", + "opcode" : 5818, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Lenght'" } + ], + "capabilities" : [ "VariableLengthArrayINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSaveMemoryINTEL", + "class" : "@exclude", + "opcode" : 5819, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" } + ], + "capabilities" : [ "VariableLengthArrayINTEL" ], + "version" : "None" + }, + { + "opname" : "OpRestoreMemoryINTEL", + "class" : "@exclude", + "opcode" : 5820, + "operands" : [ + { "kind" : "IdRef", "name" : "'Ptr'" } + ], + "capabilities" : [ "VariableLengthArrayINTEL" ], + "version" : "None" + }, + { + "opname" : "OpLoopControlINTEL", + "class" : "Reserved", + "opcode" : 5887, + "operands" : [ + { "kind" : "LiteralInteger", "quantifier" : "*", "name" : "'Loop Control Parameters'" } + ], + "capabilities" : [ "UnstructuredLoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_unstructured_loop_controls" ], + "version" : "None" + }, + { + "opname" : "OpPtrCastToCrossWorkgroupINTEL", + "class" : "@exclude", + "opcode" : 5934, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Pointer'" } + ], + "capabilities" : [ "USMStorageClassesINTEL" ], + "version" : "None" + }, + { + "opname" : 
"OpCrossWorkgroupCastToPtrINTEL", + "class" : "@exclude", + "opcode" : 5938, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Pointer'" } + ], + "capabilities" : [ "USMStorageClassesINTEL" ], + "version" : "None" + }, + { + "opname" : "OpReadPipeBlockingINTEL", + "class" : "Pipe", + "opcode" : 5946, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packet Size'" }, + { "kind" : "IdRef", "name" : "'Packet Alignment'" } + ], + "capabilities" : [ "BlockingPipesINTEL" ], + "extensions" : [ "SPV_INTEL_blocking_pipes" ], + "version" : "None" + }, + { + "opname" : "OpWritePipeBlockingINTEL", + "class" : "Pipe", + "opcode" : 5947, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Packet Size'" }, + { "kind" : "IdRef", "name" : "'Packet Alignment'" } + ], + "capabilities" : [ "BlockingPipesINTEL" ], + "extensions" : [ "SPV_INTEL_blocking_pipes" ], + "version" : "None" + }, + { + "opname" : "OpFPGARegINTEL", + "class" : "Reserved", + "opcode" : 5949, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Result'" }, + { "kind" : "IdRef", "name" : "'Input'" } + ], + "capabilities" : [ "FPGARegINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_reg" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetRayTMinKHR", + "class" : "Reserved", + "opcode" : 6016, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetRayFlagsKHR", + "class" : "Reserved", + "opcode" : 6017, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionTKHR", + "class" : "Reserved", + "opcode" : 6018, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionInstanceCustomIndexKHR", + "class" : "Reserved", + "opcode" : 6019, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionInstanceIdKHR", + "class" : "Reserved", + "opcode" : 6020, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR", + "class" : "Reserved", + "opcode" : 6021, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : 
"'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionGeometryIndexKHR", + "class" : "Reserved", + "opcode" : 6022, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionPrimitiveIndexKHR", + "class" : "Reserved", + "opcode" : 6023, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionBarycentricsKHR", + "class" : "Reserved", + "opcode" : 6024, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionFrontFaceKHR", + "class" : "Reserved", + "opcode" : 6025, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionCandidateAABBOpaqueKHR", + "class" : "Reserved", + "opcode" : 6026, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionObjectRayDirectionKHR", + "class" : "Reserved", + "opcode" : 6027, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionObjectRayOriginKHR", + "class" : "Reserved", + "opcode" : 6028, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetWorldRayDirectionKHR", + "class" : "Reserved", + "opcode" : 6029, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetWorldRayOriginKHR", + "class" : "Reserved", + "opcode" : 6030, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { 
+ "opname" : "OpRayQueryGetIntersectionObjectToWorldKHR", + "class" : "Reserved", + "opcode" : 6031, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpRayQueryGetIntersectionWorldToObjectKHR", + "class" : "Reserved", + "opcode" : 6032, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { + "kind" : "IdRef", + "name" : "'RayQuery'" + }, + { + "kind" : "IdRef", + "name" : "'Intersection'" + } + ], + "capabilities" : [ "RayQueryKHR" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "opname" : "OpAtomicFAddEXT", + "class" : "Atomic", + "opcode" : 6035, + "operands" : [ + { "kind" : "IdResultType" }, + { "kind" : "IdResult" }, + { "kind" : "IdRef", "name" : "'Pointer'" }, + { "kind" : "IdScope", "name" : "'Memory'" }, + { "kind" : "IdMemorySemantics", "name" : "'Semantics'" }, + { "kind" : "IdRef", "name" : "'Value'" } + ], + "capabilities" : [ "AtomicFloat32AddEXT", "AtomicFloat64AddEXT" ], + "extensions" : [ "SPV_EXT_shader_atomic_float_add" ], + "version" : "None" + }, + { + "opname" : "OpTypeBufferSurfaceINTEL", + "class" : "Type-Declaration", + "opcode" : 6086, + "operands" : [ + { "kind" : "IdResult" } + ], + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "opname" : "OpTypeStructContinuedINTEL", + "class" : "Type-Declaration", + "opcode" : 6090, + "operands" : [ + { "kind" : "IdRef", "quantifier" : "*", "name" : "'Member 0 type', +\n'member 1 type', +\n..." } + ], + "capabilities" : [ "LongConstantCompositeINTEL" ], + "version" : "None" + }, + { + "opname" : "OpConstantCompositeContinuedINTEL", + "class" : "Constant-Creation", + "opcode" : 6091, + "operands" : [ + { "kind" : "IdRef", "quantifier" : "*", "name" : "'Constituents'" } + ], + "capabilities" : [ "LongConstantCompositeINTEL" ], + "version" : "None" + }, + { + "opname" : "OpSpecConstantCompositeContinuedINTEL", + "class" : "Constant-Creation", + "opcode" : 6092, + "operands" : [ + { "kind" : "IdRef", "quantifier" : "*", "name" : "'Constituents'" } + ], + "capabilities" : [ "LongConstantCompositeINTEL" ], "version" : "None" } ], @@ -4010,6 +7337,80 @@ "parameters" : [ { "kind" : "IdRef" } ] + }, + { + "enumerant" : "MakeTexelAvailable", + "value" : "0x0100", + "capabilities" : [ "VulkanMemoryModel" ], + "parameters" : [ + { "kind" : "IdScope" } + ], + "version" : "1.5" + }, + { + "enumerant" : "MakeTexelAvailableKHR", + "value" : "0x0100", + "capabilities" : [ "VulkanMemoryModel" ], + "parameters" : [ + { "kind" : "IdScope" } + ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "MakeTexelVisible", + "value" : "0x0200", + "capabilities" : [ "VulkanMemoryModel" ], + "parameters" : [ + { "kind" : "IdScope" } + ], + "version" : "1.5" + }, + { + "enumerant" : "MakeTexelVisibleKHR", + "value" : "0x0200", + "capabilities" : [ "VulkanMemoryModel" ], + "parameters" : [ + { "kind" : "IdScope" } + ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "NonPrivateTexel", + "value" : "0x0400", + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "NonPrivateTexelKHR", + "value" : "0x0400", + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ 
"SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "VolatileTexel", + "value" : "0x0800", + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "VolatileTexelKHR", + "value" : "0x0800", + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "SignExtend", + "value" : "0x1000", + "version" : "1.4" + }, + { + "enumerant" : "ZeroExtend", + "value" : "0x2000", + "version" : "1.4" } ] }, @@ -4045,6 +7446,18 @@ "enumerant" : "Fast", "value" : "0x0010", "capabilities" : [ "Kernel" ] + }, + { + "enumerant" : "AllowContractFastINTEL", + "value" : "0x10000", + "capabilities" : [ "FPFastMathModeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "AllowReassocINTEL", + "value" : "0x20000", + "capabilities" : [ "FPFastMathModeINTEL" ], + "version" : "None" } ] }, @@ -4094,6 +7507,126 @@ { "kind" : "LiteralInteger" } ], "version" : "1.1" + }, + { + "enumerant" : "MinIterations", + "value" : "0x0010", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "version" : "1.4" + }, + { + "enumerant" : "MaxIterations", + "value" : "0x0020", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "version" : "1.4" + }, + { + "enumerant" : "IterationMultiple", + "value" : "0x0040", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "version" : "1.4" + }, + { + "enumerant" : "PeelCount", + "value" : "0x0080", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "version" : "1.4" + }, + { + "enumerant" : "PartialCount", + "value" : "0x0100", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "version" : "1.4" + }, + { + "enumerant" : "InitiationIntervalINTEL", + "value" : "0x10000", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "MaxConcurrencyINTEL", + "value" : "0x20000", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "DependencyArrayINTEL", + "value" : "0x40000", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "PipelineEnableINTEL", + "value" : "0x80000", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "LoopCoalesceINTEL", + "value" : "0x100000", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "MaxInterleavingINTEL", + "value" : "0x200000", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "SpeculatedIterationsINTEL", + "value" : "0x400000", + "parameters" : [ + { "kind" : "LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "NoFusionINTEL", + "value" : "0x800000", + "parameters" : [ + { "kind" : 
"LiteralInteger" } + ], + "capabilities" : [ "FPGALoopControlsINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" } ] }, @@ -4176,6 +7709,52 @@ { "enumerant" : "ImageMemory", "value" : "0x0800" + }, + { + "enumerant" : "OutputMemory", + "value" : "0x1000", + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "OutputMemoryKHR", + "value" : "0x1000", + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "MakeAvailable", + "value" : "0x2000", + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "MakeAvailableKHR", + "value" : "0x2000", + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "MakeVisible", + "value" : "0x4000", + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "MakeVisibleKHR", + "value" : "0x4000", + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "Volatile", + "value" : "0x8000", + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" } ] }, @@ -4201,6 +7780,57 @@ { "enumerant" : "Nontemporal", "value" : "0x0004" + }, + { + "enumerant" : "MakePointerAvailable", + "value" : "0x0008", + "parameters" : [ + { "kind" : "IdScope" } + ], + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "MakePointerAvailableKHR", + "value" : "0x0008", + "parameters" : [ + { "kind" : "IdScope" } + ], + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "MakePointerVisible", + "value" : "0x0010", + "parameters" : [ + { "kind" : "IdScope" } + ], + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "MakePointerVisibleKHR", + "value" : "0x0010", + "parameters" : [ + { "kind" : "IdScope" } + ], + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "NonPrivatePointer", + "value" : "0x0020", + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "NonPrivatePointerKHR", + "value" : "0x0020", + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" } ] }, @@ -4219,6 +7849,108 @@ } ] }, + { + "category" : "BitEnum", + "kind" : "RayFlags", + "enumerants" : [ + { + "enumerant" : "NoneKHR", + "value" : "0x0000", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "OpaqueKHR", + "value" : "0x0001", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "NoOpaqueKHR", + "value" : "0x0002", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "TerminateOnFirstHitKHR", + "value" : "0x0004", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "SkipClosestHitShaderKHR", + "value" : "0x0008", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "CullBackFacingTrianglesKHR", + "value" : "0x0010", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : 
"CullFrontFacingTrianglesKHR", + "value" : "0x0020", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "CullOpaqueKHR", + "value" : "0x0040", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "CullNoOpaqueKHR", + "value" : "0x0080", + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "SkipTrianglesKHR", + "value" : "0x0100", + "capabilities" : [ "RayTraversalPrimitiveCullingKHR" ], + "version" : "None" + }, + { + "enumerant" : "SkipAABBsKHR", + "value" : "0x0200", + "capabilities" : [ "RayTraversalPrimitiveCullingKHR" ], + "version" : "None" + } + ] + }, + { + "category" : "BitEnum", + "kind" : "FragmentShadingRate", + "enumerants" : [ + { + "enumerant" : "Vertical2Pixels", + "value" : "0x0001", + "capabilities" : [ "FragmentShadingRateKHR" ], + "version" : "None" + }, + { + "enumerant" : "Vertical4Pixels", + "value" : "0x0002", + "capabilities" : [ "FragmentShadingRateKHR" ], + "version" : "None" + }, + { + "enumerant" : "Horizontal2Pixels", + "value" : "0x0004", + "capabilities" : [ "FragmentShadingRateKHR" ], + "version" : "None" + }, + { + "enumerant" : "Horizontal4Pixels", + "value" : "0x0008", + "capabilities" : [ "FragmentShadingRateKHR" ], + "version" : "None" + } + ] + }, { "category" : "ValueEnum", "kind" : "SourceLanguage", @@ -4287,6 +8019,90 @@ "enumerant" : "Kernel", "value" : 6, "capabilities" : [ "Kernel" ] + }, + { + "enumerant" : "TaskNV", + "value" : 5267, + "capabilities" : [ "MeshShadingNV" ], + "version" : "None" + }, + { + "enumerant" : "MeshNV", + "value" : 5268, + "capabilities" : [ "MeshShadingNV" ], + "version" : "None" + }, + { + "enumerant" : "RayGenerationNV", + "value" : 5313, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "RayGenerationKHR", + "value" : 5313, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "IntersectionNV", + "value" : 5314, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "IntersectionKHR", + "value" : 5314, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "AnyHitNV", + "value" : 5315, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "AnyHitKHR", + "value" : 5315, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "ClosestHitNV", + "value" : 5316, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "ClosestHitKHR", + "value" : 5316, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "MissNV", + "value" : 5317, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "MissKHR", + "value" : 5317, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "CallableNV", + "value" : 5318, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "CallableKHR", + "value" : 5318, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" } ] }, @@ -4307,6 +8123,20 @@ "enumerant" : "Physical64", "value" : 2, "capabilities" : [ "Addresses" ] + }, + { + "enumerant" : "PhysicalStorageBuffer64", + "value" : 5348, + "extensions" : [ 
"SPV_EXT_physical_storage_buffer", "SPV_KHR_physical_storage_buffer" ], + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "version" : "1.5" + }, + { + "enumerant" : "PhysicalStorageBuffer64EXT", + "value" : 5348, + "extensions" : [ "SPV_EXT_physical_storage_buffer" ], + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "version" : "1.5" } ] }, @@ -4328,6 +8158,19 @@ "enumerant" : "OpenCL", "value" : 2, "capabilities" : [ "Kernel" ] + }, + { + "enumerant" : "Vulkan", + "value" : 3, + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "VulkanKHR", + "value" : 3, + "capabilities" : [ "VulkanMemoryModel" ], + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" } ] }, @@ -4475,7 +8318,7 @@ { "enumerant" : "OutputVertices", "value" : 26, - "capabilities" : [ "Geometry", "Tessellation" ], + "capabilities" : [ "Geometry", "Tessellation", "MeshShadingNV" ], "parameters" : [ { "kind" : "LiteralInteger", "name" : "'Vertex count'" } ] @@ -4483,7 +8326,7 @@ { "enumerant" : "OutputPoints", "value" : 27, - "capabilities" : [ "Geometry" ] + "capabilities" : [ "Geometry", "MeshShadingNV" ] }, { "enumerant" : "OutputLineStrip", @@ -4573,12 +8416,235 @@ "extensions" : [ "SPV_KHR_post_depth_coverage" ], "version" : "None" }, + { + "enumerant" : "DenormPreserve", + "value" : 4459, + "capabilities" : [ "DenormPreserve" ], + "extensions" : [ "SPV_KHR_float_controls" ], + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "version" : "1.4" + }, + { + "enumerant" : "DenormFlushToZero", + "value" : 4460, + "capabilities" : [ "DenormFlushToZero" ], + "extensions" : [ "SPV_KHR_float_controls" ], + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "version" : "1.4" + }, + { + "enumerant" : "SignedZeroInfNanPreserve", + "value" : 4461, + "capabilities" : [ "SignedZeroInfNanPreserve" ], + "extensions" : [ "SPV_KHR_float_controls" ], + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "version" : "1.4" + }, + { + "enumerant" : "RoundingModeRTE", + "value" : 4462, + "capabilities" : [ "RoundingModeRTE" ], + "extensions" : [ "SPV_KHR_float_controls" ], + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "version" : "1.4" + }, + { + "enumerant" : "RoundingModeRTZ", + "value" : 4463, + "capabilities" : [ "RoundingModeRTZ" ], + "extensions" : [ "SPV_KHR_float_controls" ], + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "version" : "1.4" + }, { "enumerant" : "StencilRefReplacingEXT", "value" : 5027, "capabilities" : [ "StencilExportEXT" ], "extensions" : [ "SPV_EXT_shader_stencil_export" ], "version" : "None" + }, + { + "enumerant" : "OutputLinesNV", + "value" : 5269, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "OutputPrimitivesNV", + "value" : 5270, + "capabilities" : [ "MeshShadingNV" ], + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Primitive count'" } + ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "DerivativeGroupQuadsNV", + "value" : 5289, + "capabilities" : [ "ComputeDerivativeGroupQuadsNV" ], + "extensions" : [ "SPV_NV_compute_shader_derivatives" ], + "version" : "None" + }, + { + "enumerant" : "DerivativeGroupLinearNV", + "value" : 5290, + "capabilities" : [ "ComputeDerivativeGroupLinearNV" ], + "extensions" : [ 
"SPV_NV_compute_shader_derivatives" ], + "version" : "None" + }, + { + "enumerant" : "OutputTrianglesNV", + "value" : 5298, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "PixelInterlockOrderedEXT", + "value" : 5366, + "capabilities" : [ "FragmentShaderPixelInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "PixelInterlockUnorderedEXT", + "value" : 5367, + "capabilities" : [ "FragmentShaderPixelInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "SampleInterlockOrderedEXT", + "value" : 5368, + "capabilities" : [ "FragmentShaderSampleInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "SampleInterlockUnorderedEXT", + "value" : 5369, + "capabilities" : [ "FragmentShaderSampleInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "ShadingRateInterlockOrderedEXT", + "value" : 5370, + "capabilities" : [ "FragmentShaderShadingRateInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "ShadingRateInterlockUnorderedEXT", + "value" : 5371, + "capabilities" : [ "FragmentShaderShadingRateInterlockEXT" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "SharedLocalMemorySizeINTEL", + "value" : 5618, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Size'" } + ], + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "RoundingModeRTPINTEL", + "value" : 5620, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "capabilities" : [ "RoundToInfinityINTEL" ], + "version" : "None" + }, + { + "enumerant" : "RoundingModeRTNINTEL", + "value" : 5621, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "capabilities" : [ "RoundToInfinityINTEL" ], + "version" : "None" + }, + { + "enumerant" : "FloatingPointModeALTINTEL", + "value" : 5622, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "capabilities" : [ "RoundToInfinityINTEL" ], + "version" : "None" + }, + { + "enumerant" : "FloatingPointModeIEEEINTEL", + "value" : 5623, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" } + ], + "capabilities" : [ "RoundToInfinityINTEL" ], + "version" : "None" + }, + { + "enumerant" : "MaxWorkgroupSizeINTEL", + "value" : 5893, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'max_x_size'" }, + { "kind" : "LiteralInteger", "name" : "'max_y_size'" }, + { "kind" : "LiteralInteger", "name" : "'max_z_size'" } + ], + "capabilities" : [ "KernelAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_kernel_attributes" ], + "version" : "None" + }, + { + "enumerant" : "MaxWorkDimINTEL", + "value" : 5894, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'max_dimensions'" } + ], + "capabilities" : [ "KernelAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_kernel_attributes" ], + "version" : "None" + }, + { + "enumerant" : "NoGlobalOffsetINTEL", + "value" : 5895, + "capabilities" : [ "KernelAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_kernel_attributes" ], + "version" : "None" + }, + { + "enumerant" : "NumSIMDWorkitemsINTEL", + "value" : 5896, + "parameters" : [ + { "kind" : 
"LiteralInteger", "name" : "'vector_width'" } + ], + "capabilities" : [ "FPGAKernelAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_kernel_attributes" ], + "version" : "None" + }, + { + "enumerant" : "SchedulerTargetFmaxMhzINTEL", + "value" : 5903, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'target_fmax'" } + ], + "capabilities" : [ "FPGAKernelAttributesINTEL" ], + "version" : "None" } ] }, @@ -4649,6 +8715,129 @@ ], "capabilities" : [ "Shader" ], "version" : "1.3" + }, + { + "enumerant" : "CallableDataNV", + "value" : 5328, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "CallableDataKHR", + "value" : 5328, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "IncomingCallableDataNV", + "value" : 5329, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "IncomingCallableDataKHR", + "value" : 5329, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "RayPayloadNV", + "value" : 5338, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "RayPayloadKHR", + "value" : 5338, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "HitAttributeNV", + "value" : 5339, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "HitAttributeKHR", + "value" : 5339, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "IncomingRayPayloadNV", + "value" : 5342, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "IncomingRayPayloadKHR", + "value" : 5342, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "ShaderRecordBufferNV", + "value" : 5343, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "ShaderRecordBufferKHR", + "value" : 5343, + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "version" : "None" + }, + { + "enumerant" : "PhysicalStorageBuffer", + "value" : 5349, + "extensions" : [ "SPV_EXT_physical_storage_buffer", "SPV_KHR_physical_storage_buffer" ], + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "version" : "1.5" + }, + { + "enumerant" : "PhysicalStorageBufferEXT", + "value" : 5349, + "extensions" : [ "SPV_EXT_physical_storage_buffer" ], + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "version" : "1.5" + }, + { + "enumerant" : "CodeSectionINTEL", + "value" : 5605, + "extensions" : [ "SPV_INTEL_function_pointers" 
], + "capabilities" : [ "FunctionPointersINTEL" ], + "version" : "None" + }, + { + "enumerant" : "DeviceOnlyINTEL", + "value" : 5936, + "extensions" : [ + "SPV_INTEL_usm_storage_classes" + ], + "capabilities" : [ "USMStorageClassesINTEL" ], + "version" : "None" + }, + { + "enumerant" : "HostOnlyINTEL", + "value" : 5937, + "extensions" : [ + "SPV_INTEL_usm_storage_classes" + ], + "capabilities" : [ "USMStorageClassesINTEL" ], + "version" : "None" } ] }, @@ -4659,11 +8848,12 @@ { "enumerant" : "1D", "value" : 0, - "capabilities" : [ "Sampled1D" ] + "capabilities" : [ "Sampled1D", "Image1D" ] }, { "enumerant" : "2D", - "value" : 1 + "value" : 1, + "capabilities" : [ "Shader", "Kernel", "ImageMSArray" ] }, { "enumerant" : "3D", @@ -4672,17 +8862,17 @@ { "enumerant" : "Cube", "value" : 3, - "capabilities" : [ "Shader" ] + "capabilities" : [ "Shader", "ImageCubeArray" ] }, { "enumerant" : "Rect", "value" : 4, - "capabilities" : [ "SampledRect" ] + "capabilities" : [ "SampledRect", "ImageRect" ] }, { "enumerant" : "Buffer", "value" : 5, - "capabilities" : [ "SampledBuffer" ] + "capabilities" : [ "SampledBuffer", "ImageBuffer" ] }, { "enumerant" : "SubpassData", @@ -4940,6 +9130,16 @@ "enumerant" : "R8ui", "value" : 39, "capabilities" : [ "StorageImageExtendedFormats" ] + }, + { + "enumerant" : "R64ui", + "value" : 40, + "capabilities" : [ "Int64ImageEXT" ] + }, + { + "enumerant" : "R64i", + "value" : 41, + "capabilities" : [ "Int64ImageEXT" ] } ] }, @@ -5162,6 +9362,42 @@ } ] }, + { + "category" : "ValueEnum", + "kind" : "FPDenormMode", + "enumerants" : [ + { + "enumerant" : "Preserve", + "value" : 0, + "capabilities" : [ "FunctionFloatControlINTEL" ], + "version" : "None" + }, + { + "enumerant" : "FlushToZero", + "value" : 1, + "capabilities" : [ "FunctionFloatControlINTEL" ], + "version" : "None" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "FPOperationMode", + "enumerants" : [ + { + "enumerant" : "IEEE", + "value" : 0, + "capabilities" : [ "FunctionFloatControlINTEL" ], + "version" : "None" + }, + { + "enumerant" : "ALT", + "value" : 1, + "capabilities" : [ "FunctionFloatControlINTEL" ], + "version" : "None" + } + ] + }, { "category" : "ValueEnum", "kind" : "LinkageType", @@ -5270,7 +9506,8 @@ { "enumerant" : "BufferBlock", "value" : 3, - "capabilities" : [ "Shader" ] + "capabilities" : [ "Shader" ], + "lastVersion" : "1.3" }, { "enumerant" : "RowMajor", @@ -5384,6 +9621,15 @@ "value" : 26, "capabilities" : [ "Shader" ] }, + { + "enumerant" : "UniformId", + "value" : 27, + "capabilities" : [ "Shader" ], + "parameters" : [ + { "kind" : "IdScope", "name" : "'Execution'" } + ], + "version" : "1.4" + }, { "enumerant" : "SaturatedConversion", "value" : 28, @@ -5541,6 +9787,18 @@ ], "version" : "1.2" }, + { + "enumerant" : "NoSignedWrap", + "value" : 4469, + "extensions" : [ "SPV_KHR_no_integer_wrap_decoration" ], + "version" : "1.4" + }, + { + "enumerant" : "NoUnsignedWrap", + "value" : 4470, + "extensions" : [ "SPV_KHR_no_integer_wrap_decoration" ], + "version" : "1.4" + }, { "enumerant" : "ExplicitInterpAMD", "value" : 4999, @@ -5577,10 +9835,149 @@ { "kind" : "LiteralInteger", "name" : "'Offset'" } ] }, + { + "enumerant" : "PerPrimitiveNV", + "value" : 5271, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "PerViewNV", + "value" : 5272, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "PerTaskNV", + "value" : 5273, + 
"capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "PerVertexNV", + "value" : 5285, + "capabilities" : [ "FragmentBarycentricNV" ], + "extensions" : [ "SPV_NV_fragment_shader_barycentric" ], + "version" : "None" + }, + { + "enumerant" : "NonUniform", + "value" : 5300, + "capabilities" : [ "ShaderNonUniform" ], + "version" : "1.5" + }, { "enumerant" : "NonUniformEXT", "value" : 5300, - "capabilities" : [ "ShaderNonUniformEXT" ] + "capabilities" : [ "ShaderNonUniform" ], + "extensions" : [ "SPV_EXT_descriptor_indexing" ], + "version" : "1.5" + }, + { + "enumerant" : "RestrictPointer", + "value" : 5355, + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "extensions" : [ "SPV_EXT_physical_storage_buffer", "SPV_KHR_physical_storage_buffer" ], + "version" : "1.5" + }, + { + "enumerant" : "RestrictPointerEXT", + "value" : 5355, + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "extensions" : [ "SPV_EXT_physical_storage_buffer" ], + "version" : "1.5" + }, + { + "enumerant" : "AliasedPointer", + "value" : 5356, + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "extensions" : [ "SPV_EXT_physical_storage_buffer", "SPV_KHR_physical_storage_buffer" ], + "version" : "1.5" + }, + { + "enumerant" : "AliasedPointerEXT", + "value" : 5356, + "capabilities" : [ "PhysicalStorageBufferAddresses" ], + "extensions" : [ "SPV_EXT_physical_storage_buffer" ], + "version" : "1.5" + }, + { + "enumerant" : "SIMTCallINTEL", + "value" : 5599, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'N'" } + ], + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "ReferencedIndirectlyINTEL", + "value" : 5602, + "capabilities" : [ "IndirectReferencesINTEL" ], + "extensions" : [ "SPV_INTEL_function_pointers" ], + "version" : "None" + }, + { + "enumerant" : "ClobberINTEL", + "value" : 5607, + "parameters" : [ + { "kind" : "LiteralString", "name" : "'Register'" } + ], + "capabilities" : [ "AsmINTEL" ], + "version" : "None" + }, + { + "enumerant" : "SideEffectsINTEL", + "value" : 5608, + "capabilities" : [ "AsmINTEL" ], + "version" : "None" + }, + { + "enumerant" : "VectorComputeVariableINTEL", + "value" : 5624, + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "FuncParamIOKindINTEL", + "value" : 5625, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Kind'" } + ], + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "VectorComputeFunctionINTEL", + "value" : 5626, + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "StackCallINTEL", + "value" : 5627, + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "GlobalVariableOffsetINTEL", + "value" : 5628, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Offset'" } + ], + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "CounterBuffer", + "value" : 5634, + "parameters" : [ + { "kind" : "IdRef", "name" : "'Counter Buffer'" } + ], + "version" : "1.4" }, { "enumerant" : "HlslCounterBufferGOOGLE", @@ -5591,6 +9988,14 @@ "extensions" : [ "SPV_GOOGLE_hlsl_functionality1" ], "version" : "None" }, + { + "enumerant" : "UserSemantic", + "value" : 5635, + "parameters" : [ + { "kind" : "LiteralString", "name" : "'Semantic'" } + ], + "version" : "1.4" + }, { "enumerant" : "HlslSemanticGOOGLE", "value" : 5635, @@ -5599,6 +10004,226 @@ ], 
"extensions" : [ "SPV_GOOGLE_hlsl_functionality1" ], "version" : "None" + }, + { + "enumerant" : "UserTypeGOOGLE", + "value" : 5636, + "parameters" : [ + { "kind" : "LiteralString", "name" : "'User Type'" } + ], + "extensions" : [ "SPV_GOOGLE_user_type" ], + "version" : "None" + }, + { + "enumerant" : "FunctionRoundingModeINTEL", + "value" : 5822, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" }, + { "kind" : "FPRoundingMode", "name" : "'FP Rounding Mode'" } + ], + "capabilities" : [ "FunctionFloatControlINTEL" ], + "version" : "None" + }, + { + "enumerant" : "FunctionDenormModeINTEL", + "value" : 5823, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" }, + { "kind" : "FPDenormMode", "name" : "'FP Denorm Mode'" } + ], + "capabilities" : [ "FunctionFloatControlINTEL" ], + "version" : "None" + }, + { + "enumerant" : "RegisterINTEL", + "value" : 5825, + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "MemoryINTEL", + "value" : 5826, + "parameters" : [ + { "kind" : "LiteralString", "name" : "'Memory Type'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "NumbanksINTEL", + "value" : 5827, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Banks'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "BankwidthINTEL", + "value" : 5828, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Bank Width'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "MaxPrivateCopiesINTEL", + "value" : 5829, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Maximum Copies'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "SinglepumpINTEL", + "value" : 5830, + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "DoublepumpINTEL", + "value" : 5831, + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "MaxReplicatesINTEL", + "value" : 5832, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Maximum Replicates'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "SimpleDualPortINTEL", + "value" : 5833, + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "MergeINTEL", + "value" : 5834, + "parameters" : [ + { "kind" : "LiteralString", "name" : "'Merge Key'" }, + { "kind" : "LiteralString", "name" : "'Merge Type'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "BankBitsINTEL", + "value" : 5835, + "parameters" : [ + { "kind" : "LiteralInteger", "quantifier" : "*", "name" : "'Bank Bits'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ 
"SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "ForcePow2DepthINTEL", + "value" : 5836, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Force Key'" } + ], + "capabilities" : [ "FPGAMemoryAttributesINTEL" ], + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "BurstCoalesceINTEL", + "value" : 5899, + "capabilities" : [ "FPGAMemoryAccessesINTEL" ], + "version" : "None" + }, + { + "enumerant" : "CacheSizeINTEL", + "value" : 5900, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Cache Size in bytes'" } + ], + "capabilities" : [ "FPGAMemoryAccessesINTEL" ], + "version" : "None" + }, + { + "enumerant" : "DontStaticallyCoalesceINTEL", + "value" : 5901, + "capabilities" : [ "FPGAMemoryAccessesINTEL" ], + "version" : "None" + }, + { + "enumerant" : "PrefetchINTEL", + "value" : 5902, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Prefetcher Size in bytes'" } + ], + "capabilities" : [ "FPGAMemoryAccessesINTEL" ], + "version" : "None" + }, + { + "enumerant" : "StallEnableINTEL", + "value" : 5905, + "capabilities" : [ "FPGAClusterAttributesINTEL" ], + "version" : "None" + }, + { + "enumerant" : "FuseLoopsInFunctionINTEL", + "value" : 5907, + "capabilities" : [ "LoopFuseINTEL" ], + "version" : "None" + }, + { + "enumerant" : "BufferLocationINTEL", + "value" : 5921, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Buffer Location ID'" } + ], + "capabilities" : [ "FPGABufferLocationINTEL" ], + "version" : "None" + }, + { + "enumerant" : "IOPipeStorageINTEL", + "value" : 5944, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'IO Pipe ID'" } + ], + "capabilities" : [ "IOPipesINTEL" ], + "version" : "None" + }, + { + "enumerant" : "FunctionFloatingPointModeINTEL", + "value" : 6080, + "parameters" : [ + { "kind" : "LiteralInteger", "name" : "'Target Width'" }, + { "kind" : "FPOperationMode", "name" : "'FP Operation Mode'" } + ], + "capabilities" : [ "FunctionFloatControlINTEL" ], + "version" : "None" + }, + { + "enumerant" : "SingleElementVectorINTEL", + "value" : 6085, + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" + }, + { + "enumerant" : "VectorComputeCallableFunctionINTEL", + "value" : 6087, + "capabilities" : [ "VectorComputeINTEL" ], + "version" : "None" } ] }, @@ -5639,7 +10264,7 @@ { "enumerant" : "PrimitiveId", "value" : 7, - "capabilities" : [ "Geometry", "Tessellation" ] + "capabilities" : [ "Geometry", "Tessellation", "RayTracingNV", "RayTracingKHR", "MeshShadingNV" ] }, { "enumerant" : "InvocationId", @@ -5649,12 +10274,12 @@ { "enumerant" : "Layer", "value" : 9, - "capabilities" : [ "Geometry" ] + "capabilities" : [ "Geometry", "ShaderLayer", "ShaderViewportIndexLayerEXT", "MeshShadingNV" ] }, { "enumerant" : "ViewportIndex", "value" : 10, - "capabilities" : [ "MultiViewport" ] + "capabilities" : [ "MultiViewport", "ShaderViewportIndex", "ShaderViewportIndexLayerEXT", "MeshShadingNV" ] }, { "enumerant" : "TessLevelOuter", @@ -5811,30 +10436,6 @@ "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], "version" : "1.3" }, - { - "enumerant" : "SubgroupGeMask", - "value" : 4417, - "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], - "version" : "1.3" - }, - { - "enumerant" : "SubgroupGtMask", - "value" : 4418, - "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], - "version" : "1.3" - }, - { - "enumerant" : "SubgroupLeMask", - "value" : 4419, - "capabilities" : [ "SubgroupBallotKHR", 
"GroupNonUniformBallot" ], - "version" : "1.3" - }, - { - "enumerant" : "SubgroupLtMask", - "value" : 4420, - "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], - "version" : "1.3" - }, { "enumerant" : "SubgroupEqMaskKHR", "value" : 4416, @@ -5842,6 +10443,12 @@ "extensions" : [ "SPV_KHR_shader_ballot" ], "version" : "1.3" }, + { + "enumerant" : "SubgroupGeMask", + "value" : 4417, + "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], + "version" : "1.3" + }, { "enumerant" : "SubgroupGeMaskKHR", "value" : 4417, @@ -5849,6 +10456,12 @@ "extensions" : [ "SPV_KHR_shader_ballot" ], "version" : "1.3" }, + { + "enumerant" : "SubgroupGtMask", + "value" : 4418, + "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], + "version" : "1.3" + }, { "enumerant" : "SubgroupGtMaskKHR", "value" : 4418, @@ -5856,6 +10469,12 @@ "extensions" : [ "SPV_KHR_shader_ballot" ], "version" : "1.3" }, + { + "enumerant" : "SubgroupLeMask", + "value" : 4419, + "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], + "version" : "1.3" + }, { "enumerant" : "SubgroupLeMaskKHR", "value" : 4419, @@ -5863,6 +10482,12 @@ "extensions" : [ "SPV_KHR_shader_ballot" ], "version" : "1.3" }, + { + "enumerant" : "SubgroupLtMask", + "value" : 4420, + "capabilities" : [ "SubgroupBallotKHR", "GroupNonUniformBallot" ], + "version" : "1.3" + }, { "enumerant" : "SubgroupLtMaskKHR", "value" : 4420, @@ -5887,10 +10512,17 @@ { "enumerant" : "DrawIndex", "value" : 4426, - "capabilities" : [ "DrawParameters" ], - "extensions" : [ "SPV_KHR_shader_draw_parameters" ], + "capabilities" : [ "DrawParameters", "MeshShadingNV" ], + "extensions" : [ "SPV_KHR_shader_draw_parameters", "SPV_NV_mesh_shader" ], "version" : "1.3" }, + { + "enumerant" : "PrimitiveShadingRateKHR", + "value" : 4432, + "capabilities" : [ "FragmentShadingRateKHR" ], + "extensions" : [ "SPV_KHR_fragment_shading_rate" ], + "version" : "None" + }, { "enumerant" : "DeviceIndex", "value" : 4438, @@ -5905,6 +10537,13 @@ "extensions" : [ "SPV_KHR_multiview" ], "version" : "1.3" }, + { + "enumerant" : "ShadingRateKHR", + "value" : 4444, + "capabilities" : [ "FragmentShadingRateKHR" ], + "extensions" : [ "SPV_KHR_fragment_shading_rate" ], + "version" : "None" + }, { "enumerant" : "BaryCoordNoPerspAMD", "value" : 4992, @@ -5957,7 +10596,8 @@ { "enumerant" : "ViewportMaskNV", "value" : 5253, - "capabilities" : [ "ShaderViewportMaskNV" ], + "capabilities" : [ "ShaderViewportMaskNV", "MeshShadingNV" ], + "extensions" : [ "SPV_NV_viewport_array2", "SPV_NV_mesh_shader" ], "version" : "None" }, { @@ -5977,13 +10617,15 @@ { "enumerant" : "PositionPerViewNV", "value" : 5261, - "capabilities" : [ "PerViewAttributesNV" ], + "capabilities" : [ "PerViewAttributesNV", "MeshShadingNV" ], + "extensions" : [ "SPV_NVX_multiview_per_view_attributes", "SPV_NV_mesh_shader" ], "version" : "None" }, { "enumerant" : "ViewportMaskPerViewNV", "value" : 5262, - "capabilities" : [ "PerViewAttributesNV" ], + "capabilities" : [ "PerViewAttributesNV", "MeshShadingNV" ], + "extensions" : [ "SPV_NVX_multiview_per_view_attributes", "SPV_NV_mesh_shader" ], "version" : "None" }, { @@ -5992,6 +10634,328 @@ "capabilities" : [ "FragmentFullyCoveredEXT" ], "extensions" : [ "SPV_EXT_fragment_fully_covered" ], "version" : "None" + }, + { + "enumerant" : "TaskCountNV", + "value" : 5274, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "PrimitiveCountNV", + "value" : 5275, + "capabilities" : [ 
"MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "PrimitiveIndicesNV", + "value" : 5276, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "ClipDistancePerViewNV", + "value" : 5277, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "CullDistancePerViewNV", + "value" : 5278, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "LayerPerViewNV", + "value" : 5279, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "MeshViewCountNV", + "value" : 5280, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "MeshViewIndicesNV", + "value" : 5281, + "capabilities" : [ "MeshShadingNV" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "BaryCoordNV", + "value" : 5286, + "capabilities" : [ "FragmentBarycentricNV" ], + "extensions" : [ "SPV_NV_fragment_shader_barycentric" ], + "version" : "None" + }, + { + "enumerant" : "BaryCoordNoPerspNV", + "value" : 5287, + "capabilities" : [ "FragmentBarycentricNV" ], + "extensions" : [ "SPV_NV_fragment_shader_barycentric" ], + "version" : "None" + }, + { + "enumerant" : "FragSizeEXT", + "value" : 5292 , + "capabilities" : [ "FragmentDensityEXT", "ShadingRateNV" ], + "extensions" : [ "SPV_EXT_fragment_invocation_density", "SPV_NV_shading_rate" ], + "version" : "None" + }, + { + "enumerant" : "FragmentSizeNV", + "value" : 5292 , + "capabilities" : [ "ShadingRateNV", "FragmentDensityEXT" ], + "extensions" : [ "SPV_NV_shading_rate", "SPV_EXT_fragment_invocation_density" ], + "version" : "None" + }, + { + "enumerant" : "FragInvocationCountEXT", + "value" : 5293, + "capabilities" : [ "FragmentDensityEXT", "ShadingRateNV" ], + "extensions" : [ "SPV_EXT_fragment_invocation_density", "SPV_NV_shading_rate" ], + "version" : "None" + }, + { + "enumerant" : "InvocationsPerPixelNV", + "value" : 5293, + "capabilities" : [ "ShadingRateNV", "FragmentDensityEXT" ], + "extensions" : [ "SPV_NV_shading_rate", "SPV_EXT_fragment_invocation_density" ], + "version" : "None" + }, + { + "enumerant" : "LaunchIdNV", + "value" : 5319, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "LaunchIdKHR", + "value" : 5319, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "LaunchSizeNV", + "value" : 5320, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "LaunchSizeKHR", + "value" : 5320, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "WorldRayOriginNV", + "value" : 5321, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "WorldRayOriginKHR", + "value" : 5321, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" 
: [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "WorldRayDirectionNV", + "value" : 5322, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "WorldRayDirectionKHR", + "value" : 5322, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "ObjectRayOriginNV", + "value" : 5323, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "ObjectRayOriginKHR", + "value" : 5323, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "ObjectRayDirectionNV", + "value" : 5324, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "ObjectRayDirectionKHR", + "value" : 5324, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "RayTminNV", + "value" : 5325, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "RayTminKHR", + "value" : 5325, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "RayTmaxNV", + "value" : 5326, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "RayTmaxKHR", + "value" : 5326, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "InstanceCustomIndexNV", + "value" : 5327, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "InstanceCustomIndexKHR", + "value" : 5327, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "ObjectToWorldNV", + "value" : 5330, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "ObjectToWorldKHR", + "value" : 5330, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "WorldToObjectNV", + "value" : 5331, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "WorldToObjectKHR", + "value" : 5331, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "HitTNV", + "value" : 5332, + "capabilities" : [ "RayTracingNV" ], + "extensions" : [ "SPV_NV_ray_tracing" ], + "version" : 
"None" + }, + { + "enumerant" : "HitKindNV", + "value" : 5333, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "HitKindKHR", + "value" : 5333, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "IncomingRayFlagsNV", + "value" : 5351, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "IncomingRayFlagsKHR", + "value" : 5351, + "capabilities" : [ "RayTracingNV" , "RayTracingKHR" ], + "extensions" : [ "SPV_NV_ray_tracing" , "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "RayGeometryIndexKHR", + "value" : 5352, + "capabilities" : [ "RayTracingKHR" ], + "extensions" : [ "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "WarpsPerSMNV", + "value" : 5374, + "capabilities" : [ "ShaderSMBuiltinsNV" ], + "extensions" : [ "SPV_NV_shader_sm_builtins" ], + "version" : "None" + }, + { + "enumerant" : "SMCountNV", + "value" : 5375, + "capabilities" : [ "ShaderSMBuiltinsNV" ], + "extensions" : [ "SPV_NV_shader_sm_builtins" ], + "version" : "None" + }, + { + "enumerant" : "WarpIDNV", + "value" : 5376, + "capabilities" : [ "ShaderSMBuiltinsNV" ], + "extensions" : [ "SPV_NV_shader_sm_builtins" ], + "version" : "None" + }, + { + "enumerant" : "SMIDNV", + "value" : 5377, + "capabilities" : [ "ShaderSMBuiltinsNV" ], + "extensions" : [ "SPV_NV_shader_sm_builtins" ], + "version" : "None" } ] }, @@ -6018,6 +10982,24 @@ { "enumerant" : "Invocation", "value" : 4 + }, + { + "enumerant" : "QueueFamily", + "value" : 5, + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "QueueFamilyKHR", + "value" : 5, + "capabilities" : [ "VulkanMemoryModel" ], + "version" : "1.5" + }, + { + "enumerant" : "ShaderCallKHR", + "value" : 6, + "capabilities" : [ "RayTracingKHR" ], + "version" : "None" } ] }, @@ -6174,7 +11156,8 @@ }, { "enumerant" : "Groups", - "value" : 18 + "value" : 18, + "extensions" : [ "SPV_AMD_shader_ballot" ] }, { "enumerant" : "DeviceEnqueue", @@ -6427,6 +11410,23 @@ "capabilities" : [ "GroupNonUniform" ], "version" : "1.3" }, + { + "enumerant" : "ShaderLayer", + "value" : 69, + "version" : "1.5" + }, + { + "enumerant" : "ShaderViewportIndex", + "value" : 70, + "version" : "1.5" + }, + { + "enumerant" : "FragmentShadingRateKHR", + "value" : 4422, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_fragment_shading_rate" ], + "version" : "None" + }, { "enumerant" : "SubgroupBallotKHR", "value" : 4423, @@ -6440,6 +11440,27 @@ "extensions" : [ "SPV_KHR_shader_draw_parameters" ], "version" : "1.3" }, + { + "enumerant" : "WorkgroupMemoryExplicitLayoutKHR", + "value" : 4428, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_workgroup_memory_explicit_layout" ], + "version" : "None" + }, + { + "enumerant" : "WorkgroupMemoryExplicitLayout8BitAccessKHR", + "value" : 4429, + "capabilities" : [ "WorkgroupMemoryExplicitLayoutKHR" ], + "extensions" : [ "SPV_KHR_workgroup_memory_explicit_layout" ], + "version" : "None" + }, + { + "enumerant" : "WorkgroupMemoryExplicitLayout16BitAccessKHR", + "value" : 4430, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_workgroup_memory_explicit_layout" ], + "version" : "None" + }, { "enumerant" : "SubgroupVoteKHR", "value" : 
4431, @@ -6533,19 +11554,77 @@ "enumerant" : "StorageBuffer8BitAccess", "value" : 4448, "extensions" : [ "SPV_KHR_8bit_storage" ], - "version" : "None" + "version" : "1.5" }, { "enumerant" : "UniformAndStorageBuffer8BitAccess", "value" : 4449, "capabilities" : [ "StorageBuffer8BitAccess" ], "extensions" : [ "SPV_KHR_8bit_storage" ], - "version" : "None" + "version" : "1.5" }, { "enumerant" : "StoragePushConstant8", "value" : 4450, "extensions" : [ "SPV_KHR_8bit_storage" ], + "version" : "1.5" + }, + { + "enumerant" : "DenormPreserve", + "value" : 4464, + "extensions" : [ "SPV_KHR_float_controls" ], + "version" : "1.4" + }, + { + "enumerant" : "DenormFlushToZero", + "value" : 4465, + "extensions" : [ "SPV_KHR_float_controls" ], + "version" : "1.4" + }, + { + "enumerant" : "SignedZeroInfNanPreserve", + "value" : 4466, + "extensions" : [ "SPV_KHR_float_controls" ], + "version" : "1.4" + }, + { + "enumerant" : "RoundingModeRTE", + "value" : 4467, + "extensions" : [ "SPV_KHR_float_controls" ], + "version" : "1.4" + }, + { + "enumerant" : "RoundingModeRTZ", + "value" : 4468, + "extensions" : [ "SPV_KHR_float_controls" ], + "version" : "1.4" + }, + { + "enumerant" : "RayQueryProvisionalKHR", + "value" : 4471, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "enumerant" : "RayQueryKHR", + "value" : 4472, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_ray_query" ], + "version" : "None" + }, + { + "enumerant" : "RayTraversalPrimitiveCullingKHR", + "value" : 4478, + "capabilities" : [ "RayQueryKHR","RayTracingKHR" ], + "extensions" : [ "SPV_KHR_ray_query","SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "RayTracingKHR", + "value" : 4479, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_ray_tracing" ], "version" : "None" }, { @@ -6583,6 +11662,20 @@ "extensions" : [ "SPV_AMD_shader_image_load_store_lod" ], "version" : "None" }, + { + "enumerant" : "Int64ImageEXT", + "value" : 5016, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_shader_image_int64" ], + "version" : "None" + }, + { + "enumerant" : "ShaderClockKHR", + "value" : 5055, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_shader_clock" ], + "version" : "None" + }, { "enumerant" : "SampleMaskOverrideCoverageNV", "value" : 5249, @@ -6639,88 +11732,303 @@ "extensions" : [ "SPV_EXT_fragment_fully_covered" ], "version" : "None" }, + { + "enumerant" : "MeshShadingNV", + "value" : 5266, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_NV_mesh_shader" ], + "version" : "None" + }, + { + "enumerant" : "ImageFootprintNV", + "value" : 5282, + "extensions" : [ "SPV_NV_shader_image_footprint" ], + "version" : "None" + }, + { + "enumerant" : "FragmentBarycentricNV", + "value" : 5284, + "extensions" : [ "SPV_NV_fragment_shader_barycentric" ], + "version" : "None" + }, + { + "enumerant" : "ComputeDerivativeGroupQuadsNV", + "value" : 5288, + "extensions" : [ "SPV_NV_compute_shader_derivatives" ], + "version" : "None" + }, + { + "enumerant" : "FragmentDensityEXT", + "value" : 5291, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_fragment_invocation_density", "SPV_NV_shading_rate" ], + "version" : "None" + }, + { + "enumerant" : "ShadingRateNV", + "value" : 5291, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_NV_shading_rate", "SPV_EXT_fragment_invocation_density" ], + "version" : "None" + }, + { + "enumerant" : "GroupNonUniformPartitionedNV", + "value" : 5297, + "extensions" : [ 
"SPV_NV_shader_subgroup_partitioned" ], + "version" : "None" + }, + { + "enumerant" : "ShaderNonUniform", + "value" : 5301, + "capabilities" : [ "Shader" ], + "version" : "1.5" + }, { "enumerant" : "ShaderNonUniformEXT", "value" : 5301, "capabilities" : [ "Shader" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "RuntimeDescriptorArray", + "value" : 5302, + "capabilities" : [ "Shader" ], + "version" : "1.5" }, { "enumerant" : "RuntimeDescriptorArrayEXT", "value" : 5302, "capabilities" : [ "Shader" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "InputAttachmentArrayDynamicIndexing", + "value" : 5303, + "capabilities" : [ "InputAttachment" ], + "version" : "1.5" }, { "enumerant" : "InputAttachmentArrayDynamicIndexingEXT", "value" : 5303, "capabilities" : [ "InputAttachment" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "UniformTexelBufferArrayDynamicIndexing", + "value" : 5304, + "capabilities" : [ "SampledBuffer" ], + "version" : "1.5" }, { "enumerant" : "UniformTexelBufferArrayDynamicIndexingEXT", "value" : 5304, "capabilities" : [ "SampledBuffer" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "StorageTexelBufferArrayDynamicIndexing", + "value" : 5305, + "capabilities" : [ "ImageBuffer" ], + "version" : "1.5" }, { "enumerant" : "StorageTexelBufferArrayDynamicIndexingEXT", "value" : 5305, "capabilities" : [ "ImageBuffer" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "UniformBufferArrayNonUniformIndexing", + "value" : 5306, + "capabilities" : [ "ShaderNonUniform" ], + "version" : "1.5" }, { "enumerant" : "UniformBufferArrayNonUniformIndexingEXT", "value" : 5306, - "capabilities" : [ "ShaderNonUniformEXT" ], + "capabilities" : [ "ShaderNonUniform" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "SampledImageArrayNonUniformIndexing", + "value" : 5307, + "capabilities" : [ "ShaderNonUniform" ], + "version" : "1.5" }, { "enumerant" : "SampledImageArrayNonUniformIndexingEXT", "value" : 5307, - "capabilities" : [ "ShaderNonUniformEXT" ], + "capabilities" : [ "ShaderNonUniform" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "StorageBufferArrayNonUniformIndexing", + "value" : 5308, + "capabilities" : [ "ShaderNonUniform" ], + "version" : "1.5" }, { "enumerant" : "StorageBufferArrayNonUniformIndexingEXT", "value" : 5308, - "capabilities" : [ "ShaderNonUniformEXT" ], + "capabilities" : [ "ShaderNonUniform" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "StorageImageArrayNonUniformIndexing", + "value" : 5309, + "capabilities" : [ "ShaderNonUniform" ], + "version" : "1.5" }, { "enumerant" : "StorageImageArrayNonUniformIndexingEXT", "value" : 5309, - "capabilities" : [ "ShaderNonUniformEXT" ], + "capabilities" : [ "ShaderNonUniform" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "InputAttachmentArrayNonUniformIndexing", + "value" : 5310, + "capabilities" : [ "InputAttachment", "ShaderNonUniform" ], + "version" : "1.5" }, { "enumerant" : 
"InputAttachmentArrayNonUniformIndexingEXT", "value" : 5310, - "capabilities" : [ "InputAttachment", "ShaderNonUniformEXT" ], + "capabilities" : [ "InputAttachment", "ShaderNonUniform" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "UniformTexelBufferArrayNonUniformIndexing", + "value" : 5311, + "capabilities" : [ "SampledBuffer", "ShaderNonUniform" ], + "version" : "1.5" }, { "enumerant" : "UniformTexelBufferArrayNonUniformIndexingEXT", "value" : 5311, - "capabilities" : [ "SampledBuffer", "ShaderNonUniformEXT" ], + "capabilities" : [ "SampledBuffer", "ShaderNonUniform" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], - "version" : "None" + "version" : "1.5" + }, + { + "enumerant" : "StorageTexelBufferArrayNonUniformIndexing", + "value" : 5312, + "capabilities" : [ "ImageBuffer", "ShaderNonUniform" ], + "version" : "1.5" }, { "enumerant" : "StorageTexelBufferArrayNonUniformIndexingEXT", "value" : 5312, - "capabilities" : [ "ImageBuffer", "ShaderNonUniformEXT" ], + "capabilities" : [ "ImageBuffer", "ShaderNonUniform" ], "extensions" : [ "SPV_EXT_descriptor_indexing" ], + "version" : "1.5" + }, + { + "enumerant" : "RayTracingNV", + "value" : 5340, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_NV_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "VulkanMemoryModel", + "value" : 5345, + "version" : "1.5" + }, + { + "enumerant" : "VulkanMemoryModelKHR", + "value" : 5345, + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "VulkanMemoryModelDeviceScope", + "value" : 5346, + "version" : "1.5" + }, + { + "enumerant" : "VulkanMemoryModelDeviceScopeKHR", + "value" : 5346, + "extensions" : [ "SPV_KHR_vulkan_memory_model" ], + "version" : "1.5" + }, + { + "enumerant" : "PhysicalStorageBufferAddresses", + "value" : 5347, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_physical_storage_buffer", "SPV_KHR_physical_storage_buffer" ], + "version" : "1.5" + }, + { + "enumerant" : "PhysicalStorageBufferAddressesEXT", + "value" : 5347, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_physical_storage_buffer" ], + "version" : "1.5" + }, + { + "enumerant" : "ComputeDerivativeGroupLinearNV", + "value" : 5350, + "extensions" : [ "SPV_NV_compute_shader_derivatives" ], + "version" : "None" + }, + { + "enumerant" : "RayTracingProvisionalKHR", + "value" : 5353, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_KHR_ray_tracing" ], + "version" : "None" + }, + { + "enumerant" : "CooperativeMatrixNV", + "value" : 5357, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_NV_cooperative_matrix" ], + "version" : "None" + }, + { + "enumerant" : "FragmentShaderSampleInterlockEXT", + "value" : 5363, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "FragmentShaderShadingRateInterlockEXT", + "value" : 5372, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "ShaderSMBuiltinsNV", + "value" : 5373, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_NV_shader_sm_builtins" ], + "version" : "None" + }, + { + "enumerant" : "FragmentShaderPixelInterlockEXT", + "value" : 5378, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_fragment_shader_interlock" ], + "version" : "None" + }, + { + "enumerant" : "DemoteToHelperInvocationEXT", + "value" : 5379, + "capabilities" : [ 
"Shader" ], + "extensions" : [ "SPV_EXT_demote_to_helper_invocation" ], "version" : "None" }, { @@ -6742,9 +12050,278 @@ "version" : "None" }, { - "enumerant" : "GroupNonUniformPartitionedNV", - "value" : 5297, - "extensions" : [ "SPV_NV_shader_subgroup_partitioned" ], + "enumerant" : "SubgroupImageMediaBlockIOINTEL", + "value" : 5579, + "extensions" : [ "SPV_INTEL_media_block_io" ], + "version" : "None" + }, + { + "enumerant" : "RoundToInfinityINTEL", + "value" : 5582, + "extensions" : [ "SPV_INTEL_float_controls2" ], + "version" : "None" + }, + { + "enumerant" : "FloatingPointModeINTEL", + "value" : 5583, + "extensions" : [ "SPV_INTEL_float_controls2" ], + "version" : "None" + }, + { + "enumerant" : "IntegerFunctions2INTEL", + "value" : 5584, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_INTEL_shader_integer_functions2" ], + "version" : "None" + }, + { + "enumerant" : "FunctionPointersINTEL", + "value" : 5603, + "extensions" : [ "SPV_INTEL_function_pointers" ], + "version" : "None" + }, + { + "enumerant" : "IndirectReferencesINTEL", + "value" : 5604, + "extensions" : [ "SPV_INTEL_function_pointers" ], + "version" : "None" + }, + { + "enumerant" : "AsmINTEL", + "value" : 5606, + "extensions" : [ "SPV_INTEL_inline_assembly" ], + "version" : "None" + }, + { + "enumerant" : "AtomicFloat32MinMaxEXT", + "value" : 5612, + "extensions" : [ "SPV_EXT_shader_atomic_float_min_max" ], + "version" : "None" + }, + { + "enumerant" : "AtomicFloat64MinMaxEXT", + "value" : 5613, + "extensions" : [ "SPV_EXT_shader_atomic_float_min_max" ], + "version" : "None" + }, + { + "enumerant" : "AtomicFloat16MinMaxEXT", + "value" : 5616, + "extensions" : [ "SPV_EXT_shader_atomic_float_min_max" ], + "version" : "None" + }, + { + "enumerant" : "VectorComputeINTEL", + "value" : 5617, + "capabilities" : [ "VectorAnyINTEL" ], + "extensions" : [ "SPV_INTEL_vector_compute" ], + "version" : "None" + }, + { + "enumerant" : "VectorAnyINTEL", + "value" : 5619, + "extensions" : [ "SPV_INTEL_vector_compute" ], + "version" : "None" + }, + { + "enumerant" : "SubgroupAvcMotionEstimationINTEL", + "value" : 5696, + "extensions" : [ "SPV_INTEL_device_side_avc_motion_estimation" ], + "version" : "None" + }, + { + "enumerant" : "SubgroupAvcMotionEstimationIntraINTEL", + "value" : 5697, + "extensions" : [ "SPV_INTEL_device_side_avc_motion_estimation" ], + "version" : "None" + }, + { + "enumerant" : "SubgroupAvcMotionEstimationChromaINTEL", + "value" : 5698, + "extensions" : [ "SPV_INTEL_device_side_avc_motion_estimation" ], + "version" : "None" + }, + { + "enumerant" : "VariableLengthArrayINTEL", + "value" : 5817, + "extensions" : [ "SPV_INTEL_variable_length_array" ], + "version" : "None" + }, + { + "enumerant" : "FunctionFloatControlINTEL", + "value" : 5821, + "extensions" : [ "SPV_INTEL_float_controls2" ], + "version" : "None" + }, + { + "enumerant" : "FPGAMemoryAttributesINTEL", + "value" : 5824, + "extensions" : [ "SPV_INTEL_fpga_memory_attributes" ], + "version" : "None" + }, + { + "enumerant" : "FPFastMathModeINTEL", + "value" : 5837, + "capabilities" : [ "Kernel" ], + "extensions" : [ "SPV_INTEL_fp_fast_math_mode" ], + "version" : "None" + }, + { + "enumerant" : "ArbitraryPrecisionIntegersINTEL", + "value" : 5844, + "extensions" : [ "SPV_INTEL_arbitrary_precision_integers" ], + "version" : "None" + }, + { + "enumerant" : "UnstructuredLoopControlsINTEL", + "value" : 5886, + "extensions" : [ "SPV_INTEL_unstructured_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "FPGALoopControlsINTEL", + "value" : 5888, + 
"extensions" : [ "SPV_INTEL_fpga_loop_controls" ], + "version" : "None" + }, + { + "enumerant" : "KernelAttributesINTEL", + "value" : 5892, + "extensions" : [ "SPV_INTEL_kernel_attributes" ], + "version" : "None" + }, + { + "enumerant" : "FPGAKernelAttributesINTEL", + "value" : 5897, + "extensions" : [ "SPV_INTEL_kernel_attributes" ], + "version" : "None" + }, + { + "enumerant" : "FPGAMemoryAccessesINTEL", + "value" : 5898, + "extensions" : [ "SPV_INTEL_fpga_memory_accesses" ], + "version" : "None" + }, + { + "enumerant" : "FPGAClusterAttributesINTEL", + "value" : 5904, + "extensions" : [ "SPV_INTEL_fpga_cluster_attributes" ], + "version" : "None" + }, + { + "enumerant" : "LoopFuseINTEL", + "value" : 5906, + "extensions" : [ "SPV_INTEL_loop_fuse" ], + "version" : "None" + }, + { + "enumerant" : "FPGABufferLocationINTEL", + "value" : 5920, + "extensions" : [ "SPV_INTEL_fpga_buffer_location" ], + "version" : "None" + }, + { + "enumerant" : "USMStorageClassesINTEL", + "value" : 5935, + "extensions" : [ "SPV_INTEL_usm_storage_classes" ], + "version" : "None" + }, + { + "enumerant" : "IOPipesINTEL", + "value" : 5943, + "extensions" : [ "SPV_INTEL_io_pipes" ], + "version" : "None" + }, + { + "enumerant" : "BlockingPipesINTEL", + "value" : 5945, + "extensions" : [ "SPV_INTEL_blocking_pipes" ], + "version" : "None" + }, + { + "enumerant" : "FPGARegINTEL", + "value" : 5948, + "extensions" : [ "SPV_INTEL_fpga_reg" ], + "version" : "None" + }, + { + "enumerant" : "AtomicFloat32AddEXT", + "value" : 6033, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_shader_atomic_float_add" ], + "version" : "None" + }, + { + "enumerant" : "AtomicFloat64AddEXT", + "value" : 6034, + "capabilities" : [ "Shader" ], + "extensions" : [ "SPV_EXT_shader_atomic_float_add" ], + "version" : "None" + }, + { + "enumerant" : "LongConstantCompositeINTEL", + "value" : 6089, + "extensions" : [ "SPV_INTEL_long_constant_composite" ], + "version" : "None" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "RayQueryIntersection", + "enumerants" : [ + { + "enumerant" : "RayQueryCandidateIntersectionKHR", + "value" : 0, + "capabilities" : [ "RayQueryKHR" ], + "version" : "None" + }, + { + "enumerant" : "RayQueryCommittedIntersectionKHR", + "value" : 1, + "capabilities" : [ "RayQueryKHR" ], + "version" : "None" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "RayQueryCommittedIntersectionType", + "enumerants" : [ + { + "enumerant" : "RayQueryCommittedIntersectionNoneKHR", + "value" : 0, + "capabilities" : [ "RayQueryKHR" ], + "version" : "None" + }, + { + "enumerant" : "RayQueryCommittedIntersectionTriangleKHR", + "value" : 1, + "capabilities" : [ "RayQueryKHR" ], + "version" : "None" + }, + { + "enumerant" : "RayQueryCommittedIntersectionGeneratedKHR", + "value" : 2, + "capabilities" : [ "RayQueryKHR" ], + "version" : "None" + } + ] + }, + { + "category" : "ValueEnum", + "kind" : "RayQueryCandidateIntersectionType", + "enumerants" : [ + { + "enumerant" : "RayQueryCandidateIntersectionTriangleKHR", + "value" : 0, + "capabilities" : [ "RayQueryKHR" ], + "version" : "None" + }, + { + "enumerant" : "RayQueryCandidateIntersectionAABBKHR", + "value" : 1, + "capabilities" : [ "RayQueryKHR" ], "version" : "None" } ] diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.cs b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.cs new file mode 100755 index 000000000..c83b731c1 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.cs @@ -0,0 
+1,1707 @@ +// Copyright (c) 2014-2020 The Khronos Group Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and/or associated documentation files (the "Materials"), +// to deal in the Materials without restriction, including without limitation +// the rights to use, copy, modify, merge, publish, distribute, sublicense, +// and/or sell copies of the Materials, and to permit persons to whom the +// Materials are furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +// IN THE MATERIALS. + +// This header is automatically generated by the same tool that creates +// the Binary Section of the SPIR-V specification. + +// Enumeration tokens for SPIR-V, in various styles: +// C, C++, C++11, JSON, Lua, Python, C#, D +// +// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL +// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL +// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL +// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL +// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +// - C# will use enum classes in the Specification class located in the "Spv" namespace, +// e.g.: Spv.Specification.SourceLanguage.GLSL +// - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL +// +// Some tokens act like mask values, which can be OR'd together, +// while others are mutually exclusive. The mask-like ones have +// "Mask" in their name, and a parallel enum that has the shift +// amount (1 << x) for each corresponding enumerant. 
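+// A minimal illustrative sketch (editor's note, non-normative, not emitted
+// by the generator) of the two conventions described above, using the
+// constants and enums defined later in this file:
+//
+//   // C# tokens live under the Spv.Specification class:
+//   uint magic = Spv.Specification.MagicNumber;            // 0x07230203
+//
+//   // Mask-style enums OR together; the parallel *Shift enum holds the
+//   // bit position, so (1 << shift) reproduces each mask value:
+//   uint viaMask  = (uint)(Spv.Specification.ImageOperandsMask.Bias
+//                        | Spv.Specification.ImageOperandsMask.Lod);
+//   uint viaShift = (1u << (int)Spv.Specification.ImageOperandsShift.Bias)
+//                 | (1u << (int)Spv.Specification.ImageOperandsShift.Lod);
+//   // viaMask == viaShift == 0x00000003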
+ +namespace Spv +{ + + public static class Specification + { + public const uint MagicNumber = 0x07230203; + public const uint Version = 0x00010500; + public const uint Revision = 4; + public const uint OpCodeMask = 0xffff; + public const uint WordCountShift = 16; + + public enum SourceLanguage + { + Unknown = 0, + ESSL = 1, + GLSL = 2, + OpenCL_C = 3, + OpenCL_CPP = 4, + HLSL = 5, + } + + public enum ExecutionModel + { + Vertex = 0, + TessellationControl = 1, + TessellationEvaluation = 2, + Geometry = 3, + Fragment = 4, + GLCompute = 5, + Kernel = 6, + TaskNV = 5267, + MeshNV = 5268, + RayGenerationKHR = 5313, + RayGenerationNV = 5313, + IntersectionKHR = 5314, + IntersectionNV = 5314, + AnyHitKHR = 5315, + AnyHitNV = 5315, + ClosestHitKHR = 5316, + ClosestHitNV = 5316, + MissKHR = 5317, + MissNV = 5317, + CallableKHR = 5318, + CallableNV = 5318, + } + + public enum AddressingModel + { + Logical = 0, + Physical32 = 1, + Physical64 = 2, + PhysicalStorageBuffer64 = 5348, + PhysicalStorageBuffer64EXT = 5348, + } + + public enum MemoryModel + { + Simple = 0, + GLSL450 = 1, + OpenCL = 2, + Vulkan = 3, + VulkanKHR = 3, + } + + public enum ExecutionMode + { + Invocations = 0, + SpacingEqual = 1, + SpacingFractionalEven = 2, + SpacingFractionalOdd = 3, + VertexOrderCw = 4, + VertexOrderCcw = 5, + PixelCenterInteger = 6, + OriginUpperLeft = 7, + OriginLowerLeft = 8, + EarlyFragmentTests = 9, + PointMode = 10, + Xfb = 11, + DepthReplacing = 12, + DepthGreater = 14, + DepthLess = 15, + DepthUnchanged = 16, + LocalSize = 17, + LocalSizeHint = 18, + InputPoints = 19, + InputLines = 20, + InputLinesAdjacency = 21, + Triangles = 22, + InputTrianglesAdjacency = 23, + Quads = 24, + Isolines = 25, + OutputVertices = 26, + OutputPoints = 27, + OutputLineStrip = 28, + OutputTriangleStrip = 29, + VecTypeHint = 30, + ContractionOff = 31, + Initializer = 33, + Finalizer = 34, + SubgroupSize = 35, + SubgroupsPerWorkgroup = 36, + SubgroupsPerWorkgroupId = 37, + LocalSizeId = 38, + LocalSizeHintId = 39, + PostDepthCoverage = 4446, + DenormPreserve = 4459, + DenormFlushToZero = 4460, + SignedZeroInfNanPreserve = 4461, + RoundingModeRTE = 4462, + RoundingModeRTZ = 4463, + StencilRefReplacingEXT = 5027, + OutputLinesNV = 5269, + OutputPrimitivesNV = 5270, + DerivativeGroupQuadsNV = 5289, + DerivativeGroupLinearNV = 5290, + OutputTrianglesNV = 5298, + PixelInterlockOrderedEXT = 5366, + PixelInterlockUnorderedEXT = 5367, + SampleInterlockOrderedEXT = 5368, + SampleInterlockUnorderedEXT = 5369, + ShadingRateInterlockOrderedEXT = 5370, + ShadingRateInterlockUnorderedEXT = 5371, + SharedLocalMemorySizeINTEL = 5618, + RoundingModeRTPINTEL = 5620, + RoundingModeRTNINTEL = 5621, + FloatingPointModeALTINTEL = 5622, + FloatingPointModeIEEEINTEL = 5623, + MaxWorkgroupSizeINTEL = 5893, + MaxWorkDimINTEL = 5894, + NoGlobalOffsetINTEL = 5895, + NumSIMDWorkitemsINTEL = 5896, + SchedulerTargetFmaxMhzINTEL = 5903, + } + + public enum StorageClass + { + UniformConstant = 0, + Input = 1, + Uniform = 2, + Output = 3, + Workgroup = 4, + CrossWorkgroup = 5, + Private = 6, + Function = 7, + Generic = 8, + PushConstant = 9, + AtomicCounter = 10, + Image = 11, + StorageBuffer = 12, + CallableDataKHR = 5328, + CallableDataNV = 5328, + IncomingCallableDataKHR = 5329, + IncomingCallableDataNV = 5329, + RayPayloadKHR = 5338, + RayPayloadNV = 5338, + HitAttributeKHR = 5339, + HitAttributeNV = 5339, + IncomingRayPayloadKHR = 5342, + IncomingRayPayloadNV = 5342, + ShaderRecordBufferKHR = 5343, + ShaderRecordBufferNV = 5343, + PhysicalStorageBuffer 
= 5349, + PhysicalStorageBufferEXT = 5349, + CodeSectionINTEL = 5605, + DeviceOnlyINTEL = 5936, + HostOnlyINTEL = 5937, + } + + public enum Dim + { + Dim1D = 0, + Dim2D = 1, + Dim3D = 2, + Cube = 3, + Rect = 4, + Buffer = 5, + SubpassData = 6, + } + + public enum SamplerAddressingMode + { + None = 0, + ClampToEdge = 1, + Clamp = 2, + Repeat = 3, + RepeatMirrored = 4, + } + + public enum SamplerFilterMode + { + Nearest = 0, + Linear = 1, + } + + public enum ImageFormat + { + Unknown = 0, + Rgba32f = 1, + Rgba16f = 2, + R32f = 3, + Rgba8 = 4, + Rgba8Snorm = 5, + Rg32f = 6, + Rg16f = 7, + R11fG11fB10f = 8, + R16f = 9, + Rgba16 = 10, + Rgb10A2 = 11, + Rg16 = 12, + Rg8 = 13, + R16 = 14, + R8 = 15, + Rgba16Snorm = 16, + Rg16Snorm = 17, + Rg8Snorm = 18, + R16Snorm = 19, + R8Snorm = 20, + Rgba32i = 21, + Rgba16i = 22, + Rgba8i = 23, + R32i = 24, + Rg32i = 25, + Rg16i = 26, + Rg8i = 27, + R16i = 28, + R8i = 29, + Rgba32ui = 30, + Rgba16ui = 31, + Rgba8ui = 32, + R32ui = 33, + Rgb10a2ui = 34, + Rg32ui = 35, + Rg16ui = 36, + Rg8ui = 37, + R16ui = 38, + R8ui = 39, + R64ui = 40, + R64i = 41, + } + + public enum ImageChannelOrder + { + R = 0, + A = 1, + RG = 2, + RA = 3, + RGB = 4, + RGBA = 5, + BGRA = 6, + ARGB = 7, + Intensity = 8, + Luminance = 9, + Rx = 10, + RGx = 11, + RGBx = 12, + Depth = 13, + DepthStencil = 14, + sRGB = 15, + sRGBx = 16, + sRGBA = 17, + sBGRA = 18, + ABGR = 19, + } + + public enum ImageChannelDataType + { + SnormInt8 = 0, + SnormInt16 = 1, + UnormInt8 = 2, + UnormInt16 = 3, + UnormShort565 = 4, + UnormShort555 = 5, + UnormInt101010 = 6, + SignedInt8 = 7, + SignedInt16 = 8, + SignedInt32 = 9, + UnsignedInt8 = 10, + UnsignedInt16 = 11, + UnsignedInt32 = 12, + HalfFloat = 13, + Float = 14, + UnormInt24 = 15, + UnormInt101010_2 = 16, + } + + public enum ImageOperandsShift + { + Bias = 0, + Lod = 1, + Grad = 2, + ConstOffset = 3, + Offset = 4, + ConstOffsets = 5, + Sample = 6, + MinLod = 7, + MakeTexelAvailable = 8, + MakeTexelAvailableKHR = 8, + MakeTexelVisible = 9, + MakeTexelVisibleKHR = 9, + NonPrivateTexel = 10, + NonPrivateTexelKHR = 10, + VolatileTexel = 11, + VolatileTexelKHR = 11, + SignExtend = 12, + ZeroExtend = 13, + } + + public enum ImageOperandsMask + { + MaskNone = 0, + Bias = 0x00000001, + Lod = 0x00000002, + Grad = 0x00000004, + ConstOffset = 0x00000008, + Offset = 0x00000010, + ConstOffsets = 0x00000020, + Sample = 0x00000040, + MinLod = 0x00000080, + MakeTexelAvailable = 0x00000100, + MakeTexelAvailableKHR = 0x00000100, + MakeTexelVisible = 0x00000200, + MakeTexelVisibleKHR = 0x00000200, + NonPrivateTexel = 0x00000400, + NonPrivateTexelKHR = 0x00000400, + VolatileTexel = 0x00000800, + VolatileTexelKHR = 0x00000800, + SignExtend = 0x00001000, + ZeroExtend = 0x00002000, + } + + public enum FPFastMathModeShift + { + NotNaN = 0, + NotInf = 1, + NSZ = 2, + AllowRecip = 3, + Fast = 4, + AllowContractFastINTEL = 16, + AllowReassocINTEL = 17, + } + + public enum FPFastMathModeMask + { + MaskNone = 0, + NotNaN = 0x00000001, + NotInf = 0x00000002, + NSZ = 0x00000004, + AllowRecip = 0x00000008, + Fast = 0x00000010, + AllowContractFastINTEL = 0x00010000, + AllowReassocINTEL = 0x00020000, + } + + public enum FPRoundingMode + { + RTE = 0, + RTZ = 1, + RTP = 2, + RTN = 3, + } + + public enum LinkageType + { + Export = 0, + Import = 1, + } + + public enum AccessQualifier + { + ReadOnly = 0, + WriteOnly = 1, + ReadWrite = 2, + } + + public enum FunctionParameterAttribute + { + Zext = 0, + Sext = 1, + ByVal = 2, + Sret = 3, + NoAlias = 4, + NoCapture = 5, + NoWrite = 6, + 
NoReadWrite = 7, + } + + public enum Decoration + { + RelaxedPrecision = 0, + SpecId = 1, + Block = 2, + BufferBlock = 3, + RowMajor = 4, + ColMajor = 5, + ArrayStride = 6, + MatrixStride = 7, + GLSLShared = 8, + GLSLPacked = 9, + CPacked = 10, + BuiltIn = 11, + NoPerspective = 13, + Flat = 14, + Patch = 15, + Centroid = 16, + Sample = 17, + Invariant = 18, + Restrict = 19, + Aliased = 20, + Volatile = 21, + Constant = 22, + Coherent = 23, + NonWritable = 24, + NonReadable = 25, + Uniform = 26, + UniformId = 27, + SaturatedConversion = 28, + Stream = 29, + Location = 30, + Component = 31, + Index = 32, + Binding = 33, + DescriptorSet = 34, + Offset = 35, + XfbBuffer = 36, + XfbStride = 37, + FuncParamAttr = 38, + FPRoundingMode = 39, + FPFastMathMode = 40, + LinkageAttributes = 41, + NoContraction = 42, + InputAttachmentIndex = 43, + Alignment = 44, + MaxByteOffset = 45, + AlignmentId = 46, + MaxByteOffsetId = 47, + NoSignedWrap = 4469, + NoUnsignedWrap = 4470, + ExplicitInterpAMD = 4999, + OverrideCoverageNV = 5248, + PassthroughNV = 5250, + ViewportRelativeNV = 5252, + SecondaryViewportRelativeNV = 5256, + PerPrimitiveNV = 5271, + PerViewNV = 5272, + PerTaskNV = 5273, + PerVertexNV = 5285, + NonUniform = 5300, + NonUniformEXT = 5300, + RestrictPointer = 5355, + RestrictPointerEXT = 5355, + AliasedPointer = 5356, + AliasedPointerEXT = 5356, + SIMTCallINTEL = 5599, + ReferencedIndirectlyINTEL = 5602, + ClobberINTEL = 5607, + SideEffectsINTEL = 5608, + VectorComputeVariableINTEL = 5624, + FuncParamIOKindINTEL = 5625, + VectorComputeFunctionINTEL = 5626, + StackCallINTEL = 5627, + GlobalVariableOffsetINTEL = 5628, + CounterBuffer = 5634, + HlslCounterBufferGOOGLE = 5634, + HlslSemanticGOOGLE = 5635, + UserSemantic = 5635, + UserTypeGOOGLE = 5636, + FunctionRoundingModeINTEL = 5822, + FunctionDenormModeINTEL = 5823, + RegisterINTEL = 5825, + MemoryINTEL = 5826, + NumbanksINTEL = 5827, + BankwidthINTEL = 5828, + MaxPrivateCopiesINTEL = 5829, + SinglepumpINTEL = 5830, + DoublepumpINTEL = 5831, + MaxReplicatesINTEL = 5832, + SimpleDualPortINTEL = 5833, + MergeINTEL = 5834, + BankBitsINTEL = 5835, + ForcePow2DepthINTEL = 5836, + BurstCoalesceINTEL = 5899, + CacheSizeINTEL = 5900, + DontStaticallyCoalesceINTEL = 5901, + PrefetchINTEL = 5902, + StallEnableINTEL = 5905, + FuseLoopsInFunctionINTEL = 5907, + BufferLocationINTEL = 5921, + IOPipeStorageINTEL = 5944, + FunctionFloatingPointModeINTEL = 6080, + SingleElementVectorINTEL = 6085, + VectorComputeCallableFunctionINTEL = 6087, + } + + public enum BuiltIn + { + Position = 0, + PointSize = 1, + ClipDistance = 3, + CullDistance = 4, + VertexId = 5, + InstanceId = 6, + PrimitiveId = 7, + InvocationId = 8, + Layer = 9, + ViewportIndex = 10, + TessLevelOuter = 11, + TessLevelInner = 12, + TessCoord = 13, + PatchVertices = 14, + FragCoord = 15, + PointCoord = 16, + FrontFacing = 17, + SampleId = 18, + SamplePosition = 19, + SampleMask = 20, + FragDepth = 22, + HelperInvocation = 23, + NumWorkgroups = 24, + WorkgroupSize = 25, + WorkgroupId = 26, + LocalInvocationId = 27, + GlobalInvocationId = 28, + LocalInvocationIndex = 29, + WorkDim = 30, + GlobalSize = 31, + EnqueuedWorkgroupSize = 32, + GlobalOffset = 33, + GlobalLinearId = 34, + SubgroupSize = 36, + SubgroupMaxSize = 37, + NumSubgroups = 38, + NumEnqueuedSubgroups = 39, + SubgroupId = 40, + SubgroupLocalInvocationId = 41, + VertexIndex = 42, + InstanceIndex = 43, + SubgroupEqMask = 4416, + SubgroupEqMaskKHR = 4416, + SubgroupGeMask = 4417, + SubgroupGeMaskKHR = 4417, + SubgroupGtMask = 4418, + 
SubgroupGtMaskKHR = 4418, + SubgroupLeMask = 4419, + SubgroupLeMaskKHR = 4419, + SubgroupLtMask = 4420, + SubgroupLtMaskKHR = 4420, + BaseVertex = 4424, + BaseInstance = 4425, + DrawIndex = 4426, + PrimitiveShadingRateKHR = 4432, + DeviceIndex = 4438, + ViewIndex = 4440, + ShadingRateKHR = 4444, + BaryCoordNoPerspAMD = 4992, + BaryCoordNoPerspCentroidAMD = 4993, + BaryCoordNoPerspSampleAMD = 4994, + BaryCoordSmoothAMD = 4995, + BaryCoordSmoothCentroidAMD = 4996, + BaryCoordSmoothSampleAMD = 4997, + BaryCoordPullModelAMD = 4998, + FragStencilRefEXT = 5014, + ViewportMaskNV = 5253, + SecondaryPositionNV = 5257, + SecondaryViewportMaskNV = 5258, + PositionPerViewNV = 5261, + ViewportMaskPerViewNV = 5262, + FullyCoveredEXT = 5264, + TaskCountNV = 5274, + PrimitiveCountNV = 5275, + PrimitiveIndicesNV = 5276, + ClipDistancePerViewNV = 5277, + CullDistancePerViewNV = 5278, + LayerPerViewNV = 5279, + MeshViewCountNV = 5280, + MeshViewIndicesNV = 5281, + BaryCoordNV = 5286, + BaryCoordNoPerspNV = 5287, + FragSizeEXT = 5292, + FragmentSizeNV = 5292, + FragInvocationCountEXT = 5293, + InvocationsPerPixelNV = 5293, + LaunchIdKHR = 5319, + LaunchIdNV = 5319, + LaunchSizeKHR = 5320, + LaunchSizeNV = 5320, + WorldRayOriginKHR = 5321, + WorldRayOriginNV = 5321, + WorldRayDirectionKHR = 5322, + WorldRayDirectionNV = 5322, + ObjectRayOriginKHR = 5323, + ObjectRayOriginNV = 5323, + ObjectRayDirectionKHR = 5324, + ObjectRayDirectionNV = 5324, + RayTminKHR = 5325, + RayTminNV = 5325, + RayTmaxKHR = 5326, + RayTmaxNV = 5326, + InstanceCustomIndexKHR = 5327, + InstanceCustomIndexNV = 5327, + ObjectToWorldKHR = 5330, + ObjectToWorldNV = 5330, + WorldToObjectKHR = 5331, + WorldToObjectNV = 5331, + HitTNV = 5332, + HitKindKHR = 5333, + HitKindNV = 5333, + IncomingRayFlagsKHR = 5351, + IncomingRayFlagsNV = 5351, + RayGeometryIndexKHR = 5352, + WarpsPerSMNV = 5374, + SMCountNV = 5375, + WarpIDNV = 5376, + SMIDNV = 5377, + } + + public enum SelectionControlShift + { + Flatten = 0, + DontFlatten = 1, + } + + public enum SelectionControlMask + { + MaskNone = 0, + Flatten = 0x00000001, + DontFlatten = 0x00000002, + } + + public enum LoopControlShift + { + Unroll = 0, + DontUnroll = 1, + DependencyInfinite = 2, + DependencyLength = 3, + MinIterations = 4, + MaxIterations = 5, + IterationMultiple = 6, + PeelCount = 7, + PartialCount = 8, + InitiationIntervalINTEL = 16, + MaxConcurrencyINTEL = 17, + DependencyArrayINTEL = 18, + PipelineEnableINTEL = 19, + LoopCoalesceINTEL = 20, + MaxInterleavingINTEL = 21, + SpeculatedIterationsINTEL = 22, + NoFusionINTEL = 23, + } + + public enum LoopControlMask + { + MaskNone = 0, + Unroll = 0x00000001, + DontUnroll = 0x00000002, + DependencyInfinite = 0x00000004, + DependencyLength = 0x00000008, + MinIterations = 0x00000010, + MaxIterations = 0x00000020, + IterationMultiple = 0x00000040, + PeelCount = 0x00000080, + PartialCount = 0x00000100, + InitiationIntervalINTEL = 0x00010000, + MaxConcurrencyINTEL = 0x00020000, + DependencyArrayINTEL = 0x00040000, + PipelineEnableINTEL = 0x00080000, + LoopCoalesceINTEL = 0x00100000, + MaxInterleavingINTEL = 0x00200000, + SpeculatedIterationsINTEL = 0x00400000, + NoFusionINTEL = 0x00800000, + } + + public enum FunctionControlShift + { + Inline = 0, + DontInline = 1, + Pure = 2, + Const = 3, + } + + public enum FunctionControlMask + { + MaskNone = 0, + Inline = 0x00000001, + DontInline = 0x00000002, + Pure = 0x00000004, + Const = 0x00000008, + } + + public enum MemorySemanticsShift + { + Acquire = 1, + Release = 2, + AcquireRelease = 3, + 
SequentiallyConsistent = 4, + UniformMemory = 6, + SubgroupMemory = 7, + WorkgroupMemory = 8, + CrossWorkgroupMemory = 9, + AtomicCounterMemory = 10, + ImageMemory = 11, + OutputMemory = 12, + OutputMemoryKHR = 12, + MakeAvailable = 13, + MakeAvailableKHR = 13, + MakeVisible = 14, + MakeVisibleKHR = 14, + Volatile = 15, + } + + public enum MemorySemanticsMask + { + MaskNone = 0, + Acquire = 0x00000002, + Release = 0x00000004, + AcquireRelease = 0x00000008, + SequentiallyConsistent = 0x00000010, + UniformMemory = 0x00000040, + SubgroupMemory = 0x00000080, + WorkgroupMemory = 0x00000100, + CrossWorkgroupMemory = 0x00000200, + AtomicCounterMemory = 0x00000400, + ImageMemory = 0x00000800, + OutputMemory = 0x00001000, + OutputMemoryKHR = 0x00001000, + MakeAvailable = 0x00002000, + MakeAvailableKHR = 0x00002000, + MakeVisible = 0x00004000, + MakeVisibleKHR = 0x00004000, + Volatile = 0x00008000, + } + + public enum MemoryAccessShift + { + Volatile = 0, + Aligned = 1, + Nontemporal = 2, + MakePointerAvailable = 3, + MakePointerAvailableKHR = 3, + MakePointerVisible = 4, + MakePointerVisibleKHR = 4, + NonPrivatePointer = 5, + NonPrivatePointerKHR = 5, + } + + public enum MemoryAccessMask + { + MaskNone = 0, + Volatile = 0x00000001, + Aligned = 0x00000002, + Nontemporal = 0x00000004, + MakePointerAvailable = 0x00000008, + MakePointerAvailableKHR = 0x00000008, + MakePointerVisible = 0x00000010, + MakePointerVisibleKHR = 0x00000010, + NonPrivatePointer = 0x00000020, + NonPrivatePointerKHR = 0x00000020, + } + + public enum Scope + { + CrossDevice = 0, + Device = 1, + Workgroup = 2, + Subgroup = 3, + Invocation = 4, + QueueFamily = 5, + QueueFamilyKHR = 5, + ShaderCallKHR = 6, + } + + public enum GroupOperation + { + Reduce = 0, + InclusiveScan = 1, + ExclusiveScan = 2, + ClusteredReduce = 3, + PartitionedReduceNV = 6, + PartitionedInclusiveScanNV = 7, + PartitionedExclusiveScanNV = 8, + } + + public enum KernelEnqueueFlags + { + NoWait = 0, + WaitKernel = 1, + WaitWorkGroup = 2, + } + + public enum KernelProfilingInfoShift + { + CmdExecTime = 0, + } + + public enum KernelProfilingInfoMask + { + MaskNone = 0, + CmdExecTime = 0x00000001, + } + + public enum Capability + { + Matrix = 0, + Shader = 1, + Geometry = 2, + Tessellation = 3, + Addresses = 4, + Linkage = 5, + Kernel = 6, + Vector16 = 7, + Float16Buffer = 8, + Float16 = 9, + Float64 = 10, + Int64 = 11, + Int64Atomics = 12, + ImageBasic = 13, + ImageReadWrite = 14, + ImageMipmap = 15, + Pipes = 17, + Groups = 18, + DeviceEnqueue = 19, + LiteralSampler = 20, + AtomicStorage = 21, + Int16 = 22, + TessellationPointSize = 23, + GeometryPointSize = 24, + ImageGatherExtended = 25, + StorageImageMultisample = 27, + UniformBufferArrayDynamicIndexing = 28, + SampledImageArrayDynamicIndexing = 29, + StorageBufferArrayDynamicIndexing = 30, + StorageImageArrayDynamicIndexing = 31, + ClipDistance = 32, + CullDistance = 33, + ImageCubeArray = 34, + SampleRateShading = 35, + ImageRect = 36, + SampledRect = 37, + GenericPointer = 38, + Int8 = 39, + InputAttachment = 40, + SparseResidency = 41, + MinLod = 42, + Sampled1D = 43, + Image1D = 44, + SampledCubeArray = 45, + SampledBuffer = 46, + ImageBuffer = 47, + ImageMSArray = 48, + StorageImageExtendedFormats = 49, + ImageQuery = 50, + DerivativeControl = 51, + InterpolationFunction = 52, + TransformFeedback = 53, + GeometryStreams = 54, + StorageImageReadWithoutFormat = 55, + StorageImageWriteWithoutFormat = 56, + MultiViewport = 57, + SubgroupDispatch = 58, + NamedBarrier = 59, + PipeStorage = 60, + 
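
A note on the *Shift / *Mask enum pairs above (SelectionControl, LoopControl, FunctionControl, MemorySemantics, MemoryAccess, and so on): each Shift member is a bit index, and the matching Mask member is the value 1 << shift, so mask-style operands can be OR'd together into the single literal word an instruction carries. A minimal C sketch, with two LoopControl values restated locally from the spirv.h side of this patch so it builds standalone:

    #include <assert.h>
    #include <stdint.h>

    /* Restated from the generated spirv.h in this patch: a "*Shift" enumerant
       is a bit index; the parallel "*Mask" enumerant is 1u << shift. */
    enum { SpvLoopControlUnrollShift = 0, SpvLoopControlPartialCountShift = 8 };
    enum { SpvLoopControlUnrollMask = 0x00000001, SpvLoopControlPartialCountMask = 0x00000100 };

    int main(void) {
        /* Mask-style operands OR together into one word (here, the loop
           control literal of OpLoopMerge); non-mask enums such as Scope
           are mutually exclusive values instead. */
        uint32_t loop_control = SpvLoopControlUnrollMask | SpvLoopControlPartialCountMask;
        assert(loop_control == ((1u << SpvLoopControlUnrollShift) |
                                (1u << SpvLoopControlPartialCountShift)));
        return 0;
    }
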
GroupNonUniform = 61, + GroupNonUniformVote = 62, + GroupNonUniformArithmetic = 63, + GroupNonUniformBallot = 64, + GroupNonUniformShuffle = 65, + GroupNonUniformShuffleRelative = 66, + GroupNonUniformClustered = 67, + GroupNonUniformQuad = 68, + ShaderLayer = 69, + ShaderViewportIndex = 70, + FragmentShadingRateKHR = 4422, + SubgroupBallotKHR = 4423, + DrawParameters = 4427, + WorkgroupMemoryExplicitLayoutKHR = 4428, + WorkgroupMemoryExplicitLayout8BitAccessKHR = 4429, + WorkgroupMemoryExplicitLayout16BitAccessKHR = 4430, + SubgroupVoteKHR = 4431, + StorageBuffer16BitAccess = 4433, + StorageUniformBufferBlock16 = 4433, + StorageUniform16 = 4434, + UniformAndStorageBuffer16BitAccess = 4434, + StoragePushConstant16 = 4435, + StorageInputOutput16 = 4436, + DeviceGroup = 4437, + MultiView = 4439, + VariablePointersStorageBuffer = 4441, + VariablePointers = 4442, + AtomicStorageOps = 4445, + SampleMaskPostDepthCoverage = 4447, + StorageBuffer8BitAccess = 4448, + UniformAndStorageBuffer8BitAccess = 4449, + StoragePushConstant8 = 4450, + DenormPreserve = 4464, + DenormFlushToZero = 4465, + SignedZeroInfNanPreserve = 4466, + RoundingModeRTE = 4467, + RoundingModeRTZ = 4468, + RayQueryProvisionalKHR = 4471, + RayQueryKHR = 4472, + RayTraversalPrimitiveCullingKHR = 4478, + RayTracingKHR = 4479, + Float16ImageAMD = 5008, + ImageGatherBiasLodAMD = 5009, + FragmentMaskAMD = 5010, + StencilExportEXT = 5013, + ImageReadWriteLodAMD = 5015, + Int64ImageEXT = 5016, + ShaderClockKHR = 5055, + SampleMaskOverrideCoverageNV = 5249, + GeometryShaderPassthroughNV = 5251, + ShaderViewportIndexLayerEXT = 5254, + ShaderViewportIndexLayerNV = 5254, + ShaderViewportMaskNV = 5255, + ShaderStereoViewNV = 5259, + PerViewAttributesNV = 5260, + FragmentFullyCoveredEXT = 5265, + MeshShadingNV = 5266, + ImageFootprintNV = 5282, + FragmentBarycentricNV = 5284, + ComputeDerivativeGroupQuadsNV = 5288, + FragmentDensityEXT = 5291, + ShadingRateNV = 5291, + GroupNonUniformPartitionedNV = 5297, + ShaderNonUniform = 5301, + ShaderNonUniformEXT = 5301, + RuntimeDescriptorArray = 5302, + RuntimeDescriptorArrayEXT = 5302, + InputAttachmentArrayDynamicIndexing = 5303, + InputAttachmentArrayDynamicIndexingEXT = 5303, + UniformTexelBufferArrayDynamicIndexing = 5304, + UniformTexelBufferArrayDynamicIndexingEXT = 5304, + StorageTexelBufferArrayDynamicIndexing = 5305, + StorageTexelBufferArrayDynamicIndexingEXT = 5305, + UniformBufferArrayNonUniformIndexing = 5306, + UniformBufferArrayNonUniformIndexingEXT = 5306, + SampledImageArrayNonUniformIndexing = 5307, + SampledImageArrayNonUniformIndexingEXT = 5307, + StorageBufferArrayNonUniformIndexing = 5308, + StorageBufferArrayNonUniformIndexingEXT = 5308, + StorageImageArrayNonUniformIndexing = 5309, + StorageImageArrayNonUniformIndexingEXT = 5309, + InputAttachmentArrayNonUniformIndexing = 5310, + InputAttachmentArrayNonUniformIndexingEXT = 5310, + UniformTexelBufferArrayNonUniformIndexing = 5311, + UniformTexelBufferArrayNonUniformIndexingEXT = 5311, + StorageTexelBufferArrayNonUniformIndexing = 5312, + StorageTexelBufferArrayNonUniformIndexingEXT = 5312, + RayTracingNV = 5340, + VulkanMemoryModel = 5345, + VulkanMemoryModelKHR = 5345, + VulkanMemoryModelDeviceScope = 5346, + VulkanMemoryModelDeviceScopeKHR = 5346, + PhysicalStorageBufferAddresses = 5347, + PhysicalStorageBufferAddressesEXT = 5347, + ComputeDerivativeGroupLinearNV = 5350, + RayTracingProvisionalKHR = 5353, + CooperativeMatrixNV = 5357, + FragmentShaderSampleInterlockEXT = 5363, + FragmentShaderShadingRateInterlockEXT = 5372,
+ ShaderSMBuiltinsNV = 5373, + FragmentShaderPixelInterlockEXT = 5378, + DemoteToHelperInvocationEXT = 5379, + SubgroupShuffleINTEL = 5568, + SubgroupBufferBlockIOINTEL = 5569, + SubgroupImageBlockIOINTEL = 5570, + SubgroupImageMediaBlockIOINTEL = 5579, + RoundToInfinityINTEL = 5582, + FloatingPointModeINTEL = 5583, + IntegerFunctions2INTEL = 5584, + FunctionPointersINTEL = 5603, + IndirectReferencesINTEL = 5604, + AsmINTEL = 5606, + AtomicFloat32MinMaxEXT = 5612, + AtomicFloat64MinMaxEXT = 5613, + AtomicFloat16MinMaxEXT = 5616, + VectorComputeINTEL = 5617, + VectorAnyINTEL = 5619, + SubgroupAvcMotionEstimationINTEL = 5696, + SubgroupAvcMotionEstimationIntraINTEL = 5697, + SubgroupAvcMotionEstimationChromaINTEL = 5698, + VariableLengthArrayINTEL = 5817, + FunctionFloatControlINTEL = 5821, + FPGAMemoryAttributesINTEL = 5824, + FPFastMathModeINTEL = 5837, + ArbitraryPrecisionIntegersINTEL = 5844, + UnstructuredLoopControlsINTEL = 5886, + FPGALoopControlsINTEL = 5888, + KernelAttributesINTEL = 5892, + FPGAKernelAttributesINTEL = 5897, + FPGAMemoryAccessesINTEL = 5898, + FPGAClusterAttributesINTEL = 5904, + LoopFuseINTEL = 5906, + FPGABufferLocationINTEL = 5920, + USMStorageClassesINTEL = 5935, + IOPipesINTEL = 5943, + BlockingPipesINTEL = 5945, + FPGARegINTEL = 5948, + AtomicFloat32AddEXT = 6033, + AtomicFloat64AddEXT = 6034, + LongConstantCompositeINTEL = 6089, + } + + public enum RayFlagsShift + { + OpaqueKHR = 0, + NoOpaqueKHR = 1, + TerminateOnFirstHitKHR = 2, + SkipClosestHitShaderKHR = 3, + CullBackFacingTrianglesKHR = 4, + CullFrontFacingTrianglesKHR = 5, + CullOpaqueKHR = 6, + CullNoOpaqueKHR = 7, + SkipTrianglesKHR = 8, + SkipAABBsKHR = 9, + } + + public enum RayFlagsMask + { + MaskNone = 0, + OpaqueKHR = 0x00000001, + NoOpaqueKHR = 0x00000002, + TerminateOnFirstHitKHR = 0x00000004, + SkipClosestHitShaderKHR = 0x00000008, + CullBackFacingTrianglesKHR = 0x00000010, + CullFrontFacingTrianglesKHR = 0x00000020, + CullOpaqueKHR = 0x00000040, + CullNoOpaqueKHR = 0x00000080, + SkipTrianglesKHR = 0x00000100, + SkipAABBsKHR = 0x00000200, + } + + public enum RayQueryIntersection + { + RayQueryCandidateIntersectionKHR = 0, + RayQueryCommittedIntersectionKHR = 1, + } + + public enum RayQueryCommittedIntersectionType + { + RayQueryCommittedIntersectionNoneKHR = 0, + RayQueryCommittedIntersectionTriangleKHR = 1, + RayQueryCommittedIntersectionGeneratedKHR = 2, + } + + public enum RayQueryCandidateIntersectionType + { + RayQueryCandidateIntersectionTriangleKHR = 0, + RayQueryCandidateIntersectionAABBKHR = 1, + } + + public enum FragmentShadingRateShift + { + Vertical2Pixels = 0, + Vertical4Pixels = 1, + Horizontal2Pixels = 2, + Horizontal4Pixels = 3, + } + + public enum FragmentShadingRateMask + { + MaskNone = 0, + Vertical2Pixels = 0x00000001, + Vertical4Pixels = 0x00000002, + Horizontal2Pixels = 0x00000004, + Horizontal4Pixels = 0x00000008, + } + + public enum FPDenormMode + { + Preserve = 0, + FlushToZero = 1, + } + + public enum FPOperationMode + { + IEEE = 0, + ALT = 1, + } + + public enum Op + { + OpNop = 0, + OpUndef = 1, + OpSourceContinued = 2, + OpSource = 3, + OpSourceExtension = 4, + OpName = 5, + OpMemberName = 6, + OpString = 7, + OpLine = 8, + OpExtension = 10, + OpExtInstImport = 11, + OpExtInst = 12, + OpMemoryModel = 14, + OpEntryPoint = 15, + OpExecutionMode = 16, + OpCapability = 17, + OpTypeVoid = 19, + OpTypeBool = 20, + OpTypeInt = 21, + OpTypeFloat = 22, + OpTypeVector = 23, + OpTypeMatrix = 24, + OpTypeImage = 25, + OpTypeSampler = 26, + OpTypeSampledImage = 27, +
OpTypeArray = 28, + OpTypeRuntimeArray = 29, + OpTypeStruct = 30, + OpTypeOpaque = 31, + OpTypePointer = 32, + OpTypeFunction = 33, + OpTypeEvent = 34, + OpTypeDeviceEvent = 35, + OpTypeReserveId = 36, + OpTypeQueue = 37, + OpTypePipe = 38, + OpTypeForwardPointer = 39, + OpConstantTrue = 41, + OpConstantFalse = 42, + OpConstant = 43, + OpConstantComposite = 44, + OpConstantSampler = 45, + OpConstantNull = 46, + OpSpecConstantTrue = 48, + OpSpecConstantFalse = 49, + OpSpecConstant = 50, + OpSpecConstantComposite = 51, + OpSpecConstantOp = 52, + OpFunction = 54, + OpFunctionParameter = 55, + OpFunctionEnd = 56, + OpFunctionCall = 57, + OpVariable = 59, + OpImageTexelPointer = 60, + OpLoad = 61, + OpStore = 62, + OpCopyMemory = 63, + OpCopyMemorySized = 64, + OpAccessChain = 65, + OpInBoundsAccessChain = 66, + OpPtrAccessChain = 67, + OpArrayLength = 68, + OpGenericPtrMemSemantics = 69, + OpInBoundsPtrAccessChain = 70, + OpDecorate = 71, + OpMemberDecorate = 72, + OpDecorationGroup = 73, + OpGroupDecorate = 74, + OpGroupMemberDecorate = 75, + OpVectorExtractDynamic = 77, + OpVectorInsertDynamic = 78, + OpVectorShuffle = 79, + OpCompositeConstruct = 80, + OpCompositeExtract = 81, + OpCompositeInsert = 82, + OpCopyObject = 83, + OpTranspose = 84, + OpSampledImage = 86, + OpImageSampleImplicitLod = 87, + OpImageSampleExplicitLod = 88, + OpImageSampleDrefImplicitLod = 89, + OpImageSampleDrefExplicitLod = 90, + OpImageSampleProjImplicitLod = 91, + OpImageSampleProjExplicitLod = 92, + OpImageSampleProjDrefImplicitLod = 93, + OpImageSampleProjDrefExplicitLod = 94, + OpImageFetch = 95, + OpImageGather = 96, + OpImageDrefGather = 97, + OpImageRead = 98, + OpImageWrite = 99, + OpImage = 100, + OpImageQueryFormat = 101, + OpImageQueryOrder = 102, + OpImageQuerySizeLod = 103, + OpImageQuerySize = 104, + OpImageQueryLod = 105, + OpImageQueryLevels = 106, + OpImageQuerySamples = 107, + OpConvertFToU = 109, + OpConvertFToS = 110, + OpConvertSToF = 111, + OpConvertUToF = 112, + OpUConvert = 113, + OpSConvert = 114, + OpFConvert = 115, + OpQuantizeToF16 = 116, + OpConvertPtrToU = 117, + OpSatConvertSToU = 118, + OpSatConvertUToS = 119, + OpConvertUToPtr = 120, + OpPtrCastToGeneric = 121, + OpGenericCastToPtr = 122, + OpGenericCastToPtrExplicit = 123, + OpBitcast = 124, + OpSNegate = 126, + OpFNegate = 127, + OpIAdd = 128, + OpFAdd = 129, + OpISub = 130, + OpFSub = 131, + OpIMul = 132, + OpFMul = 133, + OpUDiv = 134, + OpSDiv = 135, + OpFDiv = 136, + OpUMod = 137, + OpSRem = 138, + OpSMod = 139, + OpFRem = 140, + OpFMod = 141, + OpVectorTimesScalar = 142, + OpMatrixTimesScalar = 143, + OpVectorTimesMatrix = 144, + OpMatrixTimesVector = 145, + OpMatrixTimesMatrix = 146, + OpOuterProduct = 147, + OpDot = 148, + OpIAddCarry = 149, + OpISubBorrow = 150, + OpUMulExtended = 151, + OpSMulExtended = 152, + OpAny = 154, + OpAll = 155, + OpIsNan = 156, + OpIsInf = 157, + OpIsFinite = 158, + OpIsNormal = 159, + OpSignBitSet = 160, + OpLessOrGreater = 161, + OpOrdered = 162, + OpUnordered = 163, + OpLogicalEqual = 164, + OpLogicalNotEqual = 165, + OpLogicalOr = 166, + OpLogicalAnd = 167, + OpLogicalNot = 168, + OpSelect = 169, + OpIEqual = 170, + OpINotEqual = 171, + OpUGreaterThan = 172, + OpSGreaterThan = 173, + OpUGreaterThanEqual = 174, + OpSGreaterThanEqual = 175, + OpULessThan = 176, + OpSLessThan = 177, + OpULessThanEqual = 178, + OpSLessThanEqual = 179, + OpFOrdEqual = 180, + OpFUnordEqual = 181, + OpFOrdNotEqual = 182, + OpFUnordNotEqual = 183, + OpFOrdLessThan = 184, + OpFUnordLessThan = 185, + OpFOrdGreaterThan = 186,
+ OpFUnordGreaterThan = 187, + OpFOrdLessThanEqual = 188, + OpFUnordLessThanEqual = 189, + OpFOrdGreaterThanEqual = 190, + OpFUnordGreaterThanEqual = 191, + OpShiftRightLogical = 194, + OpShiftRightArithmetic = 195, + OpShiftLeftLogical = 196, + OpBitwiseOr = 197, + OpBitwiseXor = 198, + OpBitwiseAnd = 199, + OpNot = 200, + OpBitFieldInsert = 201, + OpBitFieldSExtract = 202, + OpBitFieldUExtract = 203, + OpBitReverse = 204, + OpBitCount = 205, + OpDPdx = 207, + OpDPdy = 208, + OpFwidth = 209, + OpDPdxFine = 210, + OpDPdyFine = 211, + OpFwidthFine = 212, + OpDPdxCoarse = 213, + OpDPdyCoarse = 214, + OpFwidthCoarse = 215, + OpEmitVertex = 218, + OpEndPrimitive = 219, + OpEmitStreamVertex = 220, + OpEndStreamPrimitive = 221, + OpControlBarrier = 224, + OpMemoryBarrier = 225, + OpAtomicLoad = 227, + OpAtomicStore = 228, + OpAtomicExchange = 229, + OpAtomicCompareExchange = 230, + OpAtomicCompareExchangeWeak = 231, + OpAtomicIIncrement = 232, + OpAtomicIDecrement = 233, + OpAtomicIAdd = 234, + OpAtomicISub = 235, + OpAtomicSMin = 236, + OpAtomicUMin = 237, + OpAtomicSMax = 238, + OpAtomicUMax = 239, + OpAtomicAnd = 240, + OpAtomicOr = 241, + OpAtomicXor = 242, + OpPhi = 245, + OpLoopMerge = 246, + OpSelectionMerge = 247, + OpLabel = 248, + OpBranch = 249, + OpBranchConditional = 250, + OpSwitch = 251, + OpKill = 252, + OpReturn = 253, + OpReturnValue = 254, + OpUnreachable = 255, + OpLifetimeStart = 256, + OpLifetimeStop = 257, + OpGroupAsyncCopy = 259, + OpGroupWaitEvents = 260, + OpGroupAll = 261, + OpGroupAny = 262, + OpGroupBroadcast = 263, + OpGroupIAdd = 264, + OpGroupFAdd = 265, + OpGroupFMin = 266, + OpGroupUMin = 267, + OpGroupSMin = 268, + OpGroupFMax = 269, + OpGroupUMax = 270, + OpGroupSMax = 271, + OpReadPipe = 274, + OpWritePipe = 275, + OpReservedReadPipe = 276, + OpReservedWritePipe = 277, + OpReserveReadPipePackets = 278, + OpReserveWritePipePackets = 279, + OpCommitReadPipe = 280, + OpCommitWritePipe = 281, + OpIsValidReserveId = 282, + OpGetNumPipePackets = 283, + OpGetMaxPipePackets = 284, + OpGroupReserveReadPipePackets = 285, + OpGroupReserveWritePipePackets = 286, + OpGroupCommitReadPipe = 287, + OpGroupCommitWritePipe = 288, + OpEnqueueMarker = 291, + OpEnqueueKernel = 292, + OpGetKernelNDrangeSubGroupCount = 293, + OpGetKernelNDrangeMaxSubGroupSize = 294, + OpGetKernelWorkGroupSize = 295, + OpGetKernelPreferredWorkGroupSizeMultiple = 296, + OpRetainEvent = 297, + OpReleaseEvent = 298, + OpCreateUserEvent = 299, + OpIsValidEvent = 300, + OpSetUserEventStatus = 301, + OpCaptureEventProfilingInfo = 302, + OpGetDefaultQueue = 303, + OpBuildNDRange = 304, + OpImageSparseSampleImplicitLod = 305, + OpImageSparseSampleExplicitLod = 306, + OpImageSparseSampleDrefImplicitLod = 307, + OpImageSparseSampleDrefExplicitLod = 308, + OpImageSparseSampleProjImplicitLod = 309, + OpImageSparseSampleProjExplicitLod = 310, + OpImageSparseSampleProjDrefImplicitLod = 311, + OpImageSparseSampleProjDrefExplicitLod = 312, + OpImageSparseFetch = 313, + OpImageSparseGather = 314, + OpImageSparseDrefGather = 315, + OpImageSparseTexelsResident = 316, + OpNoLine = 317, + OpAtomicFlagTestAndSet = 318, + OpAtomicFlagClear = 319, + OpImageSparseRead = 320, + OpSizeOf = 321, + OpTypePipeStorage = 322, + OpConstantPipeStorage = 323, + OpCreatePipeFromPipeStorage = 324, + OpGetKernelLocalSizeForSubgroupCount = 325, + OpGetKernelMaxNumSubgroups = 326, + OpTypeNamedBarrier = 327, + OpNamedBarrierInitialize = 328, + OpMemoryNamedBarrier = 329, + OpModuleProcessed = 330, + OpExecutionModeId = 331, +
OpDecorateId = 332, + OpGroupNonUniformElect = 333, + OpGroupNonUniformAll = 334, + OpGroupNonUniformAny = 335, + OpGroupNonUniformAllEqual = 336, + OpGroupNonUniformBroadcast = 337, + OpGroupNonUniformBroadcastFirst = 338, + OpGroupNonUniformBallot = 339, + OpGroupNonUniformInverseBallot = 340, + OpGroupNonUniformBallotBitExtract = 341, + OpGroupNonUniformBallotBitCount = 342, + OpGroupNonUniformBallotFindLSB = 343, + OpGroupNonUniformBallotFindMSB = 344, + OpGroupNonUniformShuffle = 345, + OpGroupNonUniformShuffleXor = 346, + OpGroupNonUniformShuffleUp = 347, + OpGroupNonUniformShuffleDown = 348, + OpGroupNonUniformIAdd = 349, + OpGroupNonUniformFAdd = 350, + OpGroupNonUniformIMul = 351, + OpGroupNonUniformFMul = 352, + OpGroupNonUniformSMin = 353, + OpGroupNonUniformUMin = 354, + OpGroupNonUniformFMin = 355, + OpGroupNonUniformSMax = 356, + OpGroupNonUniformUMax = 357, + OpGroupNonUniformFMax = 358, + OpGroupNonUniformBitwiseAnd = 359, + OpGroupNonUniformBitwiseOr = 360, + OpGroupNonUniformBitwiseXor = 361, + OpGroupNonUniformLogicalAnd = 362, + OpGroupNonUniformLogicalOr = 363, + OpGroupNonUniformLogicalXor = 364, + OpGroupNonUniformQuadBroadcast = 365, + OpGroupNonUniformQuadSwap = 366, + OpCopyLogical = 400, + OpPtrEqual = 401, + OpPtrNotEqual = 402, + OpPtrDiff = 403, + OpTerminateInvocation = 4416, + OpSubgroupBallotKHR = 4421, + OpSubgroupFirstInvocationKHR = 4422, + OpSubgroupAllKHR = 4428, + OpSubgroupAnyKHR = 4429, + OpSubgroupAllEqualKHR = 4430, + OpSubgroupReadInvocationKHR = 4432, + OpTraceRayKHR = 4445, + OpExecuteCallableKHR = 4446, + OpConvertUToAccelerationStructureKHR = 4447, + OpIgnoreIntersectionKHR = 4448, + OpTerminateRayKHR = 4449, + OpTypeRayQueryKHR = 4472, + OpRayQueryInitializeKHR = 4473, + OpRayQueryTerminateKHR = 4474, + OpRayQueryGenerateIntersectionKHR = 4475, + OpRayQueryConfirmIntersectionKHR = 4476, + OpRayQueryProceedKHR = 4477, + OpRayQueryGetIntersectionTypeKHR = 4479, + OpGroupIAddNonUniformAMD = 5000, + OpGroupFAddNonUniformAMD = 5001, + OpGroupFMinNonUniformAMD = 5002, + OpGroupUMinNonUniformAMD = 5003, + OpGroupSMinNonUniformAMD = 5004, + OpGroupFMaxNonUniformAMD = 5005, + OpGroupUMaxNonUniformAMD = 5006, + OpGroupSMaxNonUniformAMD = 5007, + OpFragmentMaskFetchAMD = 5011, + OpFragmentFetchAMD = 5012, + OpReadClockKHR = 5056, + OpImageSampleFootprintNV = 5283, + OpGroupNonUniformPartitionNV = 5296, + OpWritePackedPrimitiveIndices4x8NV = 5299, + OpReportIntersectionKHR = 5334, + OpReportIntersectionNV = 5334, + OpIgnoreIntersectionNV = 5335, + OpTerminateRayNV = 5336, + OpTraceNV = 5337, + OpTypeAccelerationStructureKHR = 5341, + OpTypeAccelerationStructureNV = 5341, + OpExecuteCallableNV = 5344, + OpTypeCooperativeMatrixNV = 5358, + OpCooperativeMatrixLoadNV = 5359, + OpCooperativeMatrixStoreNV = 5360, + OpCooperativeMatrixMulAddNV = 5361, + OpCooperativeMatrixLengthNV = 5362, + OpBeginInvocationInterlockEXT = 5364, + OpEndInvocationInterlockEXT = 5365, + OpDemoteToHelperInvocationEXT = 5380, + OpIsHelperInvocationEXT = 5381, + OpSubgroupShuffleINTEL = 5571, + OpSubgroupShuffleDownINTEL = 5572, + OpSubgroupShuffleUpINTEL = 5573, + OpSubgroupShuffleXorINTEL = 5574, + OpSubgroupBlockReadINTEL = 5575, + OpSubgroupBlockWriteINTEL = 5576, + OpSubgroupImageBlockReadINTEL = 5577, + OpSubgroupImageBlockWriteINTEL = 5578, + OpSubgroupImageMediaBlockReadINTEL = 5580, + OpSubgroupImageMediaBlockWriteINTEL = 5581, + OpUCountLeadingZerosINTEL = 5585, + OpUCountTrailingZerosINTEL = 5586, + OpAbsISubINTEL = 5587, + OpAbsUSubINTEL = 5588, + OpIAddSatINTEL = 5589, 
+ OpUAddSatINTEL = 5590, + OpIAverageINTEL = 5591, + OpUAverageINTEL = 5592, + OpIAverageRoundedINTEL = 5593, + OpUAverageRoundedINTEL = 5594, + OpISubSatINTEL = 5595, + OpUSubSatINTEL = 5596, + OpIMul32x16INTEL = 5597, + OpUMul32x16INTEL = 5598, + OpConstFunctionPointerINTEL = 5600, + OpFunctionPointerCallINTEL = 5601, + OpAsmTargetINTEL = 5609, + OpAsmINTEL = 5610, + OpAsmCallINTEL = 5611, + OpAtomicFMinEXT = 5614, + OpAtomicFMaxEXT = 5615, + OpDecorateString = 5632, + OpDecorateStringGOOGLE = 5632, + OpMemberDecorateString = 5633, + OpMemberDecorateStringGOOGLE = 5633, + OpVmeImageINTEL = 5699, + OpTypeVmeImageINTEL = 5700, + OpTypeAvcImePayloadINTEL = 5701, + OpTypeAvcRefPayloadINTEL = 5702, + OpTypeAvcSicPayloadINTEL = 5703, + OpTypeAvcMcePayloadINTEL = 5704, + OpTypeAvcMceResultINTEL = 5705, + OpTypeAvcImeResultINTEL = 5706, + OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707, + OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708, + OpTypeAvcImeSingleReferenceStreaminINTEL = 5709, + OpTypeAvcImeDualReferenceStreaminINTEL = 5710, + OpTypeAvcRefResultINTEL = 5711, + OpTypeAvcSicResultINTEL = 5712, + OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713, + OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714, + OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715, + OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716, + OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717, + OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718, + OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719, + OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720, + OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721, + OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722, + OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723, + OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724, + OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725, + OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726, + OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727, + OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728, + OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729, + OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730, + OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731, + OpSubgroupAvcMceConvertToImePayloadINTEL = 5732, + OpSubgroupAvcMceConvertToImeResultINTEL = 5733, + OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734, + OpSubgroupAvcMceConvertToRefResultINTEL = 5735, + OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736, + OpSubgroupAvcMceConvertToSicResultINTEL = 5737, + OpSubgroupAvcMceGetMotionVectorsINTEL = 5738, + OpSubgroupAvcMceGetInterDistortionsINTEL = 5739, + OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740, + OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741, + OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742, + OpSubgroupAvcMceGetInterDirectionsINTEL = 5743, + OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744, + OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745, + OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746, + OpSubgroupAvcImeInitializeINTEL = 5747, + OpSubgroupAvcImeSetSingleReferenceINTEL = 5748, + OpSubgroupAvcImeSetDualReferenceINTEL = 5749, + OpSubgroupAvcImeRefWindowSizeINTEL = 5750, + OpSubgroupAvcImeAdjustRefOffsetINTEL = 5751, + OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752, + OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753, + OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754, + 
OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755, + OpSubgroupAvcImeSetWeightedSadINTEL = 5756, + OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757, + OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761, + OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764, + OpSubgroupAvcImeConvertToMceResultINTEL = 5765, + OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766, + OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767, + OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768, + OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775, + OpSubgroupAvcImeGetBorderReachedINTEL = 5776, + OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777, + OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778, + OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779, + OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780, + OpSubgroupAvcFmeInitializeINTEL = 5781, + OpSubgroupAvcBmeInitializeINTEL = 5782, + OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783, + OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784, + OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785, + OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786, + OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787, + OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788, + OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789, + OpSubgroupAvcRefConvertToMceResultINTEL = 5790, + OpSubgroupAvcSicInitializeINTEL = 5791, + OpSubgroupAvcSicConfigureSkcINTEL = 5792, + OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793, + OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794, + OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795, + OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796, + OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797, + OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798, + OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799, + OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800, + OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801, + OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802, + OpSubgroupAvcSicEvaluateIpeINTEL = 5803, + OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804, + OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805, + OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806, + OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807, + OpSubgroupAvcSicConvertToMceResultINTEL = 5808, + OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809, + OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810, + OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811, + OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812, + OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813, + OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, + 
OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, + OpSubgroupAvcSicGetInterRawSadsINTEL = 5816, + OpVariableLengthArrayINTEL = 5818, + OpSaveMemoryINTEL = 5819, + OpRestoreMemoryINTEL = 5820, + OpLoopControlINTEL = 5887, + OpPtrCastToCrossWorkgroupINTEL = 5934, + OpCrossWorkgroupCastToPtrINTEL = 5938, + OpReadPipeBlockingINTEL = 5946, + OpWritePipeBlockingINTEL = 5947, + OpFPGARegINTEL = 5949, + OpRayQueryGetRayTMinKHR = 6016, + OpRayQueryGetRayFlagsKHR = 6017, + OpRayQueryGetIntersectionTKHR = 6018, + OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019, + OpRayQueryGetIntersectionInstanceIdKHR = 6020, + OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021, + OpRayQueryGetIntersectionGeometryIndexKHR = 6022, + OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023, + OpRayQueryGetIntersectionBarycentricsKHR = 6024, + OpRayQueryGetIntersectionFrontFaceKHR = 6025, + OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026, + OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027, + OpRayQueryGetIntersectionObjectRayOriginKHR = 6028, + OpRayQueryGetWorldRayDirectionKHR = 6029, + OpRayQueryGetWorldRayOriginKHR = 6030, + OpRayQueryGetIntersectionObjectToWorldKHR = 6031, + OpRayQueryGetIntersectionWorldToObjectKHR = 6032, + OpAtomicFAddEXT = 6035, + OpTypeBufferSurfaceINTEL = 6086, + OpTypeStructContinuedINTEL = 6090, + OpConstantCompositeContinuedINTEL = 6091, + OpSpecConstantCompositeContinuedINTEL = 6092, + } + } +} + diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.h b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.h index 4c90c936c..fbdc76330 100755 --- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.h +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.h @@ -1,5 +1,5 @@ /* -** Copyright (c) 2014-2018 The Khronos Group Inc. +** Copyright (c) 2014-2020 The Khronos Group Inc. ** ** Permission is hereby granted, free of charge, to any person obtaining a copy ** of this software and/or associated documentation files (the "Materials"), @@ -31,13 +31,16 @@ /* ** Enumeration tokens for SPIR-V, in various styles: -** C, C++, C++11, JSON, Lua, Python +** C, C++, C++11, JSON, Lua, Python, C#, D ** ** - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL ** - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL ** - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL ** - Lua will use tables, e.g.: spv.SourceLanguage.GLSL ** - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +** - C# will use enum classes in the Specification class located in the "Spv" namespace, +** e.g.: Spv.Specification.SourceLanguage.GLSL +** - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL ** ** Some tokens act like mask values, which can be OR'd together, ** while others are mutually exclusive. 
The mask-like ones have @@ -50,12 +53,12 @@ typedef unsigned int SpvId; -#define SPV_VERSION 0x10300 -#define SPV_REVISION 1 +#define SPV_VERSION 0x10500 +#define SPV_REVISION 4 static const unsigned int SpvMagicNumber = 0x07230203; -static const unsigned int SpvVersion = 0x00010300; -static const unsigned int SpvRevision = 1; +static const unsigned int SpvVersion = 0x00010500; +static const unsigned int SpvRevision = 4; static const unsigned int SpvOpCodeMask = 0xffff; static const unsigned int SpvWordCountShift = 16; @@ -77,6 +80,20 @@ typedef enum SpvExecutionModel_ { SpvExecutionModelFragment = 4, SpvExecutionModelGLCompute = 5, SpvExecutionModelKernel = 6, + SpvExecutionModelTaskNV = 5267, + SpvExecutionModelMeshNV = 5268, + SpvExecutionModelRayGenerationKHR = 5313, + SpvExecutionModelRayGenerationNV = 5313, + SpvExecutionModelIntersectionKHR = 5314, + SpvExecutionModelIntersectionNV = 5314, + SpvExecutionModelAnyHitKHR = 5315, + SpvExecutionModelAnyHitNV = 5315, + SpvExecutionModelClosestHitKHR = 5316, + SpvExecutionModelClosestHitNV = 5316, + SpvExecutionModelMissKHR = 5317, + SpvExecutionModelMissNV = 5317, + SpvExecutionModelCallableKHR = 5318, + SpvExecutionModelCallableNV = 5318, SpvExecutionModelMax = 0x7fffffff, } SpvExecutionModel; @@ -84,6 +101,8 @@ typedef enum SpvAddressingModel_ { SpvAddressingModelLogical = 0, SpvAddressingModelPhysical32 = 1, SpvAddressingModelPhysical64 = 2, + SpvAddressingModelPhysicalStorageBuffer64 = 5348, + SpvAddressingModelPhysicalStorageBuffer64EXT = 5348, SpvAddressingModelMax = 0x7fffffff, } SpvAddressingModel; @@ -91,6 +110,8 @@ typedef enum SpvMemoryModel_ { SpvMemoryModelSimple = 0, SpvMemoryModelGLSL450 = 1, SpvMemoryModelOpenCL = 2, + SpvMemoryModelVulkan = 3, + SpvMemoryModelVulkanKHR = 3, SpvMemoryModelMax = 0x7fffffff, } SpvMemoryModel; @@ -134,7 +155,33 @@ typedef enum SpvExecutionMode_ { SpvExecutionModeLocalSizeId = 38, SpvExecutionModeLocalSizeHintId = 39, SpvExecutionModePostDepthCoverage = 4446, + SpvExecutionModeDenormPreserve = 4459, + SpvExecutionModeDenormFlushToZero = 4460, + SpvExecutionModeSignedZeroInfNanPreserve = 4461, + SpvExecutionModeRoundingModeRTE = 4462, + SpvExecutionModeRoundingModeRTZ = 4463, SpvExecutionModeStencilRefReplacingEXT = 5027, + SpvExecutionModeOutputLinesNV = 5269, + SpvExecutionModeOutputPrimitivesNV = 5270, + SpvExecutionModeDerivativeGroupQuadsNV = 5289, + SpvExecutionModeDerivativeGroupLinearNV = 5290, + SpvExecutionModeOutputTrianglesNV = 5298, + SpvExecutionModePixelInterlockOrderedEXT = 5366, + SpvExecutionModePixelInterlockUnorderedEXT = 5367, + SpvExecutionModeSampleInterlockOrderedEXT = 5368, + SpvExecutionModeSampleInterlockUnorderedEXT = 5369, + SpvExecutionModeShadingRateInterlockOrderedEXT = 5370, + SpvExecutionModeShadingRateInterlockUnorderedEXT = 5371, + SpvExecutionModeSharedLocalMemorySizeINTEL = 5618, + SpvExecutionModeRoundingModeRTPINTEL = 5620, + SpvExecutionModeRoundingModeRTNINTEL = 5621, + SpvExecutionModeFloatingPointModeALTINTEL = 5622, + SpvExecutionModeFloatingPointModeIEEEINTEL = 5623, + SpvExecutionModeMaxWorkgroupSizeINTEL = 5893, + SpvExecutionModeMaxWorkDimINTEL = 5894, + SpvExecutionModeNoGlobalOffsetINTEL = 5895, + SpvExecutionModeNumSIMDWorkitemsINTEL = 5896, + SpvExecutionModeSchedulerTargetFmaxMhzINTEL = 5903, SpvExecutionModeMax = 0x7fffffff, } SpvExecutionMode; @@ -152,6 +199,23 @@ typedef enum SpvStorageClass_ { SpvStorageClassAtomicCounter = 10, SpvStorageClassImage = 11, SpvStorageClassStorageBuffer = 12, + SpvStorageClassCallableDataKHR = 5328, + 
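
For orientation alongside the SpvMagicNumber, SpvOpCodeMask, and SpvWordCountShift constants above: the first word of every SPIR-V instruction packs the instruction's total word count into the high 16 bits and its opcode into the low 16 bits, and a module's very first word is the magic number (a byte-swapped magic signals the opposite endianness). A minimal C sketch, with the constants restated locally so it compiles on its own:

    #include <stdint.h>
    #include <stdio.h>

    /* Restated from the updated spirv.h above. */
    static const uint32_t SpvMagicNumber    = 0x07230203;
    static const uint32_t SpvOpCodeMask     = 0xffff;
    static const uint32_t SpvWordCountShift = 16;

    int main(void) {
        /* OpCapability is opcode 17 and occupies 2 words, so its first
           word is (2 << 16) | 17. */
        uint32_t first_word = (2u << SpvWordCountShift) | 17u;
        uint32_t opcode     = first_word & SpvOpCodeMask;      /* 17 */
        uint32_t word_count = first_word >> SpvWordCountShift; /* 2  */
        printf("opcode=%u word_count=%u magic=0x%08x\n",
               (unsigned)opcode, (unsigned)word_count, (unsigned)SpvMagicNumber);
        return 0;
    }
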
SpvStorageClassCallableDataNV = 5328, + SpvStorageClassIncomingCallableDataKHR = 5329, + SpvStorageClassIncomingCallableDataNV = 5329, + SpvStorageClassRayPayloadKHR = 5338, + SpvStorageClassRayPayloadNV = 5338, + SpvStorageClassHitAttributeKHR = 5339, + SpvStorageClassHitAttributeNV = 5339, + SpvStorageClassIncomingRayPayloadKHR = 5342, + SpvStorageClassIncomingRayPayloadNV = 5342, + SpvStorageClassShaderRecordBufferKHR = 5343, + SpvStorageClassShaderRecordBufferNV = 5343, + SpvStorageClassPhysicalStorageBuffer = 5349, + SpvStorageClassPhysicalStorageBufferEXT = 5349, + SpvStorageClassCodeSectionINTEL = 5605, + SpvStorageClassDeviceOnlyINTEL = 5936, + SpvStorageClassHostOnlyINTEL = 5937, SpvStorageClassMax = 0x7fffffff, } SpvStorageClass; @@ -222,6 +286,8 @@ typedef enum SpvImageFormat_ { SpvImageFormatRg8ui = 37, SpvImageFormatR16ui = 38, SpvImageFormatR8ui = 39, + SpvImageFormatR64ui = 40, + SpvImageFormatR64i = 41, SpvImageFormatMax = 0x7fffffff, } SpvImageFormat; @@ -279,6 +345,16 @@ typedef enum SpvImageOperandsShift_ { SpvImageOperandsConstOffsetsShift = 5, SpvImageOperandsSampleShift = 6, SpvImageOperandsMinLodShift = 7, + SpvImageOperandsMakeTexelAvailableShift = 8, + SpvImageOperandsMakeTexelAvailableKHRShift = 8, + SpvImageOperandsMakeTexelVisibleShift = 9, + SpvImageOperandsMakeTexelVisibleKHRShift = 9, + SpvImageOperandsNonPrivateTexelShift = 10, + SpvImageOperandsNonPrivateTexelKHRShift = 10, + SpvImageOperandsVolatileTexelShift = 11, + SpvImageOperandsVolatileTexelKHRShift = 11, + SpvImageOperandsSignExtendShift = 12, + SpvImageOperandsZeroExtendShift = 13, SpvImageOperandsMax = 0x7fffffff, } SpvImageOperandsShift; @@ -292,6 +368,16 @@ typedef enum SpvImageOperandsMask_ { SpvImageOperandsConstOffsetsMask = 0x00000020, SpvImageOperandsSampleMask = 0x00000040, SpvImageOperandsMinLodMask = 0x00000080, + SpvImageOperandsMakeTexelAvailableMask = 0x00000100, + SpvImageOperandsMakeTexelAvailableKHRMask = 0x00000100, + SpvImageOperandsMakeTexelVisibleMask = 0x00000200, + SpvImageOperandsMakeTexelVisibleKHRMask = 0x00000200, + SpvImageOperandsNonPrivateTexelMask = 0x00000400, + SpvImageOperandsNonPrivateTexelKHRMask = 0x00000400, + SpvImageOperandsVolatileTexelMask = 0x00000800, + SpvImageOperandsVolatileTexelKHRMask = 0x00000800, + SpvImageOperandsSignExtendMask = 0x00001000, + SpvImageOperandsZeroExtendMask = 0x00002000, } SpvImageOperandsMask; typedef enum SpvFPFastMathModeShift_ { @@ -300,6 +386,8 @@ typedef enum SpvFPFastMathModeShift_ { SpvFPFastMathModeNSZShift = 2, SpvFPFastMathModeAllowRecipShift = 3, SpvFPFastMathModeFastShift = 4, + SpvFPFastMathModeAllowContractFastINTELShift = 16, + SpvFPFastMathModeAllowReassocINTELShift = 17, SpvFPFastMathModeMax = 0x7fffffff, } SpvFPFastMathModeShift; @@ -310,6 +398,8 @@ typedef enum SpvFPFastMathModeMask_ { SpvFPFastMathModeNSZMask = 0x00000004, SpvFPFastMathModeAllowRecipMask = 0x00000008, SpvFPFastMathModeFastMask = 0x00000010, + SpvFPFastMathModeAllowContractFastINTELMask = 0x00010000, + SpvFPFastMathModeAllowReassocINTELMask = 0x00020000, } SpvFPFastMathModeMask; typedef enum SpvFPRoundingMode_ { @@ -372,6 +462,7 @@ typedef enum SpvDecoration_ { SpvDecorationNonWritable = 24, SpvDecorationNonReadable = 25, SpvDecorationUniform = 26, + SpvDecorationUniformId = 27, SpvDecorationSaturatedConversion = 28, SpvDecorationStream = 29, SpvDecorationLocation = 30, @@ -392,14 +483,62 @@ typedef enum SpvDecoration_ { SpvDecorationMaxByteOffset = 45, SpvDecorationAlignmentId = 46, SpvDecorationMaxByteOffsetId = 47, + SpvDecorationNoSignedWrap = 4469,
+ SpvDecorationNoUnsignedWrap = 4470, SpvDecorationExplicitInterpAMD = 4999, SpvDecorationOverrideCoverageNV = 5248, SpvDecorationPassthroughNV = 5250, SpvDecorationViewportRelativeNV = 5252, SpvDecorationSecondaryViewportRelativeNV = 5256, + SpvDecorationPerPrimitiveNV = 5271, + SpvDecorationPerViewNV = 5272, + SpvDecorationPerTaskNV = 5273, + SpvDecorationPerVertexNV = 5285, + SpvDecorationNonUniform = 5300, SpvDecorationNonUniformEXT = 5300, + SpvDecorationRestrictPointer = 5355, + SpvDecorationRestrictPointerEXT = 5355, + SpvDecorationAliasedPointer = 5356, + SpvDecorationAliasedPointerEXT = 5356, + SpvDecorationSIMTCallINTEL = 5599, + SpvDecorationReferencedIndirectlyINTEL = 5602, + SpvDecorationClobberINTEL = 5607, + SpvDecorationSideEffectsINTEL = 5608, + SpvDecorationVectorComputeVariableINTEL = 5624, + SpvDecorationFuncParamIOKindINTEL = 5625, + SpvDecorationVectorComputeFunctionINTEL = 5626, + SpvDecorationStackCallINTEL = 5627, + SpvDecorationGlobalVariableOffsetINTEL = 5628, + SpvDecorationCounterBuffer = 5634, SpvDecorationHlslCounterBufferGOOGLE = 5634, SpvDecorationHlslSemanticGOOGLE = 5635, + SpvDecorationUserSemantic = 5635, + SpvDecorationUserTypeGOOGLE = 5636, + SpvDecorationFunctionRoundingModeINTEL = 5822, + SpvDecorationFunctionDenormModeINTEL = 5823, + SpvDecorationRegisterINTEL = 5825, + SpvDecorationMemoryINTEL = 5826, + SpvDecorationNumbanksINTEL = 5827, + SpvDecorationBankwidthINTEL = 5828, + SpvDecorationMaxPrivateCopiesINTEL = 5829, + SpvDecorationSinglepumpINTEL = 5830, + SpvDecorationDoublepumpINTEL = 5831, + SpvDecorationMaxReplicatesINTEL = 5832, + SpvDecorationSimpleDualPortINTEL = 5833, + SpvDecorationMergeINTEL = 5834, + SpvDecorationBankBitsINTEL = 5835, + SpvDecorationForcePow2DepthINTEL = 5836, + SpvDecorationBurstCoalesceINTEL = 5899, + SpvDecorationCacheSizeINTEL = 5900, + SpvDecorationDontStaticallyCoalesceINTEL = 5901, + SpvDecorationPrefetchINTEL = 5902, + SpvDecorationStallEnableINTEL = 5905, + SpvDecorationFuseLoopsInFunctionINTEL = 5907, + SpvDecorationBufferLocationINTEL = 5921, + SpvDecorationIOPipeStorageINTEL = 5944, + SpvDecorationFunctionFloatingPointModeINTEL = 6080, + SpvDecorationSingleElementVectorINTEL = 6085, + SpvDecorationVectorComputeCallableFunctionINTEL = 6087, SpvDecorationMax = 0x7fffffff, } SpvDecoration; @@ -458,8 +597,10 @@ typedef enum SpvBuiltIn_ { SpvBuiltInBaseVertex = 4424, SpvBuiltInBaseInstance = 4425, SpvBuiltInDrawIndex = 4426, + SpvBuiltInPrimitiveShadingRateKHR = 4432, SpvBuiltInDeviceIndex = 4438, SpvBuiltInViewIndex = 4440, + SpvBuiltInShadingRateKHR = 4444, SpvBuiltInBaryCoordNoPerspAMD = 4992, SpvBuiltInBaryCoordNoPerspCentroidAMD = 4993, SpvBuiltInBaryCoordNoPerspSampleAMD = 4994, @@ -474,6 +615,52 @@ typedef enum SpvBuiltIn_ { SpvBuiltInPositionPerViewNV = 5261, SpvBuiltInViewportMaskPerViewNV = 5262, SpvBuiltInFullyCoveredEXT = 5264, + SpvBuiltInTaskCountNV = 5274, + SpvBuiltInPrimitiveCountNV = 5275, + SpvBuiltInPrimitiveIndicesNV = 5276, + SpvBuiltInClipDistancePerViewNV = 5277, + SpvBuiltInCullDistancePerViewNV = 5278, + SpvBuiltInLayerPerViewNV = 5279, + SpvBuiltInMeshViewCountNV = 5280, + SpvBuiltInMeshViewIndicesNV = 5281, + SpvBuiltInBaryCoordNV = 5286, + SpvBuiltInBaryCoordNoPerspNV = 5287, + SpvBuiltInFragSizeEXT = 5292, + SpvBuiltInFragmentSizeNV = 5292, + SpvBuiltInFragInvocationCountEXT = 5293, + SpvBuiltInInvocationsPerPixelNV = 5293, + SpvBuiltInLaunchIdKHR = 5319, + SpvBuiltInLaunchIdNV = 5319, + SpvBuiltInLaunchSizeKHR = 5320, + SpvBuiltInLaunchSizeNV = 5320, +
SpvBuiltInWorldRayOriginKHR = 5321, + SpvBuiltInWorldRayOriginNV = 5321, + SpvBuiltInWorldRayDirectionKHR = 5322, + SpvBuiltInWorldRayDirectionNV = 5322, + SpvBuiltInObjectRayOriginKHR = 5323, + SpvBuiltInObjectRayOriginNV = 5323, + SpvBuiltInObjectRayDirectionKHR = 5324, + SpvBuiltInObjectRayDirectionNV = 5324, + SpvBuiltInRayTminKHR = 5325, + SpvBuiltInRayTminNV = 5325, + SpvBuiltInRayTmaxKHR = 5326, + SpvBuiltInRayTmaxNV = 5326, + SpvBuiltInInstanceCustomIndexKHR = 5327, + SpvBuiltInInstanceCustomIndexNV = 5327, + SpvBuiltInObjectToWorldKHR = 5330, + SpvBuiltInObjectToWorldNV = 5330, + SpvBuiltInWorldToObjectKHR = 5331, + SpvBuiltInWorldToObjectNV = 5331, + SpvBuiltInHitTNV = 5332, + SpvBuiltInHitKindKHR = 5333, + SpvBuiltInHitKindNV = 5333, + SpvBuiltInIncomingRayFlagsKHR = 5351, + SpvBuiltInIncomingRayFlagsNV = 5351, + SpvBuiltInRayGeometryIndexKHR = 5352, + SpvBuiltInWarpsPerSMNV = 5374, + SpvBuiltInSMCountNV = 5375, + SpvBuiltInWarpIDNV = 5376, + SpvBuiltInSMIDNV = 5377, SpvBuiltInMax = 0x7fffffff, } SpvBuiltIn; @@ -494,6 +681,19 @@ typedef enum SpvLoopControlShift_ { SpvLoopControlDontUnrollShift = 1, SpvLoopControlDependencyInfiniteShift = 2, SpvLoopControlDependencyLengthShift = 3, + SpvLoopControlMinIterationsShift = 4, + SpvLoopControlMaxIterationsShift = 5, + SpvLoopControlIterationMultipleShift = 6, + SpvLoopControlPeelCountShift = 7, + SpvLoopControlPartialCountShift = 8, + SpvLoopControlInitiationIntervalINTELShift = 16, + SpvLoopControlMaxConcurrencyINTELShift = 17, + SpvLoopControlDependencyArrayINTELShift = 18, + SpvLoopControlPipelineEnableINTELShift = 19, + SpvLoopControlLoopCoalesceINTELShift = 20, + SpvLoopControlMaxInterleavingINTELShift = 21, + SpvLoopControlSpeculatedIterationsINTELShift = 22, + SpvLoopControlNoFusionINTELShift = 23, SpvLoopControlMax = 0x7fffffff, } SpvLoopControlShift; @@ -503,6 +703,19 @@ typedef enum SpvLoopControlMask_ { SpvLoopControlDontUnrollMask = 0x00000002, SpvLoopControlDependencyInfiniteMask = 0x00000004, SpvLoopControlDependencyLengthMask = 0x00000008, + SpvLoopControlMinIterationsMask = 0x00000010, + SpvLoopControlMaxIterationsMask = 0x00000020, + SpvLoopControlIterationMultipleMask = 0x00000040, + SpvLoopControlPeelCountMask = 0x00000080, + SpvLoopControlPartialCountMask = 0x00000100, + SpvLoopControlInitiationIntervalINTELMask = 0x00010000, + SpvLoopControlMaxConcurrencyINTELMask = 0x00020000, + SpvLoopControlDependencyArrayINTELMask = 0x00040000, + SpvLoopControlPipelineEnableINTELMask = 0x00080000, + SpvLoopControlLoopCoalesceINTELMask = 0x00100000, + SpvLoopControlMaxInterleavingINTELMask = 0x00200000, + SpvLoopControlSpeculatedIterationsINTELMask = 0x00400000, + SpvLoopControlNoFusionINTELMask = 0x00800000, } SpvLoopControlMask; typedef enum SpvFunctionControlShift_ { @@ -532,6 +745,13 @@ typedef enum SpvMemorySemanticsShift_ { SpvMemorySemanticsCrossWorkgroupMemoryShift = 9, SpvMemorySemanticsAtomicCounterMemoryShift = 10, SpvMemorySemanticsImageMemoryShift = 11, + SpvMemorySemanticsOutputMemoryShift = 12, + SpvMemorySemanticsOutputMemoryKHRShift = 12, + SpvMemorySemanticsMakeAvailableShift = 13, + SpvMemorySemanticsMakeAvailableKHRShift = 13, + SpvMemorySemanticsMakeVisibleShift = 14, + SpvMemorySemanticsMakeVisibleKHRShift = 14, + SpvMemorySemanticsVolatileShift = 15, SpvMemorySemanticsMax = 0x7fffffff, } SpvMemorySemanticsShift; @@ -547,12 +767,25 @@ typedef enum SpvMemorySemanticsMask_ { SpvMemorySemanticsCrossWorkgroupMemoryMask = 0x00000200, SpvMemorySemanticsAtomicCounterMemoryMask = 0x00000400, 
SpvMemorySemanticsImageMemoryMask = 0x00000800, + SpvMemorySemanticsOutputMemoryMask = 0x00001000, + SpvMemorySemanticsOutputMemoryKHRMask = 0x00001000, + SpvMemorySemanticsMakeAvailableMask = 0x00002000, + SpvMemorySemanticsMakeAvailableKHRMask = 0x00002000, + SpvMemorySemanticsMakeVisibleMask = 0x00004000, + SpvMemorySemanticsMakeVisibleKHRMask = 0x00004000, + SpvMemorySemanticsVolatileMask = 0x00008000, } SpvMemorySemanticsMask; typedef enum SpvMemoryAccessShift_ { SpvMemoryAccessVolatileShift = 0, SpvMemoryAccessAlignedShift = 1, SpvMemoryAccessNontemporalShift = 2, + SpvMemoryAccessMakePointerAvailableShift = 3, + SpvMemoryAccessMakePointerAvailableKHRShift = 3, + SpvMemoryAccessMakePointerVisibleShift = 4, + SpvMemoryAccessMakePointerVisibleKHRShift = 4, + SpvMemoryAccessNonPrivatePointerShift = 5, + SpvMemoryAccessNonPrivatePointerKHRShift = 5, SpvMemoryAccessMax = 0x7fffffff, } SpvMemoryAccessShift; @@ -561,6 +794,12 @@ typedef enum SpvMemoryAccessMask_ { SpvMemoryAccessVolatileMask = 0x00000001, SpvMemoryAccessAlignedMask = 0x00000002, SpvMemoryAccessNontemporalMask = 0x00000004, + SpvMemoryAccessMakePointerAvailableMask = 0x00000008, + SpvMemoryAccessMakePointerAvailableKHRMask = 0x00000008, + SpvMemoryAccessMakePointerVisibleMask = 0x00000010, + SpvMemoryAccessMakePointerVisibleKHRMask = 0x00000010, + SpvMemoryAccessNonPrivatePointerMask = 0x00000020, + SpvMemoryAccessNonPrivatePointerKHRMask = 0x00000020, } SpvMemoryAccessMask; typedef enum SpvScope_ { @@ -569,6 +808,9 @@ typedef enum SpvScope_ { SpvScopeWorkgroup = 2, SpvScopeSubgroup = 3, SpvScopeInvocation = 4, + SpvScopeQueueFamily = 5, + SpvScopeQueueFamilyKHR = 5, + SpvScopeShaderCallKHR = 6, SpvScopeMax = 0x7fffffff, } SpvScope; @@ -668,8 +910,14 @@ typedef enum SpvCapability_ { SpvCapabilityGroupNonUniformShuffleRelative = 66, SpvCapabilityGroupNonUniformClustered = 67, SpvCapabilityGroupNonUniformQuad = 68, + SpvCapabilityShaderLayer = 69, + SpvCapabilityShaderViewportIndex = 70, + SpvCapabilityFragmentShadingRateKHR = 4422, SpvCapabilitySubgroupBallotKHR = 4423, SpvCapabilityDrawParameters = 4427, + SpvCapabilityWorkgroupMemoryExplicitLayoutKHR = 4428, + SpvCapabilityWorkgroupMemoryExplicitLayout8BitAccessKHR = 4429, + SpvCapabilityWorkgroupMemoryExplicitLayout16BitAccessKHR = 4430, SpvCapabilitySubgroupVoteKHR = 4431, SpvCapabilityStorageBuffer16BitAccess = 4433, SpvCapabilityStorageUniformBufferBlock16 = 4433, @@ -686,11 +934,22 @@ typedef enum SpvCapability_ { SpvCapabilityStorageBuffer8BitAccess = 4448, SpvCapabilityUniformAndStorageBuffer8BitAccess = 4449, SpvCapabilityStoragePushConstant8 = 4450, + SpvCapabilityDenormPreserve = 4464, + SpvCapabilityDenormFlushToZero = 4465, + SpvCapabilitySignedZeroInfNanPreserve = 4466, + SpvCapabilityRoundingModeRTE = 4467, + SpvCapabilityRoundingModeRTZ = 4468, + SpvCapabilityRayQueryProvisionalKHR = 4471, + SpvCapabilityRayQueryKHR = 4472, + SpvCapabilityRayTraversalPrimitiveCullingKHR = 4478, + SpvCapabilityRayTracingKHR = 4479, SpvCapabilityFloat16ImageAMD = 5008, SpvCapabilityImageGatherBiasLodAMD = 5009, SpvCapabilityFragmentMaskAMD = 5010, SpvCapabilityStencilExportEXT = 5013, SpvCapabilityImageReadWriteLodAMD = 5015, + SpvCapabilityInt64ImageEXT = 5016, + SpvCapabilityShaderClockKHR = 5055, SpvCapabilitySampleMaskOverrideCoverageNV = 5249, SpvCapabilityGeometryShaderPassthroughNV = 5251, SpvCapabilityShaderViewportIndexLayerEXT = 5254, @@ -699,25 +958,168 @@ typedef enum SpvCapability_ { SpvCapabilityShaderStereoViewNV = 5259, SpvCapabilityPerViewAttributesNV = 5260, 
SpvCapabilityFragmentFullyCoveredEXT = 5265, + SpvCapabilityMeshShadingNV = 5266, + SpvCapabilityImageFootprintNV = 5282, + SpvCapabilityFragmentBarycentricNV = 5284, + SpvCapabilityComputeDerivativeGroupQuadsNV = 5288, + SpvCapabilityFragmentDensityEXT = 5291, + SpvCapabilityShadingRateNV = 5291, SpvCapabilityGroupNonUniformPartitionedNV = 5297, + SpvCapabilityShaderNonUniform = 5301, SpvCapabilityShaderNonUniformEXT = 5301, + SpvCapabilityRuntimeDescriptorArray = 5302, SpvCapabilityRuntimeDescriptorArrayEXT = 5302, + SpvCapabilityInputAttachmentArrayDynamicIndexing = 5303, SpvCapabilityInputAttachmentArrayDynamicIndexingEXT = 5303, + SpvCapabilityUniformTexelBufferArrayDynamicIndexing = 5304, SpvCapabilityUniformTexelBufferArrayDynamicIndexingEXT = 5304, + SpvCapabilityStorageTexelBufferArrayDynamicIndexing = 5305, SpvCapabilityStorageTexelBufferArrayDynamicIndexingEXT = 5305, + SpvCapabilityUniformBufferArrayNonUniformIndexing = 5306, SpvCapabilityUniformBufferArrayNonUniformIndexingEXT = 5306, + SpvCapabilitySampledImageArrayNonUniformIndexing = 5307, SpvCapabilitySampledImageArrayNonUniformIndexingEXT = 5307, + SpvCapabilityStorageBufferArrayNonUniformIndexing = 5308, SpvCapabilityStorageBufferArrayNonUniformIndexingEXT = 5308, + SpvCapabilityStorageImageArrayNonUniformIndexing = 5309, SpvCapabilityStorageImageArrayNonUniformIndexingEXT = 5309, + SpvCapabilityInputAttachmentArrayNonUniformIndexing = 5310, SpvCapabilityInputAttachmentArrayNonUniformIndexingEXT = 5310, + SpvCapabilityUniformTexelBufferArrayNonUniformIndexing = 5311, SpvCapabilityUniformTexelBufferArrayNonUniformIndexingEXT = 5311, + SpvCapabilityStorageTexelBufferArrayNonUniformIndexing = 5312, SpvCapabilityStorageTexelBufferArrayNonUniformIndexingEXT = 5312, + SpvCapabilityRayTracingNV = 5340, + SpvCapabilityVulkanMemoryModel = 5345, + SpvCapabilityVulkanMemoryModelKHR = 5345, + SpvCapabilityVulkanMemoryModelDeviceScope = 5346, + SpvCapabilityVulkanMemoryModelDeviceScopeKHR = 5346, + SpvCapabilityPhysicalStorageBufferAddresses = 5347, + SpvCapabilityPhysicalStorageBufferAddressesEXT = 5347, + SpvCapabilityComputeDerivativeGroupLinearNV = 5350, + SpvCapabilityRayTracingProvisionalKHR = 5353, + SpvCapabilityCooperativeMatrixNV = 5357, + SpvCapabilityFragmentShaderSampleInterlockEXT = 5363, + SpvCapabilityFragmentShaderShadingRateInterlockEXT = 5372, + SpvCapabilityShaderSMBuiltinsNV = 5373, + SpvCapabilityFragmentShaderPixelInterlockEXT = 5378, + SpvCapabilityDemoteToHelperInvocationEXT = 5379, SpvCapabilitySubgroupShuffleINTEL = 5568, SpvCapabilitySubgroupBufferBlockIOINTEL = 5569, SpvCapabilitySubgroupImageBlockIOINTEL = 5570, + SpvCapabilitySubgroupImageMediaBlockIOINTEL = 5579, + SpvCapabilityRoundToInfinityINTEL = 5582, + SpvCapabilityFloatingPointModeINTEL = 5583, + SpvCapabilityIntegerFunctions2INTEL = 5584, + SpvCapabilityFunctionPointersINTEL = 5603, + SpvCapabilityIndirectReferencesINTEL = 5604, + SpvCapabilityAsmINTEL = 5606, + SpvCapabilityAtomicFloat32MinMaxEXT = 5612, + SpvCapabilityAtomicFloat64MinMaxEXT = 5613, + SpvCapabilityAtomicFloat16MinMaxEXT = 5616, + SpvCapabilityVectorComputeINTEL = 5617, + SpvCapabilityVectorAnyINTEL = 5619, + SpvCapabilitySubgroupAvcMotionEstimationINTEL = 5696, + SpvCapabilitySubgroupAvcMotionEstimationIntraINTEL = 5697, + SpvCapabilitySubgroupAvcMotionEstimationChromaINTEL = 5698, + SpvCapabilityVariableLengthArrayINTEL = 5817, + SpvCapabilityFunctionFloatControlINTEL = 5821, + SpvCapabilityFPGAMemoryAttributesINTEL = 5824, + SpvCapabilityFPFastMathModeINTEL = 5837, + 
SpvCapabilityArbitraryPrecisionIntegersINTEL = 5844, + SpvCapabilityUnstructuredLoopControlsINTEL = 5886, + SpvCapabilityFPGALoopControlsINTEL = 5888, + SpvCapabilityKernelAttributesINTEL = 5892, + SpvCapabilityFPGAKernelAttributesINTEL = 5897, + SpvCapabilityFPGAMemoryAccessesINTEL = 5898, + SpvCapabilityFPGAClusterAttributesINTEL = 5904, + SpvCapabilityLoopFuseINTEL = 5906, + SpvCapabilityFPGABufferLocationINTEL = 5920, + SpvCapabilityUSMStorageClassesINTEL = 5935, + SpvCapabilityIOPipesINTEL = 5943, + SpvCapabilityBlockingPipesINTEL = 5945, + SpvCapabilityFPGARegINTEL = 5948, + SpvCapabilityAtomicFloat32AddEXT = 6033, + SpvCapabilityAtomicFloat64AddEXT = 6034, + SpvCapabilityLongConstantCompositeINTEL = 6089, SpvCapabilityMax = 0x7fffffff, } SpvCapability; +typedef enum SpvRayFlagsShift_ { + SpvRayFlagsOpaqueKHRShift = 0, + SpvRayFlagsNoOpaqueKHRShift = 1, + SpvRayFlagsTerminateOnFirstHitKHRShift = 2, + SpvRayFlagsSkipClosestHitShaderKHRShift = 3, + SpvRayFlagsCullBackFacingTrianglesKHRShift = 4, + SpvRayFlagsCullFrontFacingTrianglesKHRShift = 5, + SpvRayFlagsCullOpaqueKHRShift = 6, + SpvRayFlagsCullNoOpaqueKHRShift = 7, + SpvRayFlagsSkipTrianglesKHRShift = 8, + SpvRayFlagsSkipAABBsKHRShift = 9, + SpvRayFlagsMax = 0x7fffffff, +} SpvRayFlagsShift; + +typedef enum SpvRayFlagsMask_ { + SpvRayFlagsMaskNone = 0, + SpvRayFlagsOpaqueKHRMask = 0x00000001, + SpvRayFlagsNoOpaqueKHRMask = 0x00000002, + SpvRayFlagsTerminateOnFirstHitKHRMask = 0x00000004, + SpvRayFlagsSkipClosestHitShaderKHRMask = 0x00000008, + SpvRayFlagsCullBackFacingTrianglesKHRMask = 0x00000010, + SpvRayFlagsCullFrontFacingTrianglesKHRMask = 0x00000020, + SpvRayFlagsCullOpaqueKHRMask = 0x00000040, + SpvRayFlagsCullNoOpaqueKHRMask = 0x00000080, + SpvRayFlagsSkipTrianglesKHRMask = 0x00000100, + SpvRayFlagsSkipAABBsKHRMask = 0x00000200, +} SpvRayFlagsMask; + +typedef enum SpvRayQueryIntersection_ { + SpvRayQueryIntersectionRayQueryCandidateIntersectionKHR = 0, + SpvRayQueryIntersectionRayQueryCommittedIntersectionKHR = 1, + SpvRayQueryIntersectionMax = 0x7fffffff, +} SpvRayQueryIntersection; + +typedef enum SpvRayQueryCommittedIntersectionType_ { + SpvRayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionNoneKHR = 0, + SpvRayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionTriangleKHR = 1, + SpvRayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionGeneratedKHR = 2, + SpvRayQueryCommittedIntersectionTypeMax = 0x7fffffff, +} SpvRayQueryCommittedIntersectionType; + +typedef enum SpvRayQueryCandidateIntersectionType_ { + SpvRayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionTriangleKHR = 0, + SpvRayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionAABBKHR = 1, + SpvRayQueryCandidateIntersectionTypeMax = 0x7fffffff, +} SpvRayQueryCandidateIntersectionType; + +typedef enum SpvFragmentShadingRateShift_ { + SpvFragmentShadingRateVertical2PixelsShift = 0, + SpvFragmentShadingRateVertical4PixelsShift = 1, + SpvFragmentShadingRateHorizontal2PixelsShift = 2, + SpvFragmentShadingRateHorizontal4PixelsShift = 3, + SpvFragmentShadingRateMax = 0x7fffffff, +} SpvFragmentShadingRateShift; + +typedef enum SpvFragmentShadingRateMask_ { + SpvFragmentShadingRateMaskNone = 0, + SpvFragmentShadingRateVertical2PixelsMask = 0x00000001, + SpvFragmentShadingRateVertical4PixelsMask = 0x00000002, + SpvFragmentShadingRateHorizontal2PixelsMask = 0x00000004, + SpvFragmentShadingRateHorizontal4PixelsMask = 0x00000008, +} SpvFragmentShadingRateMask; + +typedef enum SpvFPDenormMode_ { + SpvFPDenormModePreserve = 0, + 
SpvFPDenormModeFlushToZero = 1, + SpvFPDenormModeMax = 0x7fffffff, +} SpvFPDenormMode; + +typedef enum SpvFPOperationMode_ { + SpvFPOperationModeIEEE = 0, + SpvFPOperationModeALT = 1, + SpvFPOperationModeMax = 0x7fffffff, +} SpvFPOperationMode; + typedef enum SpvOp_ { SpvOpNop = 0, SpvOpUndef = 1, @@ -1059,12 +1461,29 @@ typedef enum SpvOp_ { SpvOpGroupNonUniformLogicalXor = 364, SpvOpGroupNonUniformQuadBroadcast = 365, SpvOpGroupNonUniformQuadSwap = 366, + SpvOpCopyLogical = 400, + SpvOpPtrEqual = 401, + SpvOpPtrNotEqual = 402, + SpvOpPtrDiff = 403, + SpvOpTerminateInvocation = 4416, SpvOpSubgroupBallotKHR = 4421, SpvOpSubgroupFirstInvocationKHR = 4422, SpvOpSubgroupAllKHR = 4428, SpvOpSubgroupAnyKHR = 4429, SpvOpSubgroupAllEqualKHR = 4430, SpvOpSubgroupReadInvocationKHR = 4432, + SpvOpTraceRayKHR = 4445, + SpvOpExecuteCallableKHR = 4446, + SpvOpConvertUToAccelerationStructureKHR = 4447, + SpvOpIgnoreIntersectionKHR = 4448, + SpvOpTerminateRayKHR = 4449, + SpvOpTypeRayQueryKHR = 4472, + SpvOpRayQueryInitializeKHR = 4473, + SpvOpRayQueryTerminateKHR = 4474, + SpvOpRayQueryGenerateIntersectionKHR = 4475, + SpvOpRayQueryConfirmIntersectionKHR = 4476, + SpvOpRayQueryProceedKHR = 4477, + SpvOpRayQueryGetIntersectionTypeKHR = 4479, SpvOpGroupIAddNonUniformAMD = 5000, SpvOpGroupFAddNonUniformAMD = 5001, SpvOpGroupFMinNonUniformAMD = 5002, @@ -1075,7 +1494,27 @@ typedef enum SpvOp_ { SpvOpGroupSMaxNonUniformAMD = 5007, SpvOpFragmentMaskFetchAMD = 5011, SpvOpFragmentFetchAMD = 5012, + SpvOpReadClockKHR = 5056, + SpvOpImageSampleFootprintNV = 5283, SpvOpGroupNonUniformPartitionNV = 5296, + SpvOpWritePackedPrimitiveIndices4x8NV = 5299, + SpvOpReportIntersectionKHR = 5334, + SpvOpReportIntersectionNV = 5334, + SpvOpIgnoreIntersectionNV = 5335, + SpvOpTerminateRayNV = 5336, + SpvOpTraceNV = 5337, + SpvOpTypeAccelerationStructureKHR = 5341, + SpvOpTypeAccelerationStructureNV = 5341, + SpvOpExecuteCallableNV = 5344, + SpvOpTypeCooperativeMatrixNV = 5358, + SpvOpCooperativeMatrixLoadNV = 5359, + SpvOpCooperativeMatrixStoreNV = 5360, + SpvOpCooperativeMatrixMulAddNV = 5361, + SpvOpCooperativeMatrixLengthNV = 5362, + SpvOpBeginInvocationInterlockEXT = 5364, + SpvOpEndInvocationInterlockEXT = 5365, + SpvOpDemoteToHelperInvocationEXT = 5380, + SpvOpIsHelperInvocationEXT = 5381, SpvOpSubgroupShuffleINTEL = 5571, SpvOpSubgroupShuffleDownINTEL = 5572, SpvOpSubgroupShuffleUpINTEL = 5573, @@ -1084,10 +1523,767 @@ typedef enum SpvOp_ { SpvOpSubgroupBlockWriteINTEL = 5576, SpvOpSubgroupImageBlockReadINTEL = 5577, SpvOpSubgroupImageBlockWriteINTEL = 5578, + SpvOpSubgroupImageMediaBlockReadINTEL = 5580, + SpvOpSubgroupImageMediaBlockWriteINTEL = 5581, + SpvOpUCountLeadingZerosINTEL = 5585, + SpvOpUCountTrailingZerosINTEL = 5586, + SpvOpAbsISubINTEL = 5587, + SpvOpAbsUSubINTEL = 5588, + SpvOpIAddSatINTEL = 5589, + SpvOpUAddSatINTEL = 5590, + SpvOpIAverageINTEL = 5591, + SpvOpUAverageINTEL = 5592, + SpvOpIAverageRoundedINTEL = 5593, + SpvOpUAverageRoundedINTEL = 5594, + SpvOpISubSatINTEL = 5595, + SpvOpUSubSatINTEL = 5596, + SpvOpIMul32x16INTEL = 5597, + SpvOpUMul32x16INTEL = 5598, + SpvOpConstFunctionPointerINTEL = 5600, + SpvOpFunctionPointerCallINTEL = 5601, + SpvOpAsmTargetINTEL = 5609, + SpvOpAsmINTEL = 5610, + SpvOpAsmCallINTEL = 5611, + SpvOpAtomicFMinEXT = 5614, + SpvOpAtomicFMaxEXT = 5615, + SpvOpDecorateString = 5632, SpvOpDecorateStringGOOGLE = 5632, + SpvOpMemberDecorateString = 5633, SpvOpMemberDecorateStringGOOGLE = 5633, + SpvOpVmeImageINTEL = 5699, + SpvOpTypeVmeImageINTEL = 5700, + 
SpvOpTypeAvcImePayloadINTEL = 5701, + SpvOpTypeAvcRefPayloadINTEL = 5702, + SpvOpTypeAvcSicPayloadINTEL = 5703, + SpvOpTypeAvcMcePayloadINTEL = 5704, + SpvOpTypeAvcMceResultINTEL = 5705, + SpvOpTypeAvcImeResultINTEL = 5706, + SpvOpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707, + SpvOpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708, + SpvOpTypeAvcImeSingleReferenceStreaminINTEL = 5709, + SpvOpTypeAvcImeDualReferenceStreaminINTEL = 5710, + SpvOpTypeAvcRefResultINTEL = 5711, + SpvOpTypeAvcSicResultINTEL = 5712, + SpvOpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713, + SpvOpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714, + SpvOpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715, + SpvOpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716, + SpvOpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717, + SpvOpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718, + SpvOpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719, + SpvOpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720, + SpvOpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721, + SpvOpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722, + SpvOpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723, + SpvOpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724, + SpvOpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725, + SpvOpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726, + SpvOpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727, + SpvOpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728, + SpvOpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729, + SpvOpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730, + SpvOpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731, + SpvOpSubgroupAvcMceConvertToImePayloadINTEL = 5732, + SpvOpSubgroupAvcMceConvertToImeResultINTEL = 5733, + SpvOpSubgroupAvcMceConvertToRefPayloadINTEL = 5734, + SpvOpSubgroupAvcMceConvertToRefResultINTEL = 5735, + SpvOpSubgroupAvcMceConvertToSicPayloadINTEL = 5736, + SpvOpSubgroupAvcMceConvertToSicResultINTEL = 5737, + SpvOpSubgroupAvcMceGetMotionVectorsINTEL = 5738, + SpvOpSubgroupAvcMceGetInterDistortionsINTEL = 5739, + SpvOpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740, + SpvOpSubgroupAvcMceGetInterMajorShapeINTEL = 5741, + SpvOpSubgroupAvcMceGetInterMinorShapeINTEL = 5742, + SpvOpSubgroupAvcMceGetInterDirectionsINTEL = 5743, + SpvOpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744, + SpvOpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745, + SpvOpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746, + SpvOpSubgroupAvcImeInitializeINTEL = 5747, + SpvOpSubgroupAvcImeSetSingleReferenceINTEL = 5748, + SpvOpSubgroupAvcImeSetDualReferenceINTEL = 5749, + SpvOpSubgroupAvcImeRefWindowSizeINTEL = 5750, + SpvOpSubgroupAvcImeAdjustRefOffsetINTEL = 5751, + SpvOpSubgroupAvcImeConvertToMcePayloadINTEL = 5752, + SpvOpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753, + SpvOpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754, + SpvOpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755, + SpvOpSubgroupAvcImeSetWeightedSadINTEL = 5756, + SpvOpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757, + SpvOpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758, + SpvOpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759, + SpvOpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760, + SpvOpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761, + SpvOpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762, + 
SpvOpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763, + SpvOpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764, + SpvOpSubgroupAvcImeConvertToMceResultINTEL = 5765, + SpvOpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766, + SpvOpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767, + SpvOpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768, + SpvOpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769, + SpvOpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770, + SpvOpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771, + SpvOpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772, + SpvOpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773, + SpvOpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774, + SpvOpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775, + SpvOpSubgroupAvcImeGetBorderReachedINTEL = 5776, + SpvOpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777, + SpvOpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778, + SpvOpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779, + SpvOpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780, + SpvOpSubgroupAvcFmeInitializeINTEL = 5781, + SpvOpSubgroupAvcBmeInitializeINTEL = 5782, + SpvOpSubgroupAvcRefConvertToMcePayloadINTEL = 5783, + SpvOpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784, + SpvOpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785, + SpvOpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786, + SpvOpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787, + SpvOpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788, + SpvOpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789, + SpvOpSubgroupAvcRefConvertToMceResultINTEL = 5790, + SpvOpSubgroupAvcSicInitializeINTEL = 5791, + SpvOpSubgroupAvcSicConfigureSkcINTEL = 5792, + SpvOpSubgroupAvcSicConfigureIpeLumaINTEL = 5793, + SpvOpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794, + SpvOpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795, + SpvOpSubgroupAvcSicConvertToMcePayloadINTEL = 5796, + SpvOpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797, + SpvOpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798, + SpvOpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799, + SpvOpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800, + SpvOpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801, + SpvOpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802, + SpvOpSubgroupAvcSicEvaluateIpeINTEL = 5803, + SpvOpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804, + SpvOpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805, + SpvOpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806, + SpvOpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807, + SpvOpSubgroupAvcSicConvertToMceResultINTEL = 5808, + SpvOpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809, + SpvOpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810, + SpvOpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811, + SpvOpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812, + SpvOpSubgroupAvcSicGetIpeChromaModeINTEL = 5813, + SpvOpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, + SpvOpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, + SpvOpSubgroupAvcSicGetInterRawSadsINTEL = 5816, + SpvOpVariableLengthArrayINTEL = 5818, + SpvOpSaveMemoryINTEL = 5819, + SpvOpRestoreMemoryINTEL = 5820, + SpvOpLoopControlINTEL = 5887, + SpvOpPtrCastToCrossWorkgroupINTEL = 5934, + SpvOpCrossWorkgroupCastToPtrINTEL = 5938, + 
SpvOpReadPipeBlockingINTEL = 5946,
+    SpvOpWritePipeBlockingINTEL = 5947,
+    SpvOpFPGARegINTEL = 5949,
+    SpvOpRayQueryGetRayTMinKHR = 6016,
+    SpvOpRayQueryGetRayFlagsKHR = 6017,
+    SpvOpRayQueryGetIntersectionTKHR = 6018,
+    SpvOpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019,
+    SpvOpRayQueryGetIntersectionInstanceIdKHR = 6020,
+    SpvOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021,
+    SpvOpRayQueryGetIntersectionGeometryIndexKHR = 6022,
+    SpvOpRayQueryGetIntersectionPrimitiveIndexKHR = 6023,
+    SpvOpRayQueryGetIntersectionBarycentricsKHR = 6024,
+    SpvOpRayQueryGetIntersectionFrontFaceKHR = 6025,
+    SpvOpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026,
+    SpvOpRayQueryGetIntersectionObjectRayDirectionKHR = 6027,
+    SpvOpRayQueryGetIntersectionObjectRayOriginKHR = 6028,
+    SpvOpRayQueryGetWorldRayDirectionKHR = 6029,
+    SpvOpRayQueryGetWorldRayOriginKHR = 6030,
+    SpvOpRayQueryGetIntersectionObjectToWorldKHR = 6031,
+    SpvOpRayQueryGetIntersectionWorldToObjectKHR = 6032,
+    SpvOpAtomicFAddEXT = 6035,
+    SpvOpTypeBufferSurfaceINTEL = 6086,
+    SpvOpTypeStructContinuedINTEL = 6090,
+    SpvOpConstantCompositeContinuedINTEL = 6091,
+    SpvOpSpecConstantCompositeContinuedINTEL = 6092,
     SpvOpMax = 0x7fffffff,
 } SpvOp;

-#endif // #ifndef spirv_H
+#ifdef SPV_ENABLE_UTILITY_CODE
+inline void SpvHasResultAndType(SpvOp opcode, bool *hasResult, bool *hasResultType) {
+    *hasResult = *hasResultType = false;
+    switch (opcode) {
+    default: /* unknown opcode */ break;
+    case SpvOpNop: *hasResult = false; *hasResultType = false; break;
+    case SpvOpUndef: *hasResult = true; *hasResultType = true; break;
+    case SpvOpSourceContinued: *hasResult = false; *hasResultType = false; break;
+    case SpvOpSource: *hasResult = false; *hasResultType = false; break;
+    case SpvOpSourceExtension: *hasResult = false; *hasResultType = false; break;
+    case SpvOpName: *hasResult = false; *hasResultType = false; break;
+    case SpvOpMemberName: *hasResult = false; *hasResultType = false; break;
+    case SpvOpString: *hasResult = true; *hasResultType = false; break;
+    case SpvOpLine: *hasResult = false; *hasResultType = false; break;
+    case SpvOpExtension: *hasResult = false; *hasResultType = false; break;
+    case SpvOpExtInstImport: *hasResult = true; *hasResultType = false; break;
+    case SpvOpExtInst: *hasResult = true; *hasResultType = true; break;
+    case SpvOpMemoryModel: *hasResult = false; *hasResultType = false; break;
+    case SpvOpEntryPoint: *hasResult = false; *hasResultType = false; break;
+    case SpvOpExecutionMode: *hasResult = false; *hasResultType = false; break;
+    case SpvOpCapability: *hasResult = false; *hasResultType = false; break;
+    case SpvOpTypeVoid: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeBool: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeInt: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeFloat: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeVector: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeMatrix: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeImage: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeSampler: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeSampledImage: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeArray: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeRuntimeArray: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeStruct: *hasResult = true; *hasResultType = false; break;
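    /* Callers use the two flags to locate an instruction's result ID: when
     * *hasResultType is true, word 1 of the instruction is the result-type ID
     * and word 2 is the result ID; when only *hasResult is true, word 1 is the
     * result ID itself. A worked decoding example follows the spirv.hpp hunk
     * further below. */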
+ case SpvOpTypeOpaque: *hasResult = true; *hasResultType = false; break; + case SpvOpTypePointer: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeFunction: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeEvent: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeDeviceEvent: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeReserveId: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeQueue: *hasResult = true; *hasResultType = false; break; + case SpvOpTypePipe: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeForwardPointer: *hasResult = false; *hasResultType = false; break; + case SpvOpConstantTrue: *hasResult = true; *hasResultType = true; break; + case SpvOpConstantFalse: *hasResult = true; *hasResultType = true; break; + case SpvOpConstant: *hasResult = true; *hasResultType = true; break; + case SpvOpConstantComposite: *hasResult = true; *hasResultType = true; break; + case SpvOpConstantSampler: *hasResult = true; *hasResultType = true; break; + case SpvOpConstantNull: *hasResult = true; *hasResultType = true; break; + case SpvOpSpecConstantTrue: *hasResult = true; *hasResultType = true; break; + case SpvOpSpecConstantFalse: *hasResult = true; *hasResultType = true; break; + case SpvOpSpecConstant: *hasResult = true; *hasResultType = true; break; + case SpvOpSpecConstantComposite: *hasResult = true; *hasResultType = true; break; + case SpvOpSpecConstantOp: *hasResult = true; *hasResultType = true; break; + case SpvOpFunction: *hasResult = true; *hasResultType = true; break; + case SpvOpFunctionParameter: *hasResult = true; *hasResultType = true; break; + case SpvOpFunctionEnd: *hasResult = false; *hasResultType = false; break; + case SpvOpFunctionCall: *hasResult = true; *hasResultType = true; break; + case SpvOpVariable: *hasResult = true; *hasResultType = true; break; + case SpvOpImageTexelPointer: *hasResult = true; *hasResultType = true; break; + case SpvOpLoad: *hasResult = true; *hasResultType = true; break; + case SpvOpStore: *hasResult = false; *hasResultType = false; break; + case SpvOpCopyMemory: *hasResult = false; *hasResultType = false; break; + case SpvOpCopyMemorySized: *hasResult = false; *hasResultType = false; break; + case SpvOpAccessChain: *hasResult = true; *hasResultType = true; break; + case SpvOpInBoundsAccessChain: *hasResult = true; *hasResultType = true; break; + case SpvOpPtrAccessChain: *hasResult = true; *hasResultType = true; break; + case SpvOpArrayLength: *hasResult = true; *hasResultType = true; break; + case SpvOpGenericPtrMemSemantics: *hasResult = true; *hasResultType = true; break; + case SpvOpInBoundsPtrAccessChain: *hasResult = true; *hasResultType = true; break; + case SpvOpDecorate: *hasResult = false; *hasResultType = false; break; + case SpvOpMemberDecorate: *hasResult = false; *hasResultType = false; break; + case SpvOpDecorationGroup: *hasResult = true; *hasResultType = false; break; + case SpvOpGroupDecorate: *hasResult = false; *hasResultType = false; break; + case SpvOpGroupMemberDecorate: *hasResult = false; *hasResultType = false; break; + case SpvOpVectorExtractDynamic: *hasResult = true; *hasResultType = true; break; + case SpvOpVectorInsertDynamic: *hasResult = true; *hasResultType = true; break; + case SpvOpVectorShuffle: *hasResult = true; *hasResultType = true; break; + case SpvOpCompositeConstruct: *hasResult = true; *hasResultType = true; break; + case SpvOpCompositeExtract: *hasResult = true; *hasResultType = true; break; + case 
SpvOpCompositeInsert: *hasResult = true; *hasResultType = true; break; + case SpvOpCopyObject: *hasResult = true; *hasResultType = true; break; + case SpvOpTranspose: *hasResult = true; *hasResultType = true; break; + case SpvOpSampledImage: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleDrefImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageFetch: *hasResult = true; *hasResultType = true; break; + case SpvOpImageGather: *hasResult = true; *hasResultType = true; break; + case SpvOpImageDrefGather: *hasResult = true; *hasResultType = true; break; + case SpvOpImageRead: *hasResult = true; *hasResultType = true; break; + case SpvOpImageWrite: *hasResult = false; *hasResultType = false; break; + case SpvOpImage: *hasResult = true; *hasResultType = true; break; + case SpvOpImageQueryFormat: *hasResult = true; *hasResultType = true; break; + case SpvOpImageQueryOrder: *hasResult = true; *hasResultType = true; break; + case SpvOpImageQuerySizeLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageQuerySize: *hasResult = true; *hasResultType = true; break; + case SpvOpImageQueryLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageQueryLevels: *hasResult = true; *hasResultType = true; break; + case SpvOpImageQuerySamples: *hasResult = true; *hasResultType = true; break; + case SpvOpConvertFToU: *hasResult = true; *hasResultType = true; break; + case SpvOpConvertFToS: *hasResult = true; *hasResultType = true; break; + case SpvOpConvertSToF: *hasResult = true; *hasResultType = true; break; + case SpvOpConvertUToF: *hasResult = true; *hasResultType = true; break; + case SpvOpUConvert: *hasResult = true; *hasResultType = true; break; + case SpvOpSConvert: *hasResult = true; *hasResultType = true; break; + case SpvOpFConvert: *hasResult = true; *hasResultType = true; break; + case SpvOpQuantizeToF16: *hasResult = true; *hasResultType = true; break; + case SpvOpConvertPtrToU: *hasResult = true; *hasResultType = true; break; + case SpvOpSatConvertSToU: *hasResult = true; *hasResultType = true; break; + case SpvOpSatConvertUToS: *hasResult = true; *hasResultType = true; break; + case SpvOpConvertUToPtr: *hasResult = true; *hasResultType = true; break; + case SpvOpPtrCastToGeneric: *hasResult = true; *hasResultType = true; break; + case SpvOpGenericCastToPtr: *hasResult = true; *hasResultType = true; break; + case SpvOpGenericCastToPtrExplicit: *hasResult = true; *hasResultType = true; break; + case SpvOpBitcast: *hasResult = true; *hasResultType = true; break; + case SpvOpSNegate: *hasResult = true; *hasResultType = true; break; + case SpvOpFNegate: *hasResult = true; *hasResultType = true; break; + case SpvOpIAdd: *hasResult = true; *hasResultType = true; break; + case SpvOpFAdd: *hasResult = true; *hasResultType = true; break; + case SpvOpISub: *hasResult = true; *hasResultType = true; break; + case 
SpvOpFSub: *hasResult = true; *hasResultType = true; break; + case SpvOpIMul: *hasResult = true; *hasResultType = true; break; + case SpvOpFMul: *hasResult = true; *hasResultType = true; break; + case SpvOpUDiv: *hasResult = true; *hasResultType = true; break; + case SpvOpSDiv: *hasResult = true; *hasResultType = true; break; + case SpvOpFDiv: *hasResult = true; *hasResultType = true; break; + case SpvOpUMod: *hasResult = true; *hasResultType = true; break; + case SpvOpSRem: *hasResult = true; *hasResultType = true; break; + case SpvOpSMod: *hasResult = true; *hasResultType = true; break; + case SpvOpFRem: *hasResult = true; *hasResultType = true; break; + case SpvOpFMod: *hasResult = true; *hasResultType = true; break; + case SpvOpVectorTimesScalar: *hasResult = true; *hasResultType = true; break; + case SpvOpMatrixTimesScalar: *hasResult = true; *hasResultType = true; break; + case SpvOpVectorTimesMatrix: *hasResult = true; *hasResultType = true; break; + case SpvOpMatrixTimesVector: *hasResult = true; *hasResultType = true; break; + case SpvOpMatrixTimesMatrix: *hasResult = true; *hasResultType = true; break; + case SpvOpOuterProduct: *hasResult = true; *hasResultType = true; break; + case SpvOpDot: *hasResult = true; *hasResultType = true; break; + case SpvOpIAddCarry: *hasResult = true; *hasResultType = true; break; + case SpvOpISubBorrow: *hasResult = true; *hasResultType = true; break; + case SpvOpUMulExtended: *hasResult = true; *hasResultType = true; break; + case SpvOpSMulExtended: *hasResult = true; *hasResultType = true; break; + case SpvOpAny: *hasResult = true; *hasResultType = true; break; + case SpvOpAll: *hasResult = true; *hasResultType = true; break; + case SpvOpIsNan: *hasResult = true; *hasResultType = true; break; + case SpvOpIsInf: *hasResult = true; *hasResultType = true; break; + case SpvOpIsFinite: *hasResult = true; *hasResultType = true; break; + case SpvOpIsNormal: *hasResult = true; *hasResultType = true; break; + case SpvOpSignBitSet: *hasResult = true; *hasResultType = true; break; + case SpvOpLessOrGreater: *hasResult = true; *hasResultType = true; break; + case SpvOpOrdered: *hasResult = true; *hasResultType = true; break; + case SpvOpUnordered: *hasResult = true; *hasResultType = true; break; + case SpvOpLogicalEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpLogicalNotEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpLogicalOr: *hasResult = true; *hasResultType = true; break; + case SpvOpLogicalAnd: *hasResult = true; *hasResultType = true; break; + case SpvOpLogicalNot: *hasResult = true; *hasResultType = true; break; + case SpvOpSelect: *hasResult = true; *hasResultType = true; break; + case SpvOpIEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpINotEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpUGreaterThan: *hasResult = true; *hasResultType = true; break; + case SpvOpSGreaterThan: *hasResult = true; *hasResultType = true; break; + case SpvOpUGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpSGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpULessThan: *hasResult = true; *hasResultType = true; break; + case SpvOpSLessThan: *hasResult = true; *hasResultType = true; break; + case SpvOpULessThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpSLessThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpFOrdEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpFUnordEqual: 
*hasResult = true; *hasResultType = true; break; + case SpvOpFOrdNotEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpFUnordNotEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpFOrdLessThan: *hasResult = true; *hasResultType = true; break; + case SpvOpFUnordLessThan: *hasResult = true; *hasResultType = true; break; + case SpvOpFOrdGreaterThan: *hasResult = true; *hasResultType = true; break; + case SpvOpFUnordGreaterThan: *hasResult = true; *hasResultType = true; break; + case SpvOpFOrdLessThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpFUnordLessThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpFOrdGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpFUnordGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpShiftRightLogical: *hasResult = true; *hasResultType = true; break; + case SpvOpShiftRightArithmetic: *hasResult = true; *hasResultType = true; break; + case SpvOpShiftLeftLogical: *hasResult = true; *hasResultType = true; break; + case SpvOpBitwiseOr: *hasResult = true; *hasResultType = true; break; + case SpvOpBitwiseXor: *hasResult = true; *hasResultType = true; break; + case SpvOpBitwiseAnd: *hasResult = true; *hasResultType = true; break; + case SpvOpNot: *hasResult = true; *hasResultType = true; break; + case SpvOpBitFieldInsert: *hasResult = true; *hasResultType = true; break; + case SpvOpBitFieldSExtract: *hasResult = true; *hasResultType = true; break; + case SpvOpBitFieldUExtract: *hasResult = true; *hasResultType = true; break; + case SpvOpBitReverse: *hasResult = true; *hasResultType = true; break; + case SpvOpBitCount: *hasResult = true; *hasResultType = true; break; + case SpvOpDPdx: *hasResult = true; *hasResultType = true; break; + case SpvOpDPdy: *hasResult = true; *hasResultType = true; break; + case SpvOpFwidth: *hasResult = true; *hasResultType = true; break; + case SpvOpDPdxFine: *hasResult = true; *hasResultType = true; break; + case SpvOpDPdyFine: *hasResult = true; *hasResultType = true; break; + case SpvOpFwidthFine: *hasResult = true; *hasResultType = true; break; + case SpvOpDPdxCoarse: *hasResult = true; *hasResultType = true; break; + case SpvOpDPdyCoarse: *hasResult = true; *hasResultType = true; break; + case SpvOpFwidthCoarse: *hasResult = true; *hasResultType = true; break; + case SpvOpEmitVertex: *hasResult = false; *hasResultType = false; break; + case SpvOpEndPrimitive: *hasResult = false; *hasResultType = false; break; + case SpvOpEmitStreamVertex: *hasResult = false; *hasResultType = false; break; + case SpvOpEndStreamPrimitive: *hasResult = false; *hasResultType = false; break; + case SpvOpControlBarrier: *hasResult = false; *hasResultType = false; break; + case SpvOpMemoryBarrier: *hasResult = false; *hasResultType = false; break; + case SpvOpAtomicLoad: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicStore: *hasResult = false; *hasResultType = false; break; + case SpvOpAtomicExchange: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicCompareExchange: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicCompareExchangeWeak: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicIIncrement: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicIDecrement: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicIAdd: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicISub: *hasResult = true; *hasResultType = true; 
break; + case SpvOpAtomicSMin: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicUMin: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicSMax: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicUMax: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicAnd: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicOr: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicXor: *hasResult = true; *hasResultType = true; break; + case SpvOpPhi: *hasResult = true; *hasResultType = true; break; + case SpvOpLoopMerge: *hasResult = false; *hasResultType = false; break; + case SpvOpSelectionMerge: *hasResult = false; *hasResultType = false; break; + case SpvOpLabel: *hasResult = true; *hasResultType = false; break; + case SpvOpBranch: *hasResult = false; *hasResultType = false; break; + case SpvOpBranchConditional: *hasResult = false; *hasResultType = false; break; + case SpvOpSwitch: *hasResult = false; *hasResultType = false; break; + case SpvOpKill: *hasResult = false; *hasResultType = false; break; + case SpvOpReturn: *hasResult = false; *hasResultType = false; break; + case SpvOpReturnValue: *hasResult = false; *hasResultType = false; break; + case SpvOpUnreachable: *hasResult = false; *hasResultType = false; break; + case SpvOpLifetimeStart: *hasResult = false; *hasResultType = false; break; + case SpvOpLifetimeStop: *hasResult = false; *hasResultType = false; break; + case SpvOpGroupAsyncCopy: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupWaitEvents: *hasResult = false; *hasResultType = false; break; + case SpvOpGroupAll: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupAny: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupBroadcast: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupIAdd: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupFAdd: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupFMin: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupUMin: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupSMin: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupFMax: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupUMax: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupSMax: *hasResult = true; *hasResultType = true; break; + case SpvOpReadPipe: *hasResult = true; *hasResultType = true; break; + case SpvOpWritePipe: *hasResult = true; *hasResultType = true; break; + case SpvOpReservedReadPipe: *hasResult = true; *hasResultType = true; break; + case SpvOpReservedWritePipe: *hasResult = true; *hasResultType = true; break; + case SpvOpReserveReadPipePackets: *hasResult = true; *hasResultType = true; break; + case SpvOpReserveWritePipePackets: *hasResult = true; *hasResultType = true; break; + case SpvOpCommitReadPipe: *hasResult = false; *hasResultType = false; break; + case SpvOpCommitWritePipe: *hasResult = false; *hasResultType = false; break; + case SpvOpIsValidReserveId: *hasResult = true; *hasResultType = true; break; + case SpvOpGetNumPipePackets: *hasResult = true; *hasResultType = true; break; + case SpvOpGetMaxPipePackets: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupReserveReadPipePackets: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupReserveWritePipePackets: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupCommitReadPipe: *hasResult = false; *hasResultType = false; 
break; + case SpvOpGroupCommitWritePipe: *hasResult = false; *hasResultType = false; break; + case SpvOpEnqueueMarker: *hasResult = true; *hasResultType = true; break; + case SpvOpEnqueueKernel: *hasResult = true; *hasResultType = true; break; + case SpvOpGetKernelNDrangeSubGroupCount: *hasResult = true; *hasResultType = true; break; + case SpvOpGetKernelNDrangeMaxSubGroupSize: *hasResult = true; *hasResultType = true; break; + case SpvOpGetKernelWorkGroupSize: *hasResult = true; *hasResultType = true; break; + case SpvOpGetKernelPreferredWorkGroupSizeMultiple: *hasResult = true; *hasResultType = true; break; + case SpvOpRetainEvent: *hasResult = false; *hasResultType = false; break; + case SpvOpReleaseEvent: *hasResult = false; *hasResultType = false; break; + case SpvOpCreateUserEvent: *hasResult = true; *hasResultType = true; break; + case SpvOpIsValidEvent: *hasResult = true; *hasResultType = true; break; + case SpvOpSetUserEventStatus: *hasResult = false; *hasResultType = false; break; + case SpvOpCaptureEventProfilingInfo: *hasResult = false; *hasResultType = false; break; + case SpvOpGetDefaultQueue: *hasResult = true; *hasResultType = true; break; + case SpvOpBuildNDRange: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleDrefImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseFetch: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseGather: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseDrefGather: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSparseTexelsResident: *hasResult = true; *hasResultType = true; break; + case SpvOpNoLine: *hasResult = false; *hasResultType = false; break; + case SpvOpAtomicFlagTestAndSet: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicFlagClear: *hasResult = false; *hasResultType = false; break; + case SpvOpImageSparseRead: *hasResult = true; *hasResultType = true; break; + case SpvOpSizeOf: *hasResult = true; *hasResultType = true; break; + case SpvOpTypePipeStorage: *hasResult = true; *hasResultType = false; break; + case SpvOpConstantPipeStorage: *hasResult = true; *hasResultType = true; break; + case SpvOpCreatePipeFromPipeStorage: *hasResult = true; *hasResultType = true; break; + case SpvOpGetKernelLocalSizeForSubgroupCount: *hasResult = true; *hasResultType = true; break; + case SpvOpGetKernelMaxNumSubgroups: *hasResult = true; *hasResultType = true; break; + case SpvOpTypeNamedBarrier: *hasResult = true; *hasResultType = false; break; + case SpvOpNamedBarrierInitialize: *hasResult = true; *hasResultType = true; break; + case SpvOpMemoryNamedBarrier: *hasResult = false; *hasResultType = false; break; + case SpvOpModuleProcessed: *hasResult = false; *hasResultType = false; break; + case SpvOpExecutionModeId: *hasResult = false; *hasResultType = false; 
break; + case SpvOpDecorateId: *hasResult = false; *hasResultType = false; break; + case SpvOpGroupNonUniformElect: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformAll: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformAny: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformAllEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBroadcast: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBroadcastFirst: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBallot: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformInverseBallot: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBallotBitExtract: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBallotBitCount: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBallotFindLSB: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBallotFindMSB: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformShuffle: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformShuffleXor: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformShuffleUp: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformShuffleDown: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformIAdd: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformFAdd: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformIMul: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformFMul: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformSMin: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformUMin: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformFMin: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformSMax: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformUMax: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformFMax: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBitwiseAnd: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBitwiseOr: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformBitwiseXor: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformLogicalAnd: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformLogicalOr: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformLogicalXor: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformQuadBroadcast: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformQuadSwap: *hasResult = true; *hasResultType = true; break; + case SpvOpCopyLogical: *hasResult = true; *hasResultType = true; break; + case SpvOpPtrEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpPtrNotEqual: *hasResult = true; *hasResultType = true; break; + case SpvOpPtrDiff: *hasResult = true; *hasResultType = true; break; + case SpvOpTerminateInvocation: *hasResult = false; *hasResultType = false; break; + case SpvOpSubgroupBallotKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupFirstInvocationKHR: *hasResult = true; *hasResultType = true; break; + 
case SpvOpSubgroupAllKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAnyKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAllEqualKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupReadInvocationKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpTraceRayKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpExecuteCallableKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpConvertUToAccelerationStructureKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpIgnoreIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpTerminateRayKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpTypeRayQueryKHR: *hasResult = true; *hasResultType = false; break; + case SpvOpRayQueryInitializeKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpRayQueryTerminateKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpRayQueryGenerateIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpRayQueryConfirmIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case SpvOpRayQueryProceedKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionTypeKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupIAddNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupFAddNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupFMinNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupUMinNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupSMinNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupFMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupUMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupSMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpFragmentMaskFetchAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpFragmentFetchAMD: *hasResult = true; *hasResultType = true; break; + case SpvOpReadClockKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpImageSampleFootprintNV: *hasResult = true; *hasResultType = true; break; + case SpvOpGroupNonUniformPartitionNV: *hasResult = true; *hasResultType = true; break; + case SpvOpWritePackedPrimitiveIndices4x8NV: *hasResult = false; *hasResultType = false; break; + case SpvOpReportIntersectionNV: *hasResult = true; *hasResultType = true; break; + case SpvOpIgnoreIntersectionNV: *hasResult = false; *hasResultType = false; break; + case SpvOpTerminateRayNV: *hasResult = false; *hasResultType = false; break; + case SpvOpTraceNV: *hasResult = false; *hasResultType = false; break; + case SpvOpTypeAccelerationStructureNV: *hasResult = true; *hasResultType = false; break; + case SpvOpExecuteCallableNV: *hasResult = false; *hasResultType = false; break; + case SpvOpTypeCooperativeMatrixNV: *hasResult = true; *hasResultType = false; break; + case SpvOpCooperativeMatrixLoadNV: *hasResult = true; *hasResultType = true; break; + case SpvOpCooperativeMatrixStoreNV: *hasResult = false; *hasResultType = false; break; + case SpvOpCooperativeMatrixMulAddNV: *hasResult = true; *hasResultType = true; break; + case SpvOpCooperativeMatrixLengthNV: *hasResult = true; *hasResultType = true; break; + case SpvOpBeginInvocationInterlockEXT: *hasResult = false; *hasResultType = 
false; break; + case SpvOpEndInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break; + case SpvOpDemoteToHelperInvocationEXT: *hasResult = false; *hasResultType = false; break; + case SpvOpIsHelperInvocationEXT: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupShuffleINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupShuffleDownINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupShuffleUpINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupShuffleXorINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupBlockReadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupBlockWriteINTEL: *hasResult = false; *hasResultType = false; break; + case SpvOpSubgroupImageBlockReadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupImageBlockWriteINTEL: *hasResult = false; *hasResultType = false; break; + case SpvOpSubgroupImageMediaBlockReadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupImageMediaBlockWriteINTEL: *hasResult = false; *hasResultType = false; break; + case SpvOpUCountLeadingZerosINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpUCountTrailingZerosINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpAbsISubINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpAbsUSubINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpIAddSatINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpUAddSatINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpIAverageINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpUAverageINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpIAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpUAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpISubSatINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpUSubSatINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpIMul32x16INTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpUMul32x16INTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpConstFunctionPointerINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpFunctionPointerCallINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpAsmTargetINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpAsmINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpAsmCallINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicFMinEXT: *hasResult = true; *hasResultType = true; break; + case SpvOpAtomicFMaxEXT: *hasResult = true; *hasResultType = true; break; + case SpvOpDecorateString: *hasResult = false; *hasResultType = false; break; + case SpvOpMemberDecorateString: *hasResult = false; *hasResultType = false; break; + case SpvOpVmeImageINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpTypeVmeImageINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcImePayloadINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcRefPayloadINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcSicPayloadINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcMcePayloadINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcMceResultINTEL: *hasResult = true; *hasResultType = 
false; break; + case SpvOpTypeAvcImeResultINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcImeResultSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcImeResultDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcImeSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcImeDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcRefResultINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpTypeAvcSicResultINTEL: *hasResult = true; *hasResultType = false; break; + case SpvOpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetMotionVectorCostFunctionINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetAcOnlyHaarINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceConvertToImePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceConvertToImeResultINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceConvertToRefPayloadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceConvertToRefResultINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceConvertToSicPayloadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceConvertToSicResultINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break; + case 
SpvOpSubgroupAvcMceGetInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetBestInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetInterMajorShapeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetInterMinorShapeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetInterDirectionsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetInterMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetInterReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeSetSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeSetDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeRefWindowSizeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeAdjustRefOffsetINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeSetMaxMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeSetUnidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeSetWeightedSadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeStripSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeStripDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; 
break; + case SpvOpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetBorderReachedINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetTruncatedSearchIndicationINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcFmeInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcBmeInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefSetBidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcRefConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicConfigureSkcINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicConfigureIpeLumaINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicConfigureIpeLumaChromaINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetMotionVectorMaskINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicSetSkcForwardTransformEnableINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicEvaluateIpeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicEvaluateWithSingleReferenceINTEL: *hasResult = true; 
*hasResultType = true; break; + case SpvOpSubgroupAvcSicEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetIpeLumaShapeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetBestIpeLumaDistortionINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetBestIpeChromaDistortionINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetPackedIpeLumaModesINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetIpeChromaModeINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSubgroupAvcSicGetInterRawSadsINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpVariableLengthArrayINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpSaveMemoryINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpRestoreMemoryINTEL: *hasResult = false; *hasResultType = false; break; + case SpvOpLoopControlINTEL: *hasResult = false; *hasResultType = false; break; + case SpvOpPtrCastToCrossWorkgroupINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpCrossWorkgroupCastToPtrINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpReadPipeBlockingINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpWritePipeBlockingINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpFPGARegINTEL: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetRayTMinKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetRayFlagsKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionTKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionInstanceCustomIndexKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionInstanceIdKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionGeometryIndexKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionPrimitiveIndexKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionBarycentricsKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionFrontFaceKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionCandidateAABBOpaqueKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionObjectRayDirectionKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetIntersectionObjectRayOriginKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetWorldRayDirectionKHR: *hasResult = true; *hasResultType = true; break; + case SpvOpRayQueryGetWorldRayOriginKHR: *hasResult = true; *hasResultType = true; break; + 
case SpvOpRayQueryGetIntersectionObjectToWorldKHR: *hasResult = true; *hasResultType = true; break;
+    case SpvOpRayQueryGetIntersectionWorldToObjectKHR: *hasResult = true; *hasResultType = true; break;
+    case SpvOpAtomicFAddEXT: *hasResult = true; *hasResultType = true; break;
+    case SpvOpTypeBufferSurfaceINTEL: *hasResult = true; *hasResultType = false; break;
+    case SpvOpTypeStructContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case SpvOpConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case SpvOpSpecConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    }
+}
+#endif /* SPV_ENABLE_UTILITY_CODE */
+
+#endif
diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp
index f16c2963e..19e28b8cb 100755
--- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp
+++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp
@@ -1,4 +1,4 @@
-// Copyright (c) 2014-2018 The Khronos Group Inc.
+// Copyright (c) 2014-2020 The Khronos Group Inc.
 //
 // Permission is hereby granted, free of charge, to any person obtaining a copy
 // of this software and/or associated documentation files (the "Materials"),
@@ -26,13 +26,16 @@
 // the Binary Section of the SPIR-V specification.

 // Enumeration tokens for SPIR-V, in various styles:
-//   C, C++, C++11, JSON, Lua, Python
+//   C, C++, C++11, JSON, Lua, Python, C#, D
 //
 // - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL
 // - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL
 // - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL
 // - Lua will use tables, e.g.: spv.SourceLanguage.GLSL
 // - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL']
+// - C# will use enum classes in the Specification class located in the "Spv" namespace,
+//   e.g.: Spv.Specification.SourceLanguage.GLSL
+// - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL
 //
 // Some tokens act like mask values, which can be OR'd together,
 // while others are mutually exclusive.
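As the comment above notes, the *Mask tokens combine with bitwise OR while each *Shift token names the corresponding bit position. A minimal C++ sketch of that pairing, using the C-style SpvRayFlags tokens from the spirv.h hunk earlier in this patch (the include path is an assumption about how the headers are installed):

#include "spirv/unified1/spirv.h"

// Each *Mask value is 1 << its *Shift counterpart, so masks OR cleanly
// into a single ray-flags operand word.
static_assert(SpvRayFlagsSkipClosestHitShaderKHRMask ==
                  (1u << SpvRayFlagsSkipClosestHitShaderKHRShift),
              "mask and shift enums stay in lockstep");

unsigned MakeRayFlags() {
    // 0x00000001 | 0x00000004 == 0x00000005
    return SpvRayFlagsOpaqueKHRMask | SpvRayFlagsTerminateOnFirstHitKHRMask;
}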
The mask-like ones have @@ -46,12 +49,12 @@ namespace spv { typedef unsigned int Id; -#define SPV_VERSION 0x10300 -#define SPV_REVISION 1 +#define SPV_VERSION 0x10500 +#define SPV_REVISION 4 static const unsigned int MagicNumber = 0x07230203; -static const unsigned int Version = 0x00010300; -static const unsigned int Revision = 1; +static const unsigned int Version = 0x00010500; +static const unsigned int Revision = 4; static const unsigned int OpCodeMask = 0xffff; static const unsigned int WordCountShift = 16; @@ -73,6 +76,20 @@ enum ExecutionModel { ExecutionModelFragment = 4, ExecutionModelGLCompute = 5, ExecutionModelKernel = 6, + ExecutionModelTaskNV = 5267, + ExecutionModelMeshNV = 5268, + ExecutionModelRayGenerationKHR = 5313, + ExecutionModelRayGenerationNV = 5313, + ExecutionModelIntersectionKHR = 5314, + ExecutionModelIntersectionNV = 5314, + ExecutionModelAnyHitKHR = 5315, + ExecutionModelAnyHitNV = 5315, + ExecutionModelClosestHitKHR = 5316, + ExecutionModelClosestHitNV = 5316, + ExecutionModelMissKHR = 5317, + ExecutionModelMissNV = 5317, + ExecutionModelCallableKHR = 5318, + ExecutionModelCallableNV = 5318, ExecutionModelMax = 0x7fffffff, }; @@ -80,6 +97,8 @@ enum AddressingModel { AddressingModelLogical = 0, AddressingModelPhysical32 = 1, AddressingModelPhysical64 = 2, + AddressingModelPhysicalStorageBuffer64 = 5348, + AddressingModelPhysicalStorageBuffer64EXT = 5348, AddressingModelMax = 0x7fffffff, }; @@ -87,6 +106,8 @@ enum MemoryModel { MemoryModelSimple = 0, MemoryModelGLSL450 = 1, MemoryModelOpenCL = 2, + MemoryModelVulkan = 3, + MemoryModelVulkanKHR = 3, MemoryModelMax = 0x7fffffff, }; @@ -130,7 +151,33 @@ enum ExecutionMode { ExecutionModeLocalSizeId = 38, ExecutionModeLocalSizeHintId = 39, ExecutionModePostDepthCoverage = 4446, + ExecutionModeDenormPreserve = 4459, + ExecutionModeDenormFlushToZero = 4460, + ExecutionModeSignedZeroInfNanPreserve = 4461, + ExecutionModeRoundingModeRTE = 4462, + ExecutionModeRoundingModeRTZ = 4463, ExecutionModeStencilRefReplacingEXT = 5027, + ExecutionModeOutputLinesNV = 5269, + ExecutionModeOutputPrimitivesNV = 5270, + ExecutionModeDerivativeGroupQuadsNV = 5289, + ExecutionModeDerivativeGroupLinearNV = 5290, + ExecutionModeOutputTrianglesNV = 5298, + ExecutionModePixelInterlockOrderedEXT = 5366, + ExecutionModePixelInterlockUnorderedEXT = 5367, + ExecutionModeSampleInterlockOrderedEXT = 5368, + ExecutionModeSampleInterlockUnorderedEXT = 5369, + ExecutionModeShadingRateInterlockOrderedEXT = 5370, + ExecutionModeShadingRateInterlockUnorderedEXT = 5371, + ExecutionModeSharedLocalMemorySizeINTEL = 5618, + ExecutionModeRoundingModeRTPINTEL = 5620, + ExecutionModeRoundingModeRTNINTEL = 5621, + ExecutionModeFloatingPointModeALTINTEL = 5622, + ExecutionModeFloatingPointModeIEEEINTEL = 5623, + ExecutionModeMaxWorkgroupSizeINTEL = 5893, + ExecutionModeMaxWorkDimINTEL = 5894, + ExecutionModeNoGlobalOffsetINTEL = 5895, + ExecutionModeNumSIMDWorkitemsINTEL = 5896, + ExecutionModeSchedulerTargetFmaxMhzINTEL = 5903, ExecutionModeMax = 0x7fffffff, }; @@ -148,6 +195,23 @@ enum StorageClass { StorageClassAtomicCounter = 10, StorageClassImage = 11, StorageClassStorageBuffer = 12, + StorageClassCallableDataKHR = 5328, + StorageClassCallableDataNV = 5328, + StorageClassIncomingCallableDataKHR = 5329, + StorageClassIncomingCallableDataNV = 5329, + StorageClassRayPayloadKHR = 5338, + StorageClassRayPayloadNV = 5338, + StorageClassHitAttributeKHR = 5339, + StorageClassHitAttributeNV = 5339, + StorageClassIncomingRayPayloadKHR = 5342, + 
StorageClassIncomingRayPayloadNV = 5342, + StorageClassShaderRecordBufferKHR = 5343, + StorageClassShaderRecordBufferNV = 5343, + StorageClassPhysicalStorageBuffer = 5349, + StorageClassPhysicalStorageBufferEXT = 5349, + StorageClassCodeSectionINTEL = 5605, + StorageClassDeviceOnlyINTEL = 5936, + StorageClassHostOnlyINTEL = 5937, StorageClassMax = 0x7fffffff, }; @@ -218,6 +282,8 @@ enum ImageFormat { ImageFormatRg8ui = 37, ImageFormatR16ui = 38, ImageFormatR8ui = 39, + ImageFormatR64ui = 40, + ImageFormatR64i = 41, ImageFormatMax = 0x7fffffff, }; @@ -275,6 +341,16 @@ enum ImageOperandsShift { ImageOperandsConstOffsetsShift = 5, ImageOperandsSampleShift = 6, ImageOperandsMinLodShift = 7, + ImageOperandsMakeTexelAvailableShift = 8, + ImageOperandsMakeTexelAvailableKHRShift = 8, + ImageOperandsMakeTexelVisibleShift = 9, + ImageOperandsMakeTexelVisibleKHRShift = 9, + ImageOperandsNonPrivateTexelShift = 10, + ImageOperandsNonPrivateTexelKHRShift = 10, + ImageOperandsVolatileTexelShift = 11, + ImageOperandsVolatileTexelKHRShift = 11, + ImageOperandsSignExtendShift = 12, + ImageOperandsZeroExtendShift = 13, ImageOperandsMax = 0x7fffffff, }; @@ -288,6 +364,16 @@ enum ImageOperandsMask { ImageOperandsConstOffsetsMask = 0x00000020, ImageOperandsSampleMask = 0x00000040, ImageOperandsMinLodMask = 0x00000080, + ImageOperandsMakeTexelAvailableMask = 0x00000100, + ImageOperandsMakeTexelAvailableKHRMask = 0x00000100, + ImageOperandsMakeTexelVisibleMask = 0x00000200, + ImageOperandsMakeTexelVisibleKHRMask = 0x00000200, + ImageOperandsNonPrivateTexelMask = 0x00000400, + ImageOperandsNonPrivateTexelKHRMask = 0x00000400, + ImageOperandsVolatileTexelMask = 0x00000800, + ImageOperandsVolatileTexelKHRMask = 0x00000800, + ImageOperandsSignExtendMask = 0x00001000, + ImageOperandsZeroExtendMask = 0x00002000, }; enum FPFastMathModeShift { @@ -296,6 +382,8 @@ enum FPFastMathModeShift { FPFastMathModeNSZShift = 2, FPFastMathModeAllowRecipShift = 3, FPFastMathModeFastShift = 4, + FPFastMathModeAllowContractFastINTELShift = 16, + FPFastMathModeAllowReassocINTELShift = 17, FPFastMathModeMax = 0x7fffffff, }; @@ -306,6 +394,8 @@ enum FPFastMathModeMask { FPFastMathModeNSZMask = 0x00000004, FPFastMathModeAllowRecipMask = 0x00000008, FPFastMathModeFastMask = 0x00000010, + FPFastMathModeAllowContractFastINTELMask = 0x00010000, + FPFastMathModeAllowReassocINTELMask = 0x00020000, }; enum FPRoundingMode { @@ -368,6 +458,7 @@ enum Decoration { DecorationNonWritable = 24, DecorationNonReadable = 25, DecorationUniform = 26, + DecorationUniformId = 27, DecorationSaturatedConversion = 28, DecorationStream = 29, DecorationLocation = 30, @@ -388,14 +479,62 @@ enum Decoration { DecorationMaxByteOffset = 45, DecorationAlignmentId = 46, DecorationMaxByteOffsetId = 47, + DecorationNoSignedWrap = 4469, + DecorationNoUnsignedWrap = 4470, DecorationExplicitInterpAMD = 4999, DecorationOverrideCoverageNV = 5248, DecorationPassthroughNV = 5250, DecorationViewportRelativeNV = 5252, DecorationSecondaryViewportRelativeNV = 5256, + DecorationPerPrimitiveNV = 5271, + DecorationPerViewNV = 5272, + DecorationPerTaskNV = 5273, + DecorationPerVertexNV = 5285, + DecorationNonUniform = 5300, DecorationNonUniformEXT = 5300, + DecorationRestrictPointer = 5355, + DecorationRestrictPointerEXT = 5355, + DecorationAliasedPointer = 5356, + DecorationAliasedPointerEXT = 5356, + DecorationSIMTCallINTEL = 5599, + DecorationReferencedIndirectlyINTEL = 5602, + DecorationClobberINTEL = 5607, + DecorationSideEffectsINTEL = 5608, + DecorationVectorComputeVariableINTEL = 
5624, + DecorationFuncParamIOKindINTEL = 5625, + DecorationVectorComputeFunctionINTEL = 5626, + DecorationStackCallINTEL = 5627, + DecorationGlobalVariableOffsetINTEL = 5628, + DecorationCounterBuffer = 5634, DecorationHlslCounterBufferGOOGLE = 5634, DecorationHlslSemanticGOOGLE = 5635, + DecorationUserSemantic = 5635, + DecorationUserTypeGOOGLE = 5636, + DecorationFunctionRoundingModeINTEL = 5822, + DecorationFunctionDenormModeINTEL = 5823, + DecorationRegisterINTEL = 5825, + DecorationMemoryINTEL = 5826, + DecorationNumbanksINTEL = 5827, + DecorationBankwidthINTEL = 5828, + DecorationMaxPrivateCopiesINTEL = 5829, + DecorationSinglepumpINTEL = 5830, + DecorationDoublepumpINTEL = 5831, + DecorationMaxReplicatesINTEL = 5832, + DecorationSimpleDualPortINTEL = 5833, + DecorationMergeINTEL = 5834, + DecorationBankBitsINTEL = 5835, + DecorationForcePow2DepthINTEL = 5836, + DecorationBurstCoalesceINTEL = 5899, + DecorationCacheSizeINTEL = 5900, + DecorationDontStaticallyCoalesceINTEL = 5901, + DecorationPrefetchINTEL = 5902, + DecorationStallEnableINTEL = 5905, + DecorationFuseLoopsInFunctionINTEL = 5907, + DecorationBufferLocationINTEL = 5921, + DecorationIOPipeStorageINTEL = 5944, + DecorationFunctionFloatingPointModeINTEL = 6080, + DecorationSingleElementVectorINTEL = 6085, + DecorationVectorComputeCallableFunctionINTEL = 6087, DecorationMax = 0x7fffffff, }; @@ -454,8 +593,10 @@ enum BuiltIn { BuiltInBaseVertex = 4424, BuiltInBaseInstance = 4425, BuiltInDrawIndex = 4426, + BuiltInPrimitiveShadingRateKHR = 4432, BuiltInDeviceIndex = 4438, BuiltInViewIndex = 4440, + BuiltInShadingRateKHR = 4444, BuiltInBaryCoordNoPerspAMD = 4992, BuiltInBaryCoordNoPerspCentroidAMD = 4993, BuiltInBaryCoordNoPerspSampleAMD = 4994, @@ -470,6 +611,52 @@ enum BuiltIn { BuiltInPositionPerViewNV = 5261, BuiltInViewportMaskPerViewNV = 5262, BuiltInFullyCoveredEXT = 5264, + BuiltInTaskCountNV = 5274, + BuiltInPrimitiveCountNV = 5275, + BuiltInPrimitiveIndicesNV = 5276, + BuiltInClipDistancePerViewNV = 5277, + BuiltInCullDistancePerViewNV = 5278, + BuiltInLayerPerViewNV = 5279, + BuiltInMeshViewCountNV = 5280, + BuiltInMeshViewIndicesNV = 5281, + BuiltInBaryCoordNV = 5286, + BuiltInBaryCoordNoPerspNV = 5287, + BuiltInFragSizeEXT = 5292, + BuiltInFragmentSizeNV = 5292, + BuiltInFragInvocationCountEXT = 5293, + BuiltInInvocationsPerPixelNV = 5293, + BuiltInLaunchIdKHR = 5319, + BuiltInLaunchIdNV = 5319, + BuiltInLaunchSizeKHR = 5320, + BuiltInLaunchSizeNV = 5320, + BuiltInWorldRayOriginKHR = 5321, + BuiltInWorldRayOriginNV = 5321, + BuiltInWorldRayDirectionKHR = 5322, + BuiltInWorldRayDirectionNV = 5322, + BuiltInObjectRayOriginKHR = 5323, + BuiltInObjectRayOriginNV = 5323, + BuiltInObjectRayDirectionKHR = 5324, + BuiltInObjectRayDirectionNV = 5324, + BuiltInRayTminKHR = 5325, + BuiltInRayTminNV = 5325, + BuiltInRayTmaxKHR = 5326, + BuiltInRayTmaxNV = 5326, + BuiltInInstanceCustomIndexKHR = 5327, + BuiltInInstanceCustomIndexNV = 5327, + BuiltInObjectToWorldKHR = 5330, + BuiltInObjectToWorldNV = 5330, + BuiltInWorldToObjectKHR = 5331, + BuiltInWorldToObjectNV = 5331, + BuiltInHitTNV = 5332, + BuiltInHitKindKHR = 5333, + BuiltInHitKindNV = 5333, + BuiltInIncomingRayFlagsKHR = 5351, + BuiltInIncomingRayFlagsNV = 5351, + BuiltInRayGeometryIndexKHR = 5352, + BuiltInWarpsPerSMNV = 5374, + BuiltInSMCountNV = 5375, + BuiltInWarpIDNV = 5376, + BuiltInSMIDNV = 5377, BuiltInMax = 0x7fffffff, }; @@ -490,6 +677,19 @@ enum LoopControlShift { LoopControlDontUnrollShift = 1, LoopControlDependencyInfiniteShift = 2, 
LoopControlDependencyLengthShift = 3, + LoopControlMinIterationsShift = 4, + LoopControlMaxIterationsShift = 5, + LoopControlIterationMultipleShift = 6, + LoopControlPeelCountShift = 7, + LoopControlPartialCountShift = 8, + LoopControlInitiationIntervalINTELShift = 16, + LoopControlMaxConcurrencyINTELShift = 17, + LoopControlDependencyArrayINTELShift = 18, + LoopControlPipelineEnableINTELShift = 19, + LoopControlLoopCoalesceINTELShift = 20, + LoopControlMaxInterleavingINTELShift = 21, + LoopControlSpeculatedIterationsINTELShift = 22, + LoopControlNoFusionINTELShift = 23, LoopControlMax = 0x7fffffff, }; @@ -499,6 +699,19 @@ enum LoopControlMask { LoopControlDontUnrollMask = 0x00000002, LoopControlDependencyInfiniteMask = 0x00000004, LoopControlDependencyLengthMask = 0x00000008, + LoopControlMinIterationsMask = 0x00000010, + LoopControlMaxIterationsMask = 0x00000020, + LoopControlIterationMultipleMask = 0x00000040, + LoopControlPeelCountMask = 0x00000080, + LoopControlPartialCountMask = 0x00000100, + LoopControlInitiationIntervalINTELMask = 0x00010000, + LoopControlMaxConcurrencyINTELMask = 0x00020000, + LoopControlDependencyArrayINTELMask = 0x00040000, + LoopControlPipelineEnableINTELMask = 0x00080000, + LoopControlLoopCoalesceINTELMask = 0x00100000, + LoopControlMaxInterleavingINTELMask = 0x00200000, + LoopControlSpeculatedIterationsINTELMask = 0x00400000, + LoopControlNoFusionINTELMask = 0x00800000, }; enum FunctionControlShift { @@ -528,6 +741,13 @@ enum MemorySemanticsShift { MemorySemanticsCrossWorkgroupMemoryShift = 9, MemorySemanticsAtomicCounterMemoryShift = 10, MemorySemanticsImageMemoryShift = 11, + MemorySemanticsOutputMemoryShift = 12, + MemorySemanticsOutputMemoryKHRShift = 12, + MemorySemanticsMakeAvailableShift = 13, + MemorySemanticsMakeAvailableKHRShift = 13, + MemorySemanticsMakeVisibleShift = 14, + MemorySemanticsMakeVisibleKHRShift = 14, + MemorySemanticsVolatileShift = 15, MemorySemanticsMax = 0x7fffffff, }; @@ -543,12 +763,25 @@ enum MemorySemanticsMask { MemorySemanticsCrossWorkgroupMemoryMask = 0x00000200, MemorySemanticsAtomicCounterMemoryMask = 0x00000400, MemorySemanticsImageMemoryMask = 0x00000800, + MemorySemanticsOutputMemoryMask = 0x00001000, + MemorySemanticsOutputMemoryKHRMask = 0x00001000, + MemorySemanticsMakeAvailableMask = 0x00002000, + MemorySemanticsMakeAvailableKHRMask = 0x00002000, + MemorySemanticsMakeVisibleMask = 0x00004000, + MemorySemanticsMakeVisibleKHRMask = 0x00004000, + MemorySemanticsVolatileMask = 0x00008000, }; enum MemoryAccessShift { MemoryAccessVolatileShift = 0, MemoryAccessAlignedShift = 1, MemoryAccessNontemporalShift = 2, + MemoryAccessMakePointerAvailableShift = 3, + MemoryAccessMakePointerAvailableKHRShift = 3, + MemoryAccessMakePointerVisibleShift = 4, + MemoryAccessMakePointerVisibleKHRShift = 4, + MemoryAccessNonPrivatePointerShift = 5, + MemoryAccessNonPrivatePointerKHRShift = 5, MemoryAccessMax = 0x7fffffff, }; @@ -557,6 +790,12 @@ enum MemoryAccessMask { MemoryAccessVolatileMask = 0x00000001, MemoryAccessAlignedMask = 0x00000002, MemoryAccessNontemporalMask = 0x00000004, + MemoryAccessMakePointerAvailableMask = 0x00000008, + MemoryAccessMakePointerAvailableKHRMask = 0x00000008, + MemoryAccessMakePointerVisibleMask = 0x00000010, + MemoryAccessMakePointerVisibleKHRMask = 0x00000010, + MemoryAccessNonPrivatePointerMask = 0x00000020, + MemoryAccessNonPrivatePointerKHRMask = 0x00000020, }; enum Scope { @@ -565,6 +804,9 @@ enum Scope { ScopeWorkgroup = 2, ScopeSubgroup = 3, ScopeInvocation = 4, + ScopeQueueFamily = 5, + 
ScopeQueueFamilyKHR = 5, + ScopeShaderCallKHR = 6, ScopeMax = 0x7fffffff, }; @@ -664,8 +906,14 @@ enum Capability { CapabilityGroupNonUniformShuffleRelative = 66, CapabilityGroupNonUniformClustered = 67, CapabilityGroupNonUniformQuad = 68, + CapabilityShaderLayer = 69, + CapabilityShaderViewportIndex = 70, + CapabilityFragmentShadingRateKHR = 4422, CapabilitySubgroupBallotKHR = 4423, CapabilityDrawParameters = 4427, + CapabilityWorkgroupMemoryExplicitLayoutKHR = 4428, + CapabilityWorkgroupMemoryExplicitLayout8BitAccessKHR = 4429, + CapabilityWorkgroupMemoryExplicitLayout16BitAccessKHR = 4430, CapabilitySubgroupVoteKHR = 4431, CapabilityStorageBuffer16BitAccess = 4433, CapabilityStorageUniformBufferBlock16 = 4433, @@ -682,11 +930,22 @@ enum Capability { CapabilityStorageBuffer8BitAccess = 4448, CapabilityUniformAndStorageBuffer8BitAccess = 4449, CapabilityStoragePushConstant8 = 4450, + CapabilityDenormPreserve = 4464, + CapabilityDenormFlushToZero = 4465, + CapabilitySignedZeroInfNanPreserve = 4466, + CapabilityRoundingModeRTE = 4467, + CapabilityRoundingModeRTZ = 4468, + CapabilityRayQueryProvisionalKHR = 4471, + CapabilityRayQueryKHR = 4472, + CapabilityRayTraversalPrimitiveCullingKHR = 4478, + CapabilityRayTracingKHR = 4479, CapabilityFloat16ImageAMD = 5008, CapabilityImageGatherBiasLodAMD = 5009, CapabilityFragmentMaskAMD = 5010, CapabilityStencilExportEXT = 5013, CapabilityImageReadWriteLodAMD = 5015, + CapabilityInt64ImageEXT = 5016, + CapabilityShaderClockKHR = 5055, CapabilitySampleMaskOverrideCoverageNV = 5249, CapabilityGeometryShaderPassthroughNV = 5251, CapabilityShaderViewportIndexLayerEXT = 5254, @@ -695,25 +954,168 @@ enum Capability { CapabilityShaderStereoViewNV = 5259, CapabilityPerViewAttributesNV = 5260, CapabilityFragmentFullyCoveredEXT = 5265, + CapabilityMeshShadingNV = 5266, + CapabilityImageFootprintNV = 5282, + CapabilityFragmentBarycentricNV = 5284, + CapabilityComputeDerivativeGroupQuadsNV = 5288, + CapabilityFragmentDensityEXT = 5291, + CapabilityShadingRateNV = 5291, CapabilityGroupNonUniformPartitionedNV = 5297, + CapabilityShaderNonUniform = 5301, CapabilityShaderNonUniformEXT = 5301, + CapabilityRuntimeDescriptorArray = 5302, CapabilityRuntimeDescriptorArrayEXT = 5302, + CapabilityInputAttachmentArrayDynamicIndexing = 5303, CapabilityInputAttachmentArrayDynamicIndexingEXT = 5303, + CapabilityUniformTexelBufferArrayDynamicIndexing = 5304, CapabilityUniformTexelBufferArrayDynamicIndexingEXT = 5304, + CapabilityStorageTexelBufferArrayDynamicIndexing = 5305, CapabilityStorageTexelBufferArrayDynamicIndexingEXT = 5305, + CapabilityUniformBufferArrayNonUniformIndexing = 5306, CapabilityUniformBufferArrayNonUniformIndexingEXT = 5306, + CapabilitySampledImageArrayNonUniformIndexing = 5307, CapabilitySampledImageArrayNonUniformIndexingEXT = 5307, + CapabilityStorageBufferArrayNonUniformIndexing = 5308, CapabilityStorageBufferArrayNonUniformIndexingEXT = 5308, + CapabilityStorageImageArrayNonUniformIndexing = 5309, CapabilityStorageImageArrayNonUniformIndexingEXT = 5309, + CapabilityInputAttachmentArrayNonUniformIndexing = 5310, CapabilityInputAttachmentArrayNonUniformIndexingEXT = 5310, + CapabilityUniformTexelBufferArrayNonUniformIndexing = 5311, CapabilityUniformTexelBufferArrayNonUniformIndexingEXT = 5311, + CapabilityStorageTexelBufferArrayNonUniformIndexing = 5312, CapabilityStorageTexelBufferArrayNonUniformIndexingEXT = 5312, + CapabilityRayTracingNV = 5340, + CapabilityVulkanMemoryModel = 5345, + CapabilityVulkanMemoryModelKHR = 5345, + 
CapabilityVulkanMemoryModelDeviceScope = 5346, + CapabilityVulkanMemoryModelDeviceScopeKHR = 5346, + CapabilityPhysicalStorageBufferAddresses = 5347, + CapabilityPhysicalStorageBufferAddressesEXT = 5347, + CapabilityComputeDerivativeGroupLinearNV = 5350, + CapabilityRayTracingProvisionalKHR = 5353, + CapabilityCooperativeMatrixNV = 5357, + CapabilityFragmentShaderSampleInterlockEXT = 5363, + CapabilityFragmentShaderShadingRateInterlockEXT = 5372, + CapabilityShaderSMBuiltinsNV = 5373, + CapabilityFragmentShaderPixelInterlockEXT = 5378, + CapabilityDemoteToHelperInvocationEXT = 5379, CapabilitySubgroupShuffleINTEL = 5568, CapabilitySubgroupBufferBlockIOINTEL = 5569, CapabilitySubgroupImageBlockIOINTEL = 5570, + CapabilitySubgroupImageMediaBlockIOINTEL = 5579, + CapabilityRoundToInfinityINTEL = 5582, + CapabilityFloatingPointModeINTEL = 5583, + CapabilityIntegerFunctions2INTEL = 5584, + CapabilityFunctionPointersINTEL = 5603, + CapabilityIndirectReferencesINTEL = 5604, + CapabilityAsmINTEL = 5606, + CapabilityAtomicFloat32MinMaxEXT = 5612, + CapabilityAtomicFloat64MinMaxEXT = 5613, + CapabilityAtomicFloat16MinMaxEXT = 5616, + CapabilityVectorComputeINTEL = 5617, + CapabilityVectorAnyINTEL = 5619, + CapabilitySubgroupAvcMotionEstimationINTEL = 5696, + CapabilitySubgroupAvcMotionEstimationIntraINTEL = 5697, + CapabilitySubgroupAvcMotionEstimationChromaINTEL = 5698, + CapabilityVariableLengthArrayINTEL = 5817, + CapabilityFunctionFloatControlINTEL = 5821, + CapabilityFPGAMemoryAttributesINTEL = 5824, + CapabilityFPFastMathModeINTEL = 5837, + CapabilityArbitraryPrecisionIntegersINTEL = 5844, + CapabilityUnstructuredLoopControlsINTEL = 5886, + CapabilityFPGALoopControlsINTEL = 5888, + CapabilityKernelAttributesINTEL = 5892, + CapabilityFPGAKernelAttributesINTEL = 5897, + CapabilityFPGAMemoryAccessesINTEL = 5898, + CapabilityFPGAClusterAttributesINTEL = 5904, + CapabilityLoopFuseINTEL = 5906, + CapabilityFPGABufferLocationINTEL = 5920, + CapabilityUSMStorageClassesINTEL = 5935, + CapabilityIOPipesINTEL = 5943, + CapabilityBlockingPipesINTEL = 5945, + CapabilityFPGARegINTEL = 5948, + CapabilityAtomicFloat32AddEXT = 6033, + CapabilityAtomicFloat64AddEXT = 6034, + CapabilityLongConstantCompositeINTEL = 6089, CapabilityMax = 0x7fffffff, }; +enum RayFlagsShift { + RayFlagsOpaqueKHRShift = 0, + RayFlagsNoOpaqueKHRShift = 1, + RayFlagsTerminateOnFirstHitKHRShift = 2, + RayFlagsSkipClosestHitShaderKHRShift = 3, + RayFlagsCullBackFacingTrianglesKHRShift = 4, + RayFlagsCullFrontFacingTrianglesKHRShift = 5, + RayFlagsCullOpaqueKHRShift = 6, + RayFlagsCullNoOpaqueKHRShift = 7, + RayFlagsSkipTrianglesKHRShift = 8, + RayFlagsSkipAABBsKHRShift = 9, + RayFlagsMax = 0x7fffffff, +}; + +enum RayFlagsMask { + RayFlagsMaskNone = 0, + RayFlagsOpaqueKHRMask = 0x00000001, + RayFlagsNoOpaqueKHRMask = 0x00000002, + RayFlagsTerminateOnFirstHitKHRMask = 0x00000004, + RayFlagsSkipClosestHitShaderKHRMask = 0x00000008, + RayFlagsCullBackFacingTrianglesKHRMask = 0x00000010, + RayFlagsCullFrontFacingTrianglesKHRMask = 0x00000020, + RayFlagsCullOpaqueKHRMask = 0x00000040, + RayFlagsCullNoOpaqueKHRMask = 0x00000080, + RayFlagsSkipTrianglesKHRMask = 0x00000100, + RayFlagsSkipAABBsKHRMask = 0x00000200, +}; + +enum RayQueryIntersection { + RayQueryIntersectionRayQueryCandidateIntersectionKHR = 0, + RayQueryIntersectionRayQueryCommittedIntersectionKHR = 1, + RayQueryIntersectionMax = 0x7fffffff, +}; + +enum RayQueryCommittedIntersectionType { + RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionNoneKHR = 0, + 
RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionTriangleKHR = 1, + RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionGeneratedKHR = 2, + RayQueryCommittedIntersectionTypeMax = 0x7fffffff, +}; + +enum RayQueryCandidateIntersectionType { + RayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionTriangleKHR = 0, + RayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionAABBKHR = 1, + RayQueryCandidateIntersectionTypeMax = 0x7fffffff, +}; + +enum FragmentShadingRateShift { + FragmentShadingRateVertical2PixelsShift = 0, + FragmentShadingRateVertical4PixelsShift = 1, + FragmentShadingRateHorizontal2PixelsShift = 2, + FragmentShadingRateHorizontal4PixelsShift = 3, + FragmentShadingRateMax = 0x7fffffff, +}; + +enum FragmentShadingRateMask { + FragmentShadingRateMaskNone = 0, + FragmentShadingRateVertical2PixelsMask = 0x00000001, + FragmentShadingRateVertical4PixelsMask = 0x00000002, + FragmentShadingRateHorizontal2PixelsMask = 0x00000004, + FragmentShadingRateHorizontal4PixelsMask = 0x00000008, +}; + +enum FPDenormMode { + FPDenormModePreserve = 0, + FPDenormModeFlushToZero = 1, + FPDenormModeMax = 0x7fffffff, +}; + +enum FPOperationMode { + FPOperationModeIEEE = 0, + FPOperationModeALT = 1, + FPOperationModeMax = 0x7fffffff, +}; + enum Op { OpNop = 0, OpUndef = 1, @@ -1055,12 +1457,29 @@ enum Op { OpGroupNonUniformLogicalXor = 364, OpGroupNonUniformQuadBroadcast = 365, OpGroupNonUniformQuadSwap = 366, + OpCopyLogical = 400, + OpPtrEqual = 401, + OpPtrNotEqual = 402, + OpPtrDiff = 403, + OpTerminateInvocation = 4416, OpSubgroupBallotKHR = 4421, OpSubgroupFirstInvocationKHR = 4422, OpSubgroupAllKHR = 4428, OpSubgroupAnyKHR = 4429, OpSubgroupAllEqualKHR = 4430, OpSubgroupReadInvocationKHR = 4432, + OpTraceRayKHR = 4445, + OpExecuteCallableKHR = 4446, + OpConvertUToAccelerationStructureKHR = 4447, + OpIgnoreIntersectionKHR = 4448, + OpTerminateRayKHR = 4449, + OpTypeRayQueryKHR = 4472, + OpRayQueryInitializeKHR = 4473, + OpRayQueryTerminateKHR = 4474, + OpRayQueryGenerateIntersectionKHR = 4475, + OpRayQueryConfirmIntersectionKHR = 4476, + OpRayQueryProceedKHR = 4477, + OpRayQueryGetIntersectionTypeKHR = 4479, OpGroupIAddNonUniformAMD = 5000, OpGroupFAddNonUniformAMD = 5001, OpGroupFMinNonUniformAMD = 5002, @@ -1071,7 +1490,27 @@ enum Op { OpGroupSMaxNonUniformAMD = 5007, OpFragmentMaskFetchAMD = 5011, OpFragmentFetchAMD = 5012, + OpReadClockKHR = 5056, + OpImageSampleFootprintNV = 5283, OpGroupNonUniformPartitionNV = 5296, + OpWritePackedPrimitiveIndices4x8NV = 5299, + OpReportIntersectionKHR = 5334, + OpReportIntersectionNV = 5334, + OpIgnoreIntersectionNV = 5335, + OpTerminateRayNV = 5336, + OpTraceNV = 5337, + OpTypeAccelerationStructureKHR = 5341, + OpTypeAccelerationStructureNV = 5341, + OpExecuteCallableNV = 5344, + OpTypeCooperativeMatrixNV = 5358, + OpCooperativeMatrixLoadNV = 5359, + OpCooperativeMatrixStoreNV = 5360, + OpCooperativeMatrixMulAddNV = 5361, + OpCooperativeMatrixLengthNV = 5362, + OpBeginInvocationInterlockEXT = 5364, + OpEndInvocationInterlockEXT = 5365, + OpDemoteToHelperInvocationEXT = 5380, + OpIsHelperInvocationEXT = 5381, OpSubgroupShuffleINTEL = 5571, OpSubgroupShuffleDownINTEL = 5572, OpSubgroupShuffleUpINTEL = 5573, @@ -1080,11 +1519,768 @@ enum Op { OpSubgroupBlockWriteINTEL = 5576, OpSubgroupImageBlockReadINTEL = 5577, OpSubgroupImageBlockWriteINTEL = 5578, + OpSubgroupImageMediaBlockReadINTEL = 5580, + OpSubgroupImageMediaBlockWriteINTEL = 5581, + OpUCountLeadingZerosINTEL = 5585, + OpUCountTrailingZerosINTEL = 5586, + 
OpAbsISubINTEL = 5587, + OpAbsUSubINTEL = 5588, + OpIAddSatINTEL = 5589, + OpUAddSatINTEL = 5590, + OpIAverageINTEL = 5591, + OpUAverageINTEL = 5592, + OpIAverageRoundedINTEL = 5593, + OpUAverageRoundedINTEL = 5594, + OpISubSatINTEL = 5595, + OpUSubSatINTEL = 5596, + OpIMul32x16INTEL = 5597, + OpUMul32x16INTEL = 5598, + OpConstFunctionPointerINTEL = 5600, + OpFunctionPointerCallINTEL = 5601, + OpAsmTargetINTEL = 5609, + OpAsmINTEL = 5610, + OpAsmCallINTEL = 5611, + OpAtomicFMinEXT = 5614, + OpAtomicFMaxEXT = 5615, + OpDecorateString = 5632, OpDecorateStringGOOGLE = 5632, + OpMemberDecorateString = 5633, OpMemberDecorateStringGOOGLE = 5633, + OpVmeImageINTEL = 5699, + OpTypeVmeImageINTEL = 5700, + OpTypeAvcImePayloadINTEL = 5701, + OpTypeAvcRefPayloadINTEL = 5702, + OpTypeAvcSicPayloadINTEL = 5703, + OpTypeAvcMcePayloadINTEL = 5704, + OpTypeAvcMceResultINTEL = 5705, + OpTypeAvcImeResultINTEL = 5706, + OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707, + OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708, + OpTypeAvcImeSingleReferenceStreaminINTEL = 5709, + OpTypeAvcImeDualReferenceStreaminINTEL = 5710, + OpTypeAvcRefResultINTEL = 5711, + OpTypeAvcSicResultINTEL = 5712, + OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713, + OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714, + OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715, + OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716, + OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717, + OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718, + OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719, + OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720, + OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721, + OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722, + OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723, + OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724, + OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725, + OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726, + OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727, + OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728, + OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729, + OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730, + OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731, + OpSubgroupAvcMceConvertToImePayloadINTEL = 5732, + OpSubgroupAvcMceConvertToImeResultINTEL = 5733, + OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734, + OpSubgroupAvcMceConvertToRefResultINTEL = 5735, + OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736, + OpSubgroupAvcMceConvertToSicResultINTEL = 5737, + OpSubgroupAvcMceGetMotionVectorsINTEL = 5738, + OpSubgroupAvcMceGetInterDistortionsINTEL = 5739, + OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740, + OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741, + OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742, + OpSubgroupAvcMceGetInterDirectionsINTEL = 5743, + OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744, + OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745, + OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746, + OpSubgroupAvcImeInitializeINTEL = 5747, + OpSubgroupAvcImeSetSingleReferenceINTEL = 5748, + OpSubgroupAvcImeSetDualReferenceINTEL = 5749, + OpSubgroupAvcImeRefWindowSizeINTEL = 5750, + OpSubgroupAvcImeAdjustRefOffsetINTEL = 5751, + OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752, + OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753, + 
OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754, + OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755, + OpSubgroupAvcImeSetWeightedSadINTEL = 5756, + OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757, + OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761, + OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764, + OpSubgroupAvcImeConvertToMceResultINTEL = 5765, + OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766, + OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767, + OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768, + OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775, + OpSubgroupAvcImeGetBorderReachedINTEL = 5776, + OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777, + OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778, + OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779, + OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780, + OpSubgroupAvcFmeInitializeINTEL = 5781, + OpSubgroupAvcBmeInitializeINTEL = 5782, + OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783, + OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784, + OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785, + OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786, + OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787, + OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788, + OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789, + OpSubgroupAvcRefConvertToMceResultINTEL = 5790, + OpSubgroupAvcSicInitializeINTEL = 5791, + OpSubgroupAvcSicConfigureSkcINTEL = 5792, + OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793, + OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794, + OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795, + OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796, + OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797, + OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798, + OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799, + OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800, + OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801, + OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802, + OpSubgroupAvcSicEvaluateIpeINTEL = 5803, + OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804, + OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805, + OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806, + OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807, + OpSubgroupAvcSicConvertToMceResultINTEL = 5808, + OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809, + OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810, + OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811, + OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812, + OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813, + 
OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, + OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, + OpSubgroupAvcSicGetInterRawSadsINTEL = 5816, + OpVariableLengthArrayINTEL = 5818, + OpSaveMemoryINTEL = 5819, + OpRestoreMemoryINTEL = 5820, + OpLoopControlINTEL = 5887, + OpPtrCastToCrossWorkgroupINTEL = 5934, + OpCrossWorkgroupCastToPtrINTEL = 5938, + OpReadPipeBlockingINTEL = 5946, + OpWritePipeBlockingINTEL = 5947, + OpFPGARegINTEL = 5949, + OpRayQueryGetRayTMinKHR = 6016, + OpRayQueryGetRayFlagsKHR = 6017, + OpRayQueryGetIntersectionTKHR = 6018, + OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019, + OpRayQueryGetIntersectionInstanceIdKHR = 6020, + OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021, + OpRayQueryGetIntersectionGeometryIndexKHR = 6022, + OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023, + OpRayQueryGetIntersectionBarycentricsKHR = 6024, + OpRayQueryGetIntersectionFrontFaceKHR = 6025, + OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026, + OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027, + OpRayQueryGetIntersectionObjectRayOriginKHR = 6028, + OpRayQueryGetWorldRayDirectionKHR = 6029, + OpRayQueryGetWorldRayOriginKHR = 6030, + OpRayQueryGetIntersectionObjectToWorldKHR = 6031, + OpRayQueryGetIntersectionWorldToObjectKHR = 6032, + OpAtomicFAddEXT = 6035, + OpTypeBufferSurfaceINTEL = 6086, + OpTypeStructContinuedINTEL = 6090, + OpConstantCompositeContinuedINTEL = 6091, + OpSpecConstantCompositeContinuedINTEL = 6092, OpMax = 0x7fffffff, }; +#ifdef SPV_ENABLE_UTILITY_CODE +inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) { + *hasResult = *hasResultType = false; + switch (opcode) { + default: /* unknown opcode */ break; + case OpNop: *hasResult = false; *hasResultType = false; break; + case OpUndef: *hasResult = true; *hasResultType = true; break; + case OpSourceContinued: *hasResult = false; *hasResultType = false; break; + case OpSource: *hasResult = false; *hasResultType = false; break; + case OpSourceExtension: *hasResult = false; *hasResultType = false; break; + case OpName: *hasResult = false; *hasResultType = false; break; + case OpMemberName: *hasResult = false; *hasResultType = false; break; + case OpString: *hasResult = true; *hasResultType = false; break; + case OpLine: *hasResult = false; *hasResultType = false; break; + case OpExtension: *hasResult = false; *hasResultType = false; break; + case OpExtInstImport: *hasResult = true; *hasResultType = false; break; + case OpExtInst: *hasResult = true; *hasResultType = true; break; + case OpMemoryModel: *hasResult = false; *hasResultType = false; break; + case OpEntryPoint: *hasResult = false; *hasResultType = false; break; + case OpExecutionMode: *hasResult = false; *hasResultType = false; break; + case OpCapability: *hasResult = false; *hasResultType = false; break; + case OpTypeVoid: *hasResult = true; *hasResultType = false; break; + case OpTypeBool: *hasResult = true; *hasResultType = false; break; + case OpTypeInt: *hasResult = true; *hasResultType = false; break; + case OpTypeFloat: *hasResult = true; *hasResultType = false; break; + case OpTypeVector: *hasResult = true; *hasResultType = false; break; + case OpTypeMatrix: *hasResult = true; *hasResultType = false; break; + case OpTypeImage: *hasResult = true; *hasResultType = false; break; + case OpTypeSampler: *hasResult = true; *hasResultType = false; break; + case OpTypeSampledImage: *hasResult = true; *hasResultType = false; break; + case OpTypeArray: *hasResult = true; 
*hasResultType = false; break; + case OpTypeRuntimeArray: *hasResult = true; *hasResultType = false; break; + case OpTypeStruct: *hasResult = true; *hasResultType = false; break; + case OpTypeOpaque: *hasResult = true; *hasResultType = false; break; + case OpTypePointer: *hasResult = true; *hasResultType = false; break; + case OpTypeFunction: *hasResult = true; *hasResultType = false; break; + case OpTypeEvent: *hasResult = true; *hasResultType = false; break; + case OpTypeDeviceEvent: *hasResult = true; *hasResultType = false; break; + case OpTypeReserveId: *hasResult = true; *hasResultType = false; break; + case OpTypeQueue: *hasResult = true; *hasResultType = false; break; + case OpTypePipe: *hasResult = true; *hasResultType = false; break; + case OpTypeForwardPointer: *hasResult = false; *hasResultType = false; break; + case OpConstantTrue: *hasResult = true; *hasResultType = true; break; + case OpConstantFalse: *hasResult = true; *hasResultType = true; break; + case OpConstant: *hasResult = true; *hasResultType = true; break; + case OpConstantComposite: *hasResult = true; *hasResultType = true; break; + case OpConstantSampler: *hasResult = true; *hasResultType = true; break; + case OpConstantNull: *hasResult = true; *hasResultType = true; break; + case OpSpecConstantTrue: *hasResult = true; *hasResultType = true; break; + case OpSpecConstantFalse: *hasResult = true; *hasResultType = true; break; + case OpSpecConstant: *hasResult = true; *hasResultType = true; break; + case OpSpecConstantComposite: *hasResult = true; *hasResultType = true; break; + case OpSpecConstantOp: *hasResult = true; *hasResultType = true; break; + case OpFunction: *hasResult = true; *hasResultType = true; break; + case OpFunctionParameter: *hasResult = true; *hasResultType = true; break; + case OpFunctionEnd: *hasResult = false; *hasResultType = false; break; + case OpFunctionCall: *hasResult = true; *hasResultType = true; break; + case OpVariable: *hasResult = true; *hasResultType = true; break; + case OpImageTexelPointer: *hasResult = true; *hasResultType = true; break; + case OpLoad: *hasResult = true; *hasResultType = true; break; + case OpStore: *hasResult = false; *hasResultType = false; break; + case OpCopyMemory: *hasResult = false; *hasResultType = false; break; + case OpCopyMemorySized: *hasResult = false; *hasResultType = false; break; + case OpAccessChain: *hasResult = true; *hasResultType = true; break; + case OpInBoundsAccessChain: *hasResult = true; *hasResultType = true; break; + case OpPtrAccessChain: *hasResult = true; *hasResultType = true; break; + case OpArrayLength: *hasResult = true; *hasResultType = true; break; + case OpGenericPtrMemSemantics: *hasResult = true; *hasResultType = true; break; + case OpInBoundsPtrAccessChain: *hasResult = true; *hasResultType = true; break; + case OpDecorate: *hasResult = false; *hasResultType = false; break; + case OpMemberDecorate: *hasResult = false; *hasResultType = false; break; + case OpDecorationGroup: *hasResult = true; *hasResultType = false; break; + case OpGroupDecorate: *hasResult = false; *hasResultType = false; break; + case OpGroupMemberDecorate: *hasResult = false; *hasResultType = false; break; + case OpVectorExtractDynamic: *hasResult = true; *hasResultType = true; break; + case OpVectorInsertDynamic: *hasResult = true; *hasResultType = true; break; + case OpVectorShuffle: *hasResult = true; *hasResultType = true; break; + case OpCompositeConstruct: *hasResult = true; *hasResultType = true; break; + case OpCompositeExtract: *hasResult = 
true; *hasResultType = true; break; + case OpCompositeInsert: *hasResult = true; *hasResultType = true; break; + case OpCopyObject: *hasResult = true; *hasResultType = true; break; + case OpTranspose: *hasResult = true; *hasResultType = true; break; + case OpSampledImage: *hasResult = true; *hasResultType = true; break; + case OpImageSampleImplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageSampleExplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageSampleDrefImplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case OpImageFetch: *hasResult = true; *hasResultType = true; break; + case OpImageGather: *hasResult = true; *hasResultType = true; break; + case OpImageDrefGather: *hasResult = true; *hasResultType = true; break; + case OpImageRead: *hasResult = true; *hasResultType = true; break; + case OpImageWrite: *hasResult = false; *hasResultType = false; break; + case OpImage: *hasResult = true; *hasResultType = true; break; + case OpImageQueryFormat: *hasResult = true; *hasResultType = true; break; + case OpImageQueryOrder: *hasResult = true; *hasResultType = true; break; + case OpImageQuerySizeLod: *hasResult = true; *hasResultType = true; break; + case OpImageQuerySize: *hasResult = true; *hasResultType = true; break; + case OpImageQueryLod: *hasResult = true; *hasResultType = true; break; + case OpImageQueryLevels: *hasResult = true; *hasResultType = true; break; + case OpImageQuerySamples: *hasResult = true; *hasResultType = true; break; + case OpConvertFToU: *hasResult = true; *hasResultType = true; break; + case OpConvertFToS: *hasResult = true; *hasResultType = true; break; + case OpConvertSToF: *hasResult = true; *hasResultType = true; break; + case OpConvertUToF: *hasResult = true; *hasResultType = true; break; + case OpUConvert: *hasResult = true; *hasResultType = true; break; + case OpSConvert: *hasResult = true; *hasResultType = true; break; + case OpFConvert: *hasResult = true; *hasResultType = true; break; + case OpQuantizeToF16: *hasResult = true; *hasResultType = true; break; + case OpConvertPtrToU: *hasResult = true; *hasResultType = true; break; + case OpSatConvertSToU: *hasResult = true; *hasResultType = true; break; + case OpSatConvertUToS: *hasResult = true; *hasResultType = true; break; + case OpConvertUToPtr: *hasResult = true; *hasResultType = true; break; + case OpPtrCastToGeneric: *hasResult = true; *hasResultType = true; break; + case OpGenericCastToPtr: *hasResult = true; *hasResultType = true; break; + case OpGenericCastToPtrExplicit: *hasResult = true; *hasResultType = true; break; + case OpBitcast: *hasResult = true; *hasResultType = true; break; + case OpSNegate: *hasResult = true; *hasResultType = true; break; + case OpFNegate: *hasResult = true; *hasResultType = true; break; + case OpIAdd: *hasResult = true; *hasResultType = true; break; + case OpFAdd: *hasResult = true; *hasResultType = true; break; + case OpISub: *hasResult = true; *hasResultType = true; break; + case OpFSub: *hasResult = true; *hasResultType = true; break; + case OpIMul: *hasResult = true; 
*hasResultType = true; break; + case OpFMul: *hasResult = true; *hasResultType = true; break; + case OpUDiv: *hasResult = true; *hasResultType = true; break; + case OpSDiv: *hasResult = true; *hasResultType = true; break; + case OpFDiv: *hasResult = true; *hasResultType = true; break; + case OpUMod: *hasResult = true; *hasResultType = true; break; + case OpSRem: *hasResult = true; *hasResultType = true; break; + case OpSMod: *hasResult = true; *hasResultType = true; break; + case OpFRem: *hasResult = true; *hasResultType = true; break; + case OpFMod: *hasResult = true; *hasResultType = true; break; + case OpVectorTimesScalar: *hasResult = true; *hasResultType = true; break; + case OpMatrixTimesScalar: *hasResult = true; *hasResultType = true; break; + case OpVectorTimesMatrix: *hasResult = true; *hasResultType = true; break; + case OpMatrixTimesVector: *hasResult = true; *hasResultType = true; break; + case OpMatrixTimesMatrix: *hasResult = true; *hasResultType = true; break; + case OpOuterProduct: *hasResult = true; *hasResultType = true; break; + case OpDot: *hasResult = true; *hasResultType = true; break; + case OpIAddCarry: *hasResult = true; *hasResultType = true; break; + case OpISubBorrow: *hasResult = true; *hasResultType = true; break; + case OpUMulExtended: *hasResult = true; *hasResultType = true; break; + case OpSMulExtended: *hasResult = true; *hasResultType = true; break; + case OpAny: *hasResult = true; *hasResultType = true; break; + case OpAll: *hasResult = true; *hasResultType = true; break; + case OpIsNan: *hasResult = true; *hasResultType = true; break; + case OpIsInf: *hasResult = true; *hasResultType = true; break; + case OpIsFinite: *hasResult = true; *hasResultType = true; break; + case OpIsNormal: *hasResult = true; *hasResultType = true; break; + case OpSignBitSet: *hasResult = true; *hasResultType = true; break; + case OpLessOrGreater: *hasResult = true; *hasResultType = true; break; + case OpOrdered: *hasResult = true; *hasResultType = true; break; + case OpUnordered: *hasResult = true; *hasResultType = true; break; + case OpLogicalEqual: *hasResult = true; *hasResultType = true; break; + case OpLogicalNotEqual: *hasResult = true; *hasResultType = true; break; + case OpLogicalOr: *hasResult = true; *hasResultType = true; break; + case OpLogicalAnd: *hasResult = true; *hasResultType = true; break; + case OpLogicalNot: *hasResult = true; *hasResultType = true; break; + case OpSelect: *hasResult = true; *hasResultType = true; break; + case OpIEqual: *hasResult = true; *hasResultType = true; break; + case OpINotEqual: *hasResult = true; *hasResultType = true; break; + case OpUGreaterThan: *hasResult = true; *hasResultType = true; break; + case OpSGreaterThan: *hasResult = true; *hasResultType = true; break; + case OpUGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case OpSGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case OpULessThan: *hasResult = true; *hasResultType = true; break; + case OpSLessThan: *hasResult = true; *hasResultType = true; break; + case OpULessThanEqual: *hasResult = true; *hasResultType = true; break; + case OpSLessThanEqual: *hasResult = true; *hasResultType = true; break; + case OpFOrdEqual: *hasResult = true; *hasResultType = true; break; + case OpFUnordEqual: *hasResult = true; *hasResultType = true; break; + case OpFOrdNotEqual: *hasResult = true; *hasResultType = true; break; + case OpFUnordNotEqual: *hasResult = true; *hasResultType = true; break; + case OpFOrdLessThan: *hasResult = true; 
*hasResultType = true; break; + case OpFUnordLessThan: *hasResult = true; *hasResultType = true; break; + case OpFOrdGreaterThan: *hasResult = true; *hasResultType = true; break; + case OpFUnordGreaterThan: *hasResult = true; *hasResultType = true; break; + case OpFOrdLessThanEqual: *hasResult = true; *hasResultType = true; break; + case OpFUnordLessThanEqual: *hasResult = true; *hasResultType = true; break; + case OpFOrdGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case OpFUnordGreaterThanEqual: *hasResult = true; *hasResultType = true; break; + case OpShiftRightLogical: *hasResult = true; *hasResultType = true; break; + case OpShiftRightArithmetic: *hasResult = true; *hasResultType = true; break; + case OpShiftLeftLogical: *hasResult = true; *hasResultType = true; break; + case OpBitwiseOr: *hasResult = true; *hasResultType = true; break; + case OpBitwiseXor: *hasResult = true; *hasResultType = true; break; + case OpBitwiseAnd: *hasResult = true; *hasResultType = true; break; + case OpNot: *hasResult = true; *hasResultType = true; break; + case OpBitFieldInsert: *hasResult = true; *hasResultType = true; break; + case OpBitFieldSExtract: *hasResult = true; *hasResultType = true; break; + case OpBitFieldUExtract: *hasResult = true; *hasResultType = true; break; + case OpBitReverse: *hasResult = true; *hasResultType = true; break; + case OpBitCount: *hasResult = true; *hasResultType = true; break; + case OpDPdx: *hasResult = true; *hasResultType = true; break; + case OpDPdy: *hasResult = true; *hasResultType = true; break; + case OpFwidth: *hasResult = true; *hasResultType = true; break; + case OpDPdxFine: *hasResult = true; *hasResultType = true; break; + case OpDPdyFine: *hasResult = true; *hasResultType = true; break; + case OpFwidthFine: *hasResult = true; *hasResultType = true; break; + case OpDPdxCoarse: *hasResult = true; *hasResultType = true; break; + case OpDPdyCoarse: *hasResult = true; *hasResultType = true; break; + case OpFwidthCoarse: *hasResult = true; *hasResultType = true; break; + case OpEmitVertex: *hasResult = false; *hasResultType = false; break; + case OpEndPrimitive: *hasResult = false; *hasResultType = false; break; + case OpEmitStreamVertex: *hasResult = false; *hasResultType = false; break; + case OpEndStreamPrimitive: *hasResult = false; *hasResultType = false; break; + case OpControlBarrier: *hasResult = false; *hasResultType = false; break; + case OpMemoryBarrier: *hasResult = false; *hasResultType = false; break; + case OpAtomicLoad: *hasResult = true; *hasResultType = true; break; + case OpAtomicStore: *hasResult = false; *hasResultType = false; break; + case OpAtomicExchange: *hasResult = true; *hasResultType = true; break; + case OpAtomicCompareExchange: *hasResult = true; *hasResultType = true; break; + case OpAtomicCompareExchangeWeak: *hasResult = true; *hasResultType = true; break; + case OpAtomicIIncrement: *hasResult = true; *hasResultType = true; break; + case OpAtomicIDecrement: *hasResult = true; *hasResultType = true; break; + case OpAtomicIAdd: *hasResult = true; *hasResultType = true; break; + case OpAtomicISub: *hasResult = true; *hasResultType = true; break; + case OpAtomicSMin: *hasResult = true; *hasResultType = true; break; + case OpAtomicUMin: *hasResult = true; *hasResultType = true; break; + case OpAtomicSMax: *hasResult = true; *hasResultType = true; break; + case OpAtomicUMax: *hasResult = true; *hasResultType = true; break; + case OpAtomicAnd: *hasResult = true; *hasResultType = true; break; + case OpAtomicOr: 
*hasResult = true; *hasResultType = true; break; + case OpAtomicXor: *hasResult = true; *hasResultType = true; break; + case OpPhi: *hasResult = true; *hasResultType = true; break; + case OpLoopMerge: *hasResult = false; *hasResultType = false; break; + case OpSelectionMerge: *hasResult = false; *hasResultType = false; break; + case OpLabel: *hasResult = true; *hasResultType = false; break; + case OpBranch: *hasResult = false; *hasResultType = false; break; + case OpBranchConditional: *hasResult = false; *hasResultType = false; break; + case OpSwitch: *hasResult = false; *hasResultType = false; break; + case OpKill: *hasResult = false; *hasResultType = false; break; + case OpReturn: *hasResult = false; *hasResultType = false; break; + case OpReturnValue: *hasResult = false; *hasResultType = false; break; + case OpUnreachable: *hasResult = false; *hasResultType = false; break; + case OpLifetimeStart: *hasResult = false; *hasResultType = false; break; + case OpLifetimeStop: *hasResult = false; *hasResultType = false; break; + case OpGroupAsyncCopy: *hasResult = true; *hasResultType = true; break; + case OpGroupWaitEvents: *hasResult = false; *hasResultType = false; break; + case OpGroupAll: *hasResult = true; *hasResultType = true; break; + case OpGroupAny: *hasResult = true; *hasResultType = true; break; + case OpGroupBroadcast: *hasResult = true; *hasResultType = true; break; + case OpGroupIAdd: *hasResult = true; *hasResultType = true; break; + case OpGroupFAdd: *hasResult = true; *hasResultType = true; break; + case OpGroupFMin: *hasResult = true; *hasResultType = true; break; + case OpGroupUMin: *hasResult = true; *hasResultType = true; break; + case OpGroupSMin: *hasResult = true; *hasResultType = true; break; + case OpGroupFMax: *hasResult = true; *hasResultType = true; break; + case OpGroupUMax: *hasResult = true; *hasResultType = true; break; + case OpGroupSMax: *hasResult = true; *hasResultType = true; break; + case OpReadPipe: *hasResult = true; *hasResultType = true; break; + case OpWritePipe: *hasResult = true; *hasResultType = true; break; + case OpReservedReadPipe: *hasResult = true; *hasResultType = true; break; + case OpReservedWritePipe: *hasResult = true; *hasResultType = true; break; + case OpReserveReadPipePackets: *hasResult = true; *hasResultType = true; break; + case OpReserveWritePipePackets: *hasResult = true; *hasResultType = true; break; + case OpCommitReadPipe: *hasResult = false; *hasResultType = false; break; + case OpCommitWritePipe: *hasResult = false; *hasResultType = false; break; + case OpIsValidReserveId: *hasResult = true; *hasResultType = true; break; + case OpGetNumPipePackets: *hasResult = true; *hasResultType = true; break; + case OpGetMaxPipePackets: *hasResult = true; *hasResultType = true; break; + case OpGroupReserveReadPipePackets: *hasResult = true; *hasResultType = true; break; + case OpGroupReserveWritePipePackets: *hasResult = true; *hasResultType = true; break; + case OpGroupCommitReadPipe: *hasResult = false; *hasResultType = false; break; + case OpGroupCommitWritePipe: *hasResult = false; *hasResultType = false; break; + case OpEnqueueMarker: *hasResult = true; *hasResultType = true; break; + case OpEnqueueKernel: *hasResult = true; *hasResultType = true; break; + case OpGetKernelNDrangeSubGroupCount: *hasResult = true; *hasResultType = true; break; + case OpGetKernelNDrangeMaxSubGroupSize: *hasResult = true; *hasResultType = true; break; + case OpGetKernelWorkGroupSize: *hasResult = true; *hasResultType = true; break; + case 
OpGetKernelPreferredWorkGroupSizeMultiple: *hasResult = true; *hasResultType = true; break;
+    case OpRetainEvent: *hasResult = false; *hasResultType = false; break;
+    case OpReleaseEvent: *hasResult = false; *hasResultType = false; break;
+    case OpCreateUserEvent: *hasResult = true; *hasResultType = true; break;
+    case OpIsValidEvent: *hasResult = true; *hasResultType = true; break;
+    case OpSetUserEventStatus: *hasResult = false; *hasResultType = false; break;
+    case OpCaptureEventProfilingInfo: *hasResult = false; *hasResultType = false; break;
+    case OpGetDefaultQueue: *hasResult = true; *hasResultType = true; break;
+    case OpBuildNDRange: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseFetch: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseGather: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseDrefGather: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseTexelsResident: *hasResult = true; *hasResultType = true; break;
+    case OpNoLine: *hasResult = false; *hasResultType = false; break;
+    case OpAtomicFlagTestAndSet: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFlagClear: *hasResult = false; *hasResultType = false; break;
+    case OpImageSparseRead: *hasResult = true; *hasResultType = true; break;
+    case OpSizeOf: *hasResult = true; *hasResultType = true; break;
+    case OpTypePipeStorage: *hasResult = true; *hasResultType = false; break;
+    case OpConstantPipeStorage: *hasResult = true; *hasResultType = true; break;
+    case OpCreatePipeFromPipeStorage: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelLocalSizeForSubgroupCount: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelMaxNumSubgroups: *hasResult = true; *hasResultType = true; break;
+    case OpTypeNamedBarrier: *hasResult = true; *hasResultType = false; break;
+    case OpNamedBarrierInitialize: *hasResult = true; *hasResultType = true; break;
+    case OpMemoryNamedBarrier: *hasResult = false; *hasResultType = false; break;
+    case OpModuleProcessed: *hasResult = false; *hasResultType = false; break;
+    case OpExecutionModeId: *hasResult = false; *hasResultType = false; break;
+    case OpDecorateId: *hasResult = false; *hasResultType = false; break;
+    case OpGroupNonUniformElect: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformAll: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformAny: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformAllEqual: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBroadcast: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBroadcastFirst: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallot: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformInverseBallot: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotBitExtract: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotBitCount: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotFindLSB: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotFindMSB: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffle: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffleXor: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffleUp: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffleDown: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformIAdd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFAdd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformIMul: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFMul: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformSMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformUMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformSMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformUMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBitwiseAnd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBitwiseOr: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBitwiseXor: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformLogicalAnd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformLogicalOr: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformLogicalXor: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformQuadBroadcast: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformQuadSwap: *hasResult = true; *hasResultType = true; break;
+    case OpCopyLogical: *hasResult = true; *hasResultType = true; break;
+    case OpPtrEqual: *hasResult = true; *hasResultType = true; break;
+    case OpPtrNotEqual: *hasResult = true; *hasResultType = true; break;
+    case OpPtrDiff: *hasResult = true; *hasResultType = true; break;
+    case OpTerminateInvocation: *hasResult = false; *hasResultType = false; break;
+    case OpSubgroupBallotKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupFirstInvocationKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAllKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAnyKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAllEqualKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupReadInvocationKHR: *hasResult = true; *hasResultType = true; break;
+    case OpTraceRayKHR: *hasResult = false; *hasResultType = false; break;
+    case OpExecuteCallableKHR: *hasResult = false; *hasResultType = false; break;
+    case OpConvertUToAccelerationStructureKHR: *hasResult = true; *hasResultType = true; break;
+    case OpIgnoreIntersectionKHR: *hasResult = false; *hasResultType = false; break;
+    case OpTerminateRayKHR: *hasResult = false; *hasResultType = false; break;
+    case OpTypeRayQueryKHR: *hasResult = true; *hasResultType = false; break;
+    case OpRayQueryInitializeKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryTerminateKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryGenerateIntersectionKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryConfirmIntersectionKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryProceedKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionTypeKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupIAddNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFAddNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFMinNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupUMinNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupSMinNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupUMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupSMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpFragmentMaskFetchAMD: *hasResult = true; *hasResultType = true; break;
+    case OpFragmentFetchAMD: *hasResult = true; *hasResultType = true; break;
+    case OpReadClockKHR: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleFootprintNV: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformPartitionNV: *hasResult = true; *hasResultType = true; break;
+    case OpWritePackedPrimitiveIndices4x8NV: *hasResult = false; *hasResultType = false; break;
+    case OpReportIntersectionNV: *hasResult = true; *hasResultType = true; break;
+    case OpIgnoreIntersectionNV: *hasResult = false; *hasResultType = false; break;
+    case OpTerminateRayNV: *hasResult = false; *hasResultType = false; break;
+    case OpTraceNV: *hasResult = false; *hasResultType = false; break;
+    case OpTypeAccelerationStructureNV: *hasResult = true; *hasResultType = false; break;
+    case OpExecuteCallableNV: *hasResult = false; *hasResultType = false; break;
+    case OpTypeCooperativeMatrixNV: *hasResult = true; *hasResultType = false; break;
+    case OpCooperativeMatrixLoadNV: *hasResult = true; *hasResultType = true; break;
+    case OpCooperativeMatrixStoreNV: *hasResult = false; *hasResultType = false; break;
+    case OpCooperativeMatrixMulAddNV: *hasResult = true; *hasResultType = true; break;
+    case OpCooperativeMatrixLengthNV: *hasResult = true; *hasResultType = true; break;
+    case OpBeginInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break;
+    case OpEndInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break;
+    case OpDemoteToHelperInvocationEXT: *hasResult = false; *hasResultType = false; break;
+    case OpIsHelperInvocationEXT: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupShuffleINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupShuffleDownINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupShuffleUpINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupShuffleXorINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupBlockReadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpSubgroupImageBlockReadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupImageBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpSubgroupImageMediaBlockReadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupImageMediaBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpUCountLeadingZerosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUCountTrailingZerosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAbsISubINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAbsUSubINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIAddSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUAddSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIAverageINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUAverageINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpISubSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUSubSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIMul32x16INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUMul32x16INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpConstFunctionPointerINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFunctionPointerCallINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAsmTargetINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAsmINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAsmCallINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFMinEXT: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFMaxEXT: *hasResult = true; *hasResultType = true; break;
+    case OpDecorateString: *hasResult = false; *hasResultType = false; break;
+    case OpMemberDecorateString: *hasResult = false; *hasResultType = false; break;
+    case OpVmeImageINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpTypeVmeImageINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImePayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcRefPayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcSicPayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcMcePayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcMceResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeResultSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeResultDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcRefResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcSicResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetAcOnlyHaarINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToImePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToImeResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToRefPayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToRefResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToSicPayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToSicResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetBestInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterMajorShapeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterMinorShapeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterDirectionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeRefWindowSizeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeAdjustRefOffsetINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetMaxMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetWeightedSadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeStripDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetBorderReachedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcFmeInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcBmeInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefSetBidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConfigureSkcINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConfigureIpeLumaINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConfigureIpeLumaChromaINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetMotionVectorMaskINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateIpeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetIpeLumaShapeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetPackedIpeLumaModesINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetIpeChromaModeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetInterRawSadsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpVariableLengthArrayINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSaveMemoryINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpRestoreMemoryINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpLoopControlINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpPtrCastToCrossWorkgroupINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpCrossWorkgroupCastToPtrINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpReadPipeBlockingINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpWritePipeBlockingINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFPGARegINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetRayTMinKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetRayFlagsKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionTKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionInstanceCustomIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionInstanceIdKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionGeometryIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionPrimitiveIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionBarycentricsKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionFrontFaceKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionCandidateAABBOpaqueKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionObjectRayDirectionKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionObjectRayOriginKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetWorldRayDirectionKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetWorldRayOriginKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionObjectToWorldKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionWorldToObjectKHR: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFAddEXT: *hasResult = true; *hasResultType = true; break;
+    case OpTypeBufferSurfaceINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeStructContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpSpecConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    }
+}
+#endif /* SPV_ENABLE_UTILITY_CODE */
+
 // Overload operator| for mask bit combining
 inline ImageOperandsMask operator|(ImageOperandsMask a, ImageOperandsMask b) { return ImageOperandsMask(unsigned(a) | unsigned(b)); }
@@ -1095,6 +2291,8 @@ inline FunctionControlMask operator|(FunctionControlMask a, FunctionControlMask
 inline MemorySemanticsMask operator|(MemorySemanticsMask a, MemorySemanticsMask b) { return MemorySemanticsMask(unsigned(a) | unsigned(b)); }
 inline MemoryAccessMask operator|(MemoryAccessMask a, MemoryAccessMask b) { return MemoryAccessMask(unsigned(a) | unsigned(b)); }
 inline KernelProfilingInfoMask operator|(KernelProfilingInfoMask a, KernelProfilingInfoMask b) { return KernelProfilingInfoMask(unsigned(a) | unsigned(b)); }
+inline RayFlagsMask operator|(RayFlagsMask a, RayFlagsMask b) { return RayFlagsMask(unsigned(a) | unsigned(b)); }
+inline FragmentShadingRateMask operator|(FragmentShadingRateMask a, FragmentShadingRateMask b) { return FragmentShadingRateMask(unsigned(a) | unsigned(b)); }
 
 } // end namespace spv
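[Editor's note] The HasResultAndType helper closed above is only compiled when SPV_ENABLE_UTILITY_CODE is defined before the header is included, and the operator| overloads let the mask-style enums be combined without casts. A minimal usage sketch, illustrative only and not part of this patch (it assumes the patched spirv.hpp is on the include path):

```cpp
// Must be defined before the include to compile spv::HasResultAndType.
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include <cstdio>

int main() {
    bool hasResult = false;
    bool hasResultType = false;
    // Query the instruction layout of an opcode added by this patch.
    spv::HasResultAndType(spv::OpRayQueryProceedKHR, &hasResult, &hasResultType);
    std::printf("OpRayQueryProceedKHR: result=%d, result type=%d\n", hasResult, hasResultType);

    // The operator| overloads combine mask-style enums without manual casts.
    spv::ImageOperandsMask ops = spv::ImageOperandsBiasMask | spv::ImageOperandsConstOffsetMask;
    return unsigned(ops) == 0x9u ? 0 : 1;  // Bias (0x1) | ConstOffset (0x8)
}
```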
diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp11 b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp11
index 3bd5b8a0d..26ee33add 100755
--- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp11
+++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.hpp11
@@ -1,4 +1,4 @@
-// Copyright (c) 2014-2018 The Khronos Group Inc.
+// Copyright (c) 2014-2020 The Khronos Group Inc.
 //
 // Permission is hereby granted, free of charge, to any person obtaining a copy
 // of this software and/or associated documentation files (the "Materials"),
@@ -26,13 +26,16 @@
 // the Binary Section of the SPIR-V specification.
 
 // Enumeration tokens for SPIR-V, in various styles:
-//   C, C++, C++11, JSON, Lua, Python
+//   C, C++, C++11, JSON, Lua, Python, C#, D
 //
 // - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL
 // - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL
 // - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL
 // - Lua will use tables, e.g.: spv.SourceLanguage.GLSL
 // - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL']
+// - C# will use enum classes in the Specification class located in the "Spv" namespace,
+//   e.g.: Spv.Specification.SourceLanguage.GLSL
+// - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL
 //
 // Some tokens act like mask values, which can be OR'd together,
 // while others are mutually exclusive.  The mask-like ones have
@@ -46,12 +49,12 @@ namespace spv {
 
 typedef unsigned int Id;
 
-#define SPV_VERSION 0x10300
-#define SPV_REVISION 1
+#define SPV_VERSION 0x10500
+#define SPV_REVISION 4
 
 static const unsigned int MagicNumber = 0x07230203;
-static const unsigned int Version = 0x00010300;
-static const unsigned int Revision = 1;
+static const unsigned int Version = 0x00010500;
+static const unsigned int Revision = 4;
 static const unsigned int OpCodeMask = 0xffff;
 static const unsigned int WordCountShift = 16;
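[Editor's note] The bump from 0x10300 to 0x10500 follows the SPIR-V convention of packing the major version in bits 16-23 and the minor version in bits 8-15, while OpCodeMask and WordCountShift describe how each instruction's first word is split. A short sketch of both conventions, illustrative only and not part of this patch:

```cpp
#include <cstdint>

// SPV_VERSION packs (major << 16) | (minor << 8), so 0x10500 is SPIR-V 1.5.
constexpr unsigned VersionMajor(unsigned v) { return (v >> 16) & 0xffu; }
constexpr unsigned VersionMinor(unsigned v) { return (v >> 8) & 0xffu; }
static_assert(VersionMajor(0x10500) == 1 && VersionMinor(0x10500) == 5,
              "0x10500 encodes SPIR-V 1.5");

// The first word of every SPIR-V instruction packs
// (word count << WordCountShift) | opcode.
inline void DecodeFirstWord(std::uint32_t word, unsigned& opcode, unsigned& wordCount) {
    opcode = word & 0xffffu;   // spv::OpCodeMask
    wordCount = word >> 16u;   // spv::WordCountShift
}
```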
@@ -73,6 +76,20 @@ enum class ExecutionModel : unsigned {
     Fragment = 4,
     GLCompute = 5,
     Kernel = 6,
+    TaskNV = 5267,
+    MeshNV = 5268,
+    RayGenerationKHR = 5313,
+    RayGenerationNV = 5313,
+    IntersectionKHR = 5314,
+    IntersectionNV = 5314,
+    AnyHitKHR = 5315,
+    AnyHitNV = 5315,
+    ClosestHitKHR = 5316,
+    ClosestHitNV = 5316,
+    MissKHR = 5317,
+    MissNV = 5317,
+    CallableKHR = 5318,
+    CallableNV = 5318,
     Max = 0x7fffffff,
 };
@@ -80,6 +97,8 @@ enum class AddressingModel : unsigned {
     Logical = 0,
     Physical32 = 1,
     Physical64 = 2,
+    PhysicalStorageBuffer64 = 5348,
+    PhysicalStorageBuffer64EXT = 5348,
     Max = 0x7fffffff,
 };
@@ -87,6 +106,8 @@ enum class MemoryModel : unsigned {
     Simple = 0,
     GLSL450 = 1,
     OpenCL = 2,
+    Vulkan = 3,
+    VulkanKHR = 3,
     Max = 0x7fffffff,
 };
@@ -130,7 +151,33 @@ enum class ExecutionMode : unsigned {
     LocalSizeId = 38,
     LocalSizeHintId = 39,
     PostDepthCoverage = 4446,
+    DenormPreserve = 4459,
+    DenormFlushToZero = 4460,
+    SignedZeroInfNanPreserve = 4461,
+    RoundingModeRTE = 4462,
+    RoundingModeRTZ = 4463,
     StencilRefReplacingEXT = 5027,
+    OutputLinesNV = 5269,
+    OutputPrimitivesNV = 5270,
+    DerivativeGroupQuadsNV = 5289,
+    DerivativeGroupLinearNV = 5290,
+    OutputTrianglesNV = 5298,
+    PixelInterlockOrderedEXT = 5366,
+    PixelInterlockUnorderedEXT = 5367,
+    SampleInterlockOrderedEXT = 5368,
+    SampleInterlockUnorderedEXT = 5369,
+    ShadingRateInterlockOrderedEXT = 5370,
+    ShadingRateInterlockUnorderedEXT = 5371,
+    SharedLocalMemorySizeINTEL = 5618,
+    RoundingModeRTPINTEL = 5620,
+    RoundingModeRTNINTEL = 5621,
+    FloatingPointModeALTINTEL = 5622,
+    FloatingPointModeIEEEINTEL = 5623,
+    MaxWorkgroupSizeINTEL = 5893,
+    MaxWorkDimINTEL = 5894,
+    NoGlobalOffsetINTEL = 5895,
+    NumSIMDWorkitemsINTEL = 5896,
+    SchedulerTargetFmaxMhzINTEL = 5903,
     Max = 0x7fffffff,
 };
@@ -148,6 +195,23 @@ enum class StorageClass : unsigned {
     AtomicCounter = 10,
     Image = 11,
     StorageBuffer = 12,
+    CallableDataKHR = 5328,
+    CallableDataNV = 5328,
+    IncomingCallableDataKHR = 5329,
+    IncomingCallableDataNV = 5329,
+    RayPayloadKHR = 5338,
+    RayPayloadNV = 5338,
+    HitAttributeKHR = 5339,
+    HitAttributeNV = 5339,
+    IncomingRayPayloadKHR = 5342,
+    IncomingRayPayloadNV = 5342,
+    ShaderRecordBufferKHR = 5343,
+    ShaderRecordBufferNV = 5343,
+    PhysicalStorageBuffer = 5349,
+    PhysicalStorageBufferEXT = 5349,
+    CodeSectionINTEL = 5605,
+    DeviceOnlyINTEL = 5936,
+    HostOnlyINTEL = 5937,
     Max = 0x7fffffff,
 };
@@ -218,6 +282,8 @@ enum class ImageFormat : unsigned {
     Rg8ui = 37,
     R16ui = 38,
     R8ui = 39,
+    R64ui = 40,
+    R64i = 41,
     Max = 0x7fffffff,
 };
@@ -275,6 +341,16 @@ enum class ImageOperandsShift : unsigned {
     ConstOffsets = 5,
     Sample = 6,
     MinLod = 7,
+    MakeTexelAvailable = 8,
+    MakeTexelAvailableKHR = 8,
+    MakeTexelVisible = 9,
+    MakeTexelVisibleKHR = 9,
+    NonPrivateTexel = 10,
+    NonPrivateTexelKHR = 10,
+    VolatileTexel = 11,
+    VolatileTexelKHR = 11,
+    SignExtend = 12,
+    ZeroExtend = 13,
     Max = 0x7fffffff,
 };
@@ -288,6 +364,16 @@ enum class ImageOperandsMask : unsigned {
     ConstOffsets = 0x00000020,
     Sample = 0x00000040,
     MinLod = 0x00000080,
+    MakeTexelAvailable = 0x00000100,
+    MakeTexelAvailableKHR = 0x00000100,
+    MakeTexelVisible = 0x00000200,
+    MakeTexelVisibleKHR = 0x00000200,
+    NonPrivateTexel = 0x00000400,
+    NonPrivateTexelKHR = 0x00000400,
+    VolatileTexel = 0x00000800,
+    VolatileTexelKHR = 0x00000800,
+    SignExtend = 0x00001000,
+    ZeroExtend = 0x00002000,
 };
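[Editor's note] ImageOperands is typical of the paired declarations in this header: an ...Shift enum gives the bit index and the matching ...Mask enum gives the corresponding 1 << shift value, with the KHR aliases sharing the same bit. A compile-time check of that invariant, illustrative only and not part of this patch (the include path is an assumption):

```cpp
#include "spirv/unified1/spirv.hpp11"

// Each Mask enumerant is exactly 1 << its Shift counterpart.
static_assert(unsigned(spv::ImageOperandsMask::MakeTexelAvailable) ==
                  1u << unsigned(spv::ImageOperandsShift::MakeTexelAvailable),
              "Mask == 1 << Shift");
static_assert(unsigned(spv::ImageOperandsMask::ZeroExtend) ==
                  1u << unsigned(spv::ImageOperandsShift::ZeroExtend),
              "Mask == 1 << Shift");
// The KHR aliases map to the same bit as the core names.
static_assert(spv::ImageOperandsMask::MakeTexelAvailableKHR ==
                  spv::ImageOperandsMask::MakeTexelAvailable,
              "KHR alias shares the core bit");
```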
 
 enum class FPFastMathModeShift : unsigned {
@@ -296,6 +382,8 @@
     NSZ = 2,
     AllowRecip = 3,
     Fast = 4,
+    AllowContractFastINTEL = 16,
+    AllowReassocINTEL = 17,
     Max = 0x7fffffff,
 };
@@ -306,6 +394,8 @@ enum class FPFastMathModeMask : unsigned {
     NSZ = 0x00000004,
     AllowRecip = 0x00000008,
     Fast = 0x00000010,
+    AllowContractFastINTEL = 0x00010000,
+    AllowReassocINTEL = 0x00020000,
 };
 
 enum class FPRoundingMode : unsigned {
@@ -368,6 +458,7 @@ enum class Decoration : unsigned {
     NonWritable = 24,
     NonReadable = 25,
     Uniform = 26,
+    UniformId = 27,
     SaturatedConversion = 28,
     Stream = 29,
     Location = 30,
@@ -388,14 +479,62 @@ enum class Decoration : unsigned {
     MaxByteOffset = 45,
     AlignmentId = 46,
     MaxByteOffsetId = 47,
+    NoSignedWrap = 4469,
+    NoUnsignedWrap = 4470,
     ExplicitInterpAMD = 4999,
     OverrideCoverageNV = 5248,
     PassthroughNV = 5250,
     ViewportRelativeNV = 5252,
     SecondaryViewportRelativeNV = 5256,
+    PerPrimitiveNV = 5271,
+    PerViewNV = 5272,
+    PerTaskNV = 5273,
+    PerVertexNV = 5285,
+    NonUniform = 5300,
     NonUniformEXT = 5300,
+    RestrictPointer = 5355,
+    RestrictPointerEXT = 5355,
+    AliasedPointer = 5356,
+    AliasedPointerEXT = 5356,
+    SIMTCallINTEL = 5599,
+    ReferencedIndirectlyINTEL = 5602,
+    ClobberINTEL = 5607,
+    SideEffectsINTEL = 5608,
+    VectorComputeVariableINTEL = 5624,
+    FuncParamIOKindINTEL = 5625,
+    VectorComputeFunctionINTEL = 5626,
+    StackCallINTEL = 5627,
+    GlobalVariableOffsetINTEL = 5628,
+    CounterBuffer = 5634,
     HlslCounterBufferGOOGLE = 5634,
     HlslSemanticGOOGLE = 5635,
+    UserSemantic = 5635,
+    UserTypeGOOGLE = 5636,
+    FunctionRoundingModeINTEL = 5822,
+    FunctionDenormModeINTEL = 5823,
+    RegisterINTEL = 5825,
+    MemoryINTEL = 5826,
+    NumbanksINTEL = 5827,
+    BankwidthINTEL = 5828,
+    MaxPrivateCopiesINTEL = 5829,
+    SinglepumpINTEL = 5830,
+    DoublepumpINTEL = 5831,
+    MaxReplicatesINTEL = 5832,
+    SimpleDualPortINTEL = 5833,
+    MergeINTEL = 5834,
+    BankBitsINTEL = 5835,
+    ForcePow2DepthINTEL = 5836,
+    BurstCoalesceINTEL = 5899,
+    CacheSizeINTEL = 5900,
+    DontStaticallyCoalesceINTEL = 5901,
+    PrefetchINTEL = 5902,
+    StallEnableINTEL = 5905,
+    FuseLoopsInFunctionINTEL = 5907,
+    BufferLocationINTEL = 5921,
+    IOPipeStorageINTEL = 5944,
+    FunctionFloatingPointModeINTEL = 6080,
+    SingleElementVectorINTEL = 6085,
+    VectorComputeCallableFunctionINTEL = 6087,
     Max = 0x7fffffff,
 };
@@ -454,8 +593,10 @@ enum class BuiltIn : unsigned {
     BaseVertex = 4424,
     BaseInstance = 4425,
     DrawIndex = 4426,
+    PrimitiveShadingRateKHR = 4432,
     DeviceIndex = 4438,
     ViewIndex = 4440,
+    ShadingRateKHR = 4444,
     BaryCoordNoPerspAMD = 4992,
     BaryCoordNoPerspCentroidAMD = 4993,
     BaryCoordNoPerspSampleAMD = 4994,
@@ -470,6 +611,52 @@ enum class BuiltIn : unsigned {
     PositionPerViewNV = 5261,
     ViewportMaskPerViewNV = 5262,
     FullyCoveredEXT = 5264,
+    TaskCountNV = 5274,
+    PrimitiveCountNV = 5275,
+    PrimitiveIndicesNV = 5276,
+    ClipDistancePerViewNV = 5277,
+    CullDistancePerViewNV = 5278,
+    LayerPerViewNV = 5279,
+    MeshViewCountNV = 5280,
+    MeshViewIndicesNV = 5281,
+    BaryCoordNV = 5286,
+    BaryCoordNoPerspNV = 5287,
+    FragSizeEXT = 5292,
+    FragmentSizeNV = 5292,
+    FragInvocationCountEXT = 5293,
+    InvocationsPerPixelNV = 5293,
+    LaunchIdKHR = 5319,
+    LaunchIdNV = 5319,
+    LaunchSizeKHR = 5320,
+    LaunchSizeNV = 5320,
+    WorldRayOriginKHR = 5321,
+    WorldRayOriginNV = 5321,
+    WorldRayDirectionKHR = 5322,
+    WorldRayDirectionNV = 5322,
+    ObjectRayOriginKHR = 5323,
+    ObjectRayOriginNV = 5323,
+    ObjectRayDirectionKHR = 5324,
+    ObjectRayDirectionNV = 5324,
+    RayTminKHR = 5325,
+    RayTminNV = 5325,
+    RayTmaxKHR = 5326,
+    RayTmaxNV = 5326,
+    InstanceCustomIndexKHR = 5327,
+    InstanceCustomIndexNV = 5327,
+    ObjectToWorldKHR = 5330,
+    ObjectToWorldNV = 5330,
+    WorldToObjectKHR = 5331,
+    WorldToObjectNV = 5331,
+    HitTNV = 5332,
+    HitKindKHR = 5333,
+    HitKindNV = 5333,
+    IncomingRayFlagsKHR = 5351,
+    IncomingRayFlagsNV = 5351,
+    RayGeometryIndexKHR = 5352,
+    WarpsPerSMNV = 5374,
+    SMCountNV = 5375,
+    WarpIDNV = 5376,
+    SMIDNV = 5377,
     Max = 0x7fffffff,
 };
@@ -490,6 +677,19 @@ enum class LoopControlShift : unsigned {
     DontUnroll = 1,
     DependencyInfinite = 2,
     DependencyLength = 3,
+    MinIterations = 4,
+    MaxIterations = 5,
+    IterationMultiple = 6,
+    PeelCount = 7,
+    PartialCount = 8,
+    InitiationIntervalINTEL = 16,
+    MaxConcurrencyINTEL = 17,
+    DependencyArrayINTEL = 18,
+    PipelineEnableINTEL = 19,
+    LoopCoalesceINTEL = 20,
+    MaxInterleavingINTEL = 21,
+    SpeculatedIterationsINTEL = 22,
+    NoFusionINTEL = 23,
     Max = 0x7fffffff,
 };
@@ -499,6 +699,19 @@ enum class LoopControlMask : unsigned {
     DontUnroll = 0x00000002,
     DependencyInfinite = 0x00000004,
     DependencyLength = 0x00000008,
+    MinIterations = 0x00000010,
+    MaxIterations = 0x00000020,
+    IterationMultiple = 0x00000040,
+    PeelCount = 0x00000080,
+    PartialCount = 0x00000100,
+    InitiationIntervalINTEL = 0x00010000,
+    MaxConcurrencyINTEL = 0x00020000,
+    DependencyArrayINTEL = 0x00040000,
+    PipelineEnableINTEL = 0x00080000,
+    LoopCoalesceINTEL = 0x00100000,
+    MaxInterleavingINTEL = 0x00200000,
+    SpeculatedIterationsINTEL = 0x00400000,
+    NoFusionINTEL = 0x00800000,
 };
 
 enum class FunctionControlShift : unsigned {
@@ -528,6 +741,13 @@ enum class MemorySemanticsShift : unsigned {
     CrossWorkgroupMemory = 9,
     AtomicCounterMemory = 10,
     ImageMemory = 11,
+    OutputMemory = 12,
+    OutputMemoryKHR = 12,
+    MakeAvailable = 13,
+    MakeAvailableKHR = 13,
+    MakeVisible = 14,
+    MakeVisibleKHR = 14,
+    Volatile = 15,
     Max = 0x7fffffff,
 };
@@ -543,12 +763,25 @@ enum class MemorySemanticsMask : unsigned {
     CrossWorkgroupMemory = 0x00000200,
     AtomicCounterMemory = 0x00000400,
     ImageMemory = 0x00000800,
+    OutputMemory = 0x00001000,
+    OutputMemoryKHR = 0x00001000,
+    MakeAvailable = 0x00002000,
+    MakeAvailableKHR = 0x00002000,
+    MakeVisible = 0x00004000,
+    MakeVisibleKHR = 0x00004000,
+    Volatile = 0x00008000,
 };
 
 enum class MemoryAccessShift : unsigned {
     Volatile = 0,
     Aligned = 1,
     Nontemporal = 2,
+    MakePointerAvailable = 3,
+    MakePointerAvailableKHR = 3,
+    MakePointerVisible = 4,
+    MakePointerVisibleKHR = 4,
+    NonPrivatePointer = 5,
+    NonPrivatePointerKHR = 5,
     Max = 0x7fffffff,
 };
@@ -557,6 +790,12 @@ enum class MemoryAccessMask : unsigned {
     Volatile = 0x00000001,
     Aligned = 0x00000002,
     Nontemporal = 0x00000004,
+    MakePointerAvailable = 0x00000008,
+    MakePointerAvailableKHR = 0x00000008,
+    MakePointerVisible = 0x00000010,
+    MakePointerVisibleKHR = 0x00000010,
+    NonPrivatePointer = 0x00000020,
+    NonPrivatePointerKHR = 0x00000020,
 };
@@ -565,6 +804,9 @@ enum class Scope : unsigned {
     Workgroup = 2,
     Subgroup = 3,
     Invocation = 4,
+    QueueFamily = 5,
+    QueueFamilyKHR = 5,
+    ShaderCallKHR = 6,
     Max = 0x7fffffff,
 };
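[Editor's note] The new MemorySemantics, MemoryAccess, and Scope values above come with the Vulkan memory model; a SPIR-V producer ORs them into the semantics and memory-access operands of atomics and loads/stores. A hedged sketch of composing such operand words with the C++11 enums, illustrative only and not part of this patch:

```cpp
#include "spirv/unified1/spirv.hpp11"

// Semantics word for an atomic release that makes a StorageBuffer (Uniform
// storage class) write available under the Vulkan memory model.
inline unsigned ReleaseSemantics() {
    return unsigned(spv::MemorySemanticsMask::Release) |
           unsigned(spv::MemorySemanticsMask::UniformMemory) |
           unsigned(spv::MemorySemanticsMask::MakeAvailable);  // new in this hunk
}

// Memory-access word for an OpStore that publishes a non-private pointer.
inline unsigned StoreAccess() {
    return unsigned(spv::MemoryAccessMask::MakePointerAvailable) |
           unsigned(spv::MemoryAccessMask::NonPrivatePointer);
}

// QueueFamily (5) is one of the scopes added by this hunk.
static const unsigned kScope = unsigned(spv::Scope::QueueFamily);
```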
@@ -664,8 +906,14 @@ enum class Capability : unsigned {
     GroupNonUniformShuffleRelative = 66,
     GroupNonUniformClustered = 67,
     GroupNonUniformQuad = 68,
+    ShaderLayer = 69,
+    ShaderViewportIndex = 70,
+    FragmentShadingRateKHR = 4422,
     SubgroupBallotKHR = 4423,
     DrawParameters = 4427,
+    WorkgroupMemoryExplicitLayoutKHR = 4428,
+    WorkgroupMemoryExplicitLayout8BitAccessKHR = 4429,
+    WorkgroupMemoryExplicitLayout16BitAccessKHR = 4430,
     SubgroupVoteKHR = 4431,
     StorageBuffer16BitAccess = 4433,
     StorageUniformBufferBlock16 = 4433,
@@ -682,11 +930,22 @@ enum class Capability : unsigned {
     StorageBuffer8BitAccess = 4448,
     UniformAndStorageBuffer8BitAccess = 4449,
     StoragePushConstant8 = 4450,
+    DenormPreserve = 4464,
+    DenormFlushToZero = 4465,
+    SignedZeroInfNanPreserve = 4466,
+    RoundingModeRTE = 4467,
+    RoundingModeRTZ = 4468,
+    RayQueryProvisionalKHR = 4471,
+    RayQueryKHR = 4472,
+    RayTraversalPrimitiveCullingKHR = 4478,
+    RayTracingKHR = 4479,
     Float16ImageAMD = 5008,
     ImageGatherBiasLodAMD = 5009,
     FragmentMaskAMD = 5010,
     StencilExportEXT = 5013,
     ImageReadWriteLodAMD = 5015,
+    Int64ImageEXT = 5016,
+    ShaderClockKHR = 5055,
     SampleMaskOverrideCoverageNV = 5249,
     GeometryShaderPassthroughNV = 5251,
     ShaderViewportIndexLayerEXT = 5254,
@@ -695,22 +954,165 @@ enum class Capability : unsigned {
     ShaderStereoViewNV = 5259,
     PerViewAttributesNV = 5260,
     FragmentFullyCoveredEXT = 5265,
+    MeshShadingNV = 5266,
+    ImageFootprintNV = 5282,
+    FragmentBarycentricNV = 5284,
+    ComputeDerivativeGroupQuadsNV = 5288,
+    FragmentDensityEXT = 5291,
+    ShadingRateNV = 5291,
     GroupNonUniformPartitionedNV = 5297,
+    ShaderNonUniform = 5301,
     ShaderNonUniformEXT = 5301,
+    RuntimeDescriptorArray = 5302,
     RuntimeDescriptorArrayEXT = 5302,
+    InputAttachmentArrayDynamicIndexing = 5303,
     InputAttachmentArrayDynamicIndexingEXT = 5303,
+    UniformTexelBufferArrayDynamicIndexing = 5304,
     UniformTexelBufferArrayDynamicIndexingEXT = 5304,
+    StorageTexelBufferArrayDynamicIndexing = 5305,
     StorageTexelBufferArrayDynamicIndexingEXT = 5305,
+    UniformBufferArrayNonUniformIndexing = 5306,
     UniformBufferArrayNonUniformIndexingEXT = 5306,
+    SampledImageArrayNonUniformIndexing = 5307,
     SampledImageArrayNonUniformIndexingEXT = 5307,
+    StorageBufferArrayNonUniformIndexing = 5308,
     StorageBufferArrayNonUniformIndexingEXT = 5308,
+    StorageImageArrayNonUniformIndexing = 5309,
     StorageImageArrayNonUniformIndexingEXT = 5309,
+    InputAttachmentArrayNonUniformIndexing = 5310,
     InputAttachmentArrayNonUniformIndexingEXT = 5310,
+    UniformTexelBufferArrayNonUniformIndexing = 5311,
     UniformTexelBufferArrayNonUniformIndexingEXT = 5311,
+    StorageTexelBufferArrayNonUniformIndexing = 5312,
     StorageTexelBufferArrayNonUniformIndexingEXT = 5312,
+    RayTracingNV = 5340,
+    VulkanMemoryModel = 5345,
+    VulkanMemoryModelKHR = 5345,
+    VulkanMemoryModelDeviceScope = 5346,
+    VulkanMemoryModelDeviceScopeKHR = 5346,
+    PhysicalStorageBufferAddresses = 5347,
+    PhysicalStorageBufferAddressesEXT = 5347,
+    ComputeDerivativeGroupLinearNV = 5350,
+    RayTracingProvisionalKHR = 5353,
+    CooperativeMatrixNV = 5357,
+    FragmentShaderSampleInterlockEXT = 5363,
+    FragmentShaderShadingRateInterlockEXT = 5372,
+    ShaderSMBuiltinsNV = 5373,
+    FragmentShaderPixelInterlockEXT = 5378,
+    DemoteToHelperInvocationEXT = 5379,
     SubgroupShuffleINTEL = 5568,
     SubgroupBufferBlockIOINTEL = 5569,
     SubgroupImageBlockIOINTEL = 5570,
+    SubgroupImageMediaBlockIOINTEL = 5579,
+    RoundToInfinityINTEL = 5582,
+    FloatingPointModeINTEL = 5583,
+    IntegerFunctions2INTEL = 5584,
+    FunctionPointersINTEL = 5603,
+    IndirectReferencesINTEL = 5604,
+    AsmINTEL = 5606,
+    AtomicFloat32MinMaxEXT = 5612,
+    AtomicFloat64MinMaxEXT = 5613,
+    AtomicFloat16MinMaxEXT = 5616,
+    VectorComputeINTEL = 5617,
+    VectorAnyINTEL = 5619,
+    SubgroupAvcMotionEstimationINTEL = 5696,
+    SubgroupAvcMotionEstimationIntraINTEL = 5697,
+    SubgroupAvcMotionEstimationChromaINTEL = 5698,
+    VariableLengthArrayINTEL = 5817,
+    FunctionFloatControlINTEL = 5821,
+    FPGAMemoryAttributesINTEL = 5824,
+    FPFastMathModeINTEL = 5837,
+    ArbitraryPrecisionIntegersINTEL = 5844,
+    UnstructuredLoopControlsINTEL = 5886,
+    FPGALoopControlsINTEL = 5888,
+    KernelAttributesINTEL = 5892,
+    FPGAKernelAttributesINTEL = 5897,
+    FPGAMemoryAccessesINTEL = 5898,
+    FPGAClusterAttributesINTEL = 5904,
+    LoopFuseINTEL = 5906,
+    FPGABufferLocationINTEL = 5920,
+    USMStorageClassesINTEL = 5935,
+    IOPipesINTEL = 5943,
+    BlockingPipesINTEL = 5945,
+    FPGARegINTEL = 5948,
+    AtomicFloat32AddEXT = 6033,
+    AtomicFloat64AddEXT = 6034,
+    LongConstantCompositeINTEL = 6089,
+    Max = 0x7fffffff,
+};
+
+enum class RayFlagsShift : unsigned {
+    OpaqueKHR = 0,
+    NoOpaqueKHR = 1,
+    TerminateOnFirstHitKHR = 2,
+    SkipClosestHitShaderKHR = 3,
+    CullBackFacingTrianglesKHR = 4,
+    CullFrontFacingTrianglesKHR = 5,
+    CullOpaqueKHR = 6,
+    CullNoOpaqueKHR = 7,
+    SkipTrianglesKHR = 8,
+    SkipAABBsKHR = 9,
+    Max = 0x7fffffff,
+};
+
+enum class RayFlagsMask : unsigned {
+    MaskNone = 0,
+    OpaqueKHR = 0x00000001,
+    NoOpaqueKHR = 0x00000002,
+    TerminateOnFirstHitKHR = 0x00000004,
+    SkipClosestHitShaderKHR = 0x00000008,
+    CullBackFacingTrianglesKHR = 0x00000010,
+    CullFrontFacingTrianglesKHR = 0x00000020,
+    CullOpaqueKHR = 0x00000040,
+    CullNoOpaqueKHR = 0x00000080,
+    SkipTrianglesKHR = 0x00000100,
+    SkipAABBsKHR = 0x00000200,
+};
+
+enum class RayQueryIntersection : unsigned {
+    RayQueryCandidateIntersectionKHR = 0,
+    RayQueryCommittedIntersectionKHR = 1,
+    Max = 0x7fffffff,
+};
+
+enum class RayQueryCommittedIntersectionType : unsigned {
+    RayQueryCommittedIntersectionNoneKHR = 0,
+    RayQueryCommittedIntersectionTriangleKHR = 1,
+    RayQueryCommittedIntersectionGeneratedKHR = 2,
+    Max = 0x7fffffff,
+};
+
+enum class RayQueryCandidateIntersectionType : unsigned {
+    RayQueryCandidateIntersectionTriangleKHR = 0,
+    RayQueryCandidateIntersectionAABBKHR = 1,
+    Max = 0x7fffffff,
+};
+
+enum class FragmentShadingRateShift : unsigned {
+    Vertical2Pixels = 0,
+    Vertical4Pixels = 1,
+    Horizontal2Pixels = 2,
+    Horizontal4Pixels = 3,
+    Max = 0x7fffffff,
+};
+
+enum class FragmentShadingRateMask : unsigned {
+    MaskNone = 0,
+    Vertical2Pixels = 0x00000001,
+    Vertical4Pixels = 0x00000002,
+    Horizontal2Pixels = 0x00000004,
+    Horizontal4Pixels = 0x00000008,
+};
+
+enum class FPDenormMode : unsigned {
+    Preserve = 0,
+    FlushToZero = 1,
+    Max = 0x7fffffff,
+};
+
+enum class FPOperationMode : unsigned {
+    IEEE = 0,
+    ALT = 1,
     Max = 0x7fffffff,
 };
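[Editor's note] RayFlagsMask is one of the new mask enums covered by the operator| overloads this patch adds (shown above for spirv.hpp; spirv.hpp11 receives matching overloads further down in the patch), so ray-tracing producers can compose flags directly. A sketch, illustrative only and not part of this patch:

```cpp
#include "spirv/unified1/spirv.hpp11"

// Compose ray flags for an opaque shadow ray that stops at the first hit,
// using the operator|(RayFlagsMask, RayFlagsMask) overload from this patch.
inline spv::RayFlagsMask ShadowRayFlags() {
    return spv::RayFlagsMask::OpaqueKHR |
           spv::RayFlagsMask::TerminateOnFirstHitKHR |
           spv::RayFlagsMask::SkipClosestHitShaderKHR;
}

// The Shift/Mask pairing holds for the ray flags too.
static_assert(unsigned(spv::RayFlagsMask::TerminateOnFirstHitKHR) ==
                  1u << unsigned(spv::RayFlagsShift::TerminateOnFirstHitKHR),
              "Mask == 1 << Shift");
```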
@@ -1055,12 +1457,29 @@ enum class Op : unsigned {
     OpGroupNonUniformLogicalXor = 364,
     OpGroupNonUniformQuadBroadcast = 365,
     OpGroupNonUniformQuadSwap = 366,
+    OpCopyLogical = 400,
+    OpPtrEqual = 401,
+    OpPtrNotEqual = 402,
+    OpPtrDiff = 403,
+    OpTerminateInvocation = 4416,
     OpSubgroupBallotKHR = 4421,
     OpSubgroupFirstInvocationKHR = 4422,
     OpSubgroupAllKHR = 4428,
     OpSubgroupAnyKHR = 4429,
     OpSubgroupAllEqualKHR = 4430,
     OpSubgroupReadInvocationKHR = 4432,
+    OpTraceRayKHR = 4445,
+    OpExecuteCallableKHR = 4446,
+    OpConvertUToAccelerationStructureKHR = 4447,
+    OpIgnoreIntersectionKHR = 4448,
+    OpTerminateRayKHR = 4449,
+    OpTypeRayQueryKHR = 4472,
+    OpRayQueryInitializeKHR = 4473,
+    OpRayQueryTerminateKHR = 4474,
+    OpRayQueryGenerateIntersectionKHR = 4475,
+    OpRayQueryConfirmIntersectionKHR = 4476,
+    OpRayQueryProceedKHR = 4477,
+    OpRayQueryGetIntersectionTypeKHR = 4479,
     OpGroupIAddNonUniformAMD = 5000,
     OpGroupFAddNonUniformAMD = 5001,
     OpGroupFMinNonUniformAMD = 5002,
@@ -1071,7 +1490,27 @@ enum class Op : unsigned {
     OpGroupSMaxNonUniformAMD = 5007,
     OpFragmentMaskFetchAMD = 5011,
     OpFragmentFetchAMD = 5012,
+    OpReadClockKHR = 5056,
+    OpImageSampleFootprintNV = 5283,
     OpGroupNonUniformPartitionNV = 5296,
+    OpWritePackedPrimitiveIndices4x8NV = 5299,
+    OpReportIntersectionKHR = 5334,
+    OpReportIntersectionNV = 5334,
+    OpIgnoreIntersectionNV = 5335,
+    OpTerminateRayNV = 5336,
+    OpTraceNV = 5337,
+    OpTypeAccelerationStructureKHR = 5341,
+    OpTypeAccelerationStructureNV = 5341,
+    OpExecuteCallableNV = 5344,
+    OpTypeCooperativeMatrixNV = 5358,
+    OpCooperativeMatrixLoadNV = 5359,
+    OpCooperativeMatrixStoreNV = 5360,
+    OpCooperativeMatrixMulAddNV = 5361,
+    OpCooperativeMatrixLengthNV = 5362,
+    OpBeginInvocationInterlockEXT = 5364,
+    OpEndInvocationInterlockEXT = 5365,
+    OpDemoteToHelperInvocationEXT = 5380,
+    OpIsHelperInvocationEXT = 5381,
     OpSubgroupShuffleINTEL = 5571,
     OpSubgroupShuffleDownINTEL = 5572,
     OpSubgroupShuffleUpINTEL = 5573,
@@ -1080,11 +1519,768 @@ enum class Op : unsigned {
     OpSubgroupBlockWriteINTEL = 5576,
     OpSubgroupImageBlockReadINTEL = 5577,
     OpSubgroupImageBlockWriteINTEL = 5578,
+    OpSubgroupImageMediaBlockReadINTEL = 5580,
+    OpSubgroupImageMediaBlockWriteINTEL = 5581,
+    OpUCountLeadingZerosINTEL = 5585,
+    OpUCountTrailingZerosINTEL = 5586,
+    OpAbsISubINTEL = 5587,
+    OpAbsUSubINTEL = 5588,
+    OpIAddSatINTEL = 5589,
+    OpUAddSatINTEL = 5590,
+    OpIAverageINTEL = 5591,
+    OpUAverageINTEL = 5592,
+    OpIAverageRoundedINTEL = 5593,
+    OpUAverageRoundedINTEL = 5594,
+    OpISubSatINTEL = 5595,
+    OpUSubSatINTEL = 5596,
+    OpIMul32x16INTEL = 5597,
+    OpUMul32x16INTEL = 5598,
+    OpConstFunctionPointerINTEL = 5600,
+    OpFunctionPointerCallINTEL = 5601,
+    OpAsmTargetINTEL = 5609,
+    OpAsmINTEL = 5610,
+    OpAsmCallINTEL = 5611,
+    OpAtomicFMinEXT = 5614,
+    OpAtomicFMaxEXT = 5615,
+    OpDecorateString = 5632,
     OpDecorateStringGOOGLE = 5632,
+    OpMemberDecorateString = 5633,
     OpMemberDecorateStringGOOGLE = 5633,
+    OpVmeImageINTEL = 5699,
+    OpTypeVmeImageINTEL = 5700,
+    OpTypeAvcImePayloadINTEL = 5701,
+    OpTypeAvcRefPayloadINTEL = 5702,
+    OpTypeAvcSicPayloadINTEL = 5703,
+    OpTypeAvcMcePayloadINTEL = 5704,
+    OpTypeAvcMceResultINTEL = 5705,
+    OpTypeAvcImeResultINTEL = 5706,
+    OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707,
+    OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708,
+    OpTypeAvcImeSingleReferenceStreaminINTEL = 5709,
+    OpTypeAvcImeDualReferenceStreaminINTEL = 5710,
+    OpTypeAvcRefResultINTEL = 5711,
+    OpTypeAvcSicResultINTEL = 5712,
+    OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713,
+    OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714,
+    OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715,
+    OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716,
+    OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717,
+    OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718,
+    OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719,
+    OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720,
+    OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721,
+    OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722,
+    OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723,
+    OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724,
+    OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725,
+    OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726,
+    OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727,
+    OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728,
+    OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729,
+    OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730,
+    OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731,
+    OpSubgroupAvcMceConvertToImePayloadINTEL = 5732,
+    OpSubgroupAvcMceConvertToImeResultINTEL = 5733,
+    OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734,
+    OpSubgroupAvcMceConvertToRefResultINTEL = 5735,
+    OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736,
+    OpSubgroupAvcMceConvertToSicResultINTEL = 5737,
+    OpSubgroupAvcMceGetMotionVectorsINTEL = 5738,
+    OpSubgroupAvcMceGetInterDistortionsINTEL = 5739,
+    OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740,
+    OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741,
+    OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742,
+    OpSubgroupAvcMceGetInterDirectionsINTEL = 5743,
+    OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744,
+    OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745,
+    OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746,
+    OpSubgroupAvcImeInitializeINTEL = 5747,
+    OpSubgroupAvcImeSetSingleReferenceINTEL = 5748,
+    OpSubgroupAvcImeSetDualReferenceINTEL = 5749,
+    OpSubgroupAvcImeRefWindowSizeINTEL = 5750,
+    OpSubgroupAvcImeAdjustRefOffsetINTEL = 5751,
+    OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752,
+    OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753,
+    OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754,
+    OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755,
+    OpSubgroupAvcImeSetWeightedSadINTEL = 5756,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757,
+    OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759,
+    OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761,
+    OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763,
+    OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764,
+    OpSubgroupAvcImeConvertToMceResultINTEL = 5765,
+    OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766,
+    OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767,
+    OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768,
+    OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769,
+    OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770,
+    OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771,
+    OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772,
+    OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773,
+    OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774,
+    OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775,
+    OpSubgroupAvcImeGetBorderReachedINTEL = 5776,
+    OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777,
+    OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778,
+    OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779,
+    OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780,
+    OpSubgroupAvcFmeInitializeINTEL = 5781,
+    OpSubgroupAvcBmeInitializeINTEL = 5782,
+    OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783,
+    OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784,
+    OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785,
+    OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786,
+    OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787,
+    OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788,
+    OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789,
+    OpSubgroupAvcRefConvertToMceResultINTEL = 5790,
+    OpSubgroupAvcSicInitializeINTEL = 5791,
+    OpSubgroupAvcSicConfigureSkcINTEL = 5792,
+    OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793,
+    OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794,
+    OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795,
+    OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796,
+    OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797,
+    OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798,
+    OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799,
+    OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800,
+    OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801,
+    OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802,
+    OpSubgroupAvcSicEvaluateIpeINTEL = 5803,
+    OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804,
+    OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805,
+    OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806,
+    OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807,
+    OpSubgroupAvcSicConvertToMceResultINTEL = 5808,
+    OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809,
+    OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810,
+    OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811,
+    OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812,
+    OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813,
+    OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814,
+    OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815,
+    OpSubgroupAvcSicGetInterRawSadsINTEL = 5816,
+    OpVariableLengthArrayINTEL = 5818,
+    OpSaveMemoryINTEL = 5819,
+    OpRestoreMemoryINTEL = 5820,
+    OpLoopControlINTEL = 5887,
+    OpPtrCastToCrossWorkgroupINTEL = 5934,
+    OpCrossWorkgroupCastToPtrINTEL = 5938,
+    OpReadPipeBlockingINTEL = 5946,
+    OpWritePipeBlockingINTEL = 5947,
+    OpFPGARegINTEL = 5949,
+    OpRayQueryGetRayTMinKHR = 6016,
+    OpRayQueryGetRayFlagsKHR = 6017,
+    OpRayQueryGetIntersectionTKHR = 6018,
+    OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019,
+    OpRayQueryGetIntersectionInstanceIdKHR = 6020,
+    OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021,
+    OpRayQueryGetIntersectionGeometryIndexKHR = 6022,
+    OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023,
+    OpRayQueryGetIntersectionBarycentricsKHR = 6024,
+    OpRayQueryGetIntersectionFrontFaceKHR = 6025,
+    OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026,
+    OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027,
+    OpRayQueryGetIntersectionObjectRayOriginKHR = 6028,
+    OpRayQueryGetWorldRayDirectionKHR = 6029,
+    OpRayQueryGetWorldRayOriginKHR = 6030,
+    OpRayQueryGetIntersectionObjectToWorldKHR = 6031,
+    OpRayQueryGetIntersectionWorldToObjectKHR = 6032,
+    OpAtomicFAddEXT = 6035,
+    OpTypeBufferSurfaceINTEL = 6086,
+    OpTypeStructContinuedINTEL = 6090,
+    OpConstantCompositeContinuedINTEL = 6091,
+    OpSpecConstantCompositeContinuedINTEL = 6092,
     Max = 0x7fffffff,
 };
+#ifdef SPV_ENABLE_UTILITY_CODE
+inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) {
+    *hasResult = *hasResultType = false;
+    switch (opcode) {
+    default: /* unknown opcode */ break;
+    case Op::OpNop: *hasResult = false; *hasResultType = false; break;
+    case Op::OpUndef: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSourceContinued: *hasResult = false; *hasResultType = false; break;
+    case Op::OpSource: *hasResult = false; *hasResultType = false; break;
+    case Op::OpSourceExtension: *hasResult = false; *hasResultType = false; break;
+    case Op::OpName: *hasResult = false; *hasResultType = false; break;
+    case Op::OpMemberName: *hasResult = false; *hasResultType = false; break;
+    case Op::OpString: *hasResult = true; *hasResultType = false; break;
+    case Op::OpLine: *hasResult = false; *hasResultType = false; break;
+    case Op::OpExtension: *hasResult = false; *hasResultType = false; break;
+    case Op::OpExtInstImport: *hasResult = true; *hasResultType = false; break;
+    case Op::OpExtInst: *hasResult = true; *hasResultType = true; break;
+    case Op::OpMemoryModel: *hasResult = false; *hasResultType = false; break;
+    case Op::OpEntryPoint: *hasResult = false; *hasResultType = false; break;
+    case Op::OpExecutionMode: *hasResult = false; *hasResultType = false; break;
+    case Op::OpCapability: *hasResult = false; *hasResultType = false; break;
+    case Op::OpTypeVoid: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeBool: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeInt: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeFloat: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeVector: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeMatrix: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeImage: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeSampler: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeSampledImage: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeArray: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeRuntimeArray: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeStruct: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeOpaque: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypePointer: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeFunction: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeEvent: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeDeviceEvent: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeReserveId: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeQueue: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypePipe: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeForwardPointer: *hasResult = false; *hasResultType = false; break;
+    case Op::OpConstantTrue: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConstantFalse: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConstant: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConstantComposite: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConstantSampler: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConstantNull: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSpecConstantTrue: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSpecConstantFalse: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSpecConstant: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSpecConstantComposite: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSpecConstantOp: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFunction: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFunctionParameter: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFunctionEnd: *hasResult = false; *hasResultType = false; break;
+    case Op::OpFunctionCall: *hasResult = true; *hasResultType = true; break;
+    case Op::OpVariable: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageTexelPointer: *hasResult = true; *hasResultType = true; break;
+    case Op::OpLoad: *hasResult = true; *hasResultType = true; break;
+    case Op::OpStore: *hasResult = false; *hasResultType = false; break;
+    case Op::OpCopyMemory: *hasResult = false; *hasResultType = false; break;
+    case Op::OpCopyMemorySized: *hasResult = false; *hasResultType = false; break;
+    case Op::OpAccessChain: *hasResult = true; *hasResultType = true; break;
+    case Op::OpInBoundsAccessChain: *hasResult = true; *hasResultType = true; break;
+    case Op::OpPtrAccessChain: *hasResult = true; *hasResultType = true; break;
+    case Op::OpArrayLength: *hasResult = true; *hasResultType = true; break;
+    case Op::OpGenericPtrMemSemantics: *hasResult = true; *hasResultType = true; break;
+    case Op::OpInBoundsPtrAccessChain: *hasResult = true; *hasResultType = true; break;
+    case Op::OpDecorate: *hasResult = false; *hasResultType = false; break;
+    case Op::OpMemberDecorate: *hasResult = false; *hasResultType = false; break;
+    case Op::OpDecorationGroup: *hasResult = true; *hasResultType = false; break;
+    case Op::OpGroupDecorate: *hasResult = false; *hasResultType = false; break;
+    case Op::OpGroupMemberDecorate: *hasResult = false; *hasResultType = false; break;
+    case Op::OpVectorExtractDynamic: *hasResult = true; *hasResultType = true; break;
+    case Op::OpVectorInsertDynamic: *hasResult = true; *hasResultType = true; break;
+    case Op::OpVectorShuffle: *hasResult = true; *hasResultType = true; break;
+    case Op::OpCompositeConstruct: *hasResult = true; *hasResultType = true; break;
+    case Op::OpCompositeExtract: *hasResult = true; *hasResultType = true; break;
+    case Op::OpCompositeInsert: *hasResult = true; *hasResultType = true; break;
+    case Op::OpCopyObject: *hasResult = true; *hasResultType = true; break;
+    case Op::OpTranspose: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSampledImage: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageFetch: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageGather: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageDrefGather: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageRead: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageWrite: *hasResult = false; *hasResultType = false; break;
+    case Op::OpImage: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageQueryFormat: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageQueryOrder: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageQuerySizeLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageQuerySize: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageQueryLod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageQueryLevels: *hasResult = true; *hasResultType = true; break;
+    case Op::OpImageQuerySamples: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConvertFToU: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConvertFToS: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConvertSToF: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConvertUToF: *hasResult = true; *hasResultType = true; break;
+    case Op::OpUConvert: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSConvert: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFConvert: *hasResult = true; *hasResultType = true; break;
+    case Op::OpQuantizeToF16: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConvertPtrToU: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSatConvertSToU: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSatConvertUToS: *hasResult = true; *hasResultType = true; break;
+    case Op::OpConvertUToPtr: *hasResult = true; *hasResultType = true; break;
+    case Op::OpPtrCastToGeneric: *hasResult = true; *hasResultType = true; break;
+    case Op::OpGenericCastToPtr: *hasResult = true; *hasResultType = true; break;
+    case Op::OpGenericCastToPtrExplicit: *hasResult = true; *hasResultType = true; break;
+    case Op::OpBitcast: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSNegate: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFNegate: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIAdd: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFAdd: *hasResult = true; *hasResultType = true; break;
+    case Op::OpISub: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFSub: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIMul: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFMul: *hasResult = true; *hasResultType = true; break;
+    case Op::OpUDiv: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSDiv: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFDiv: *hasResult = true; *hasResultType = true; break;
+    case Op::OpUMod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSRem: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSMod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFRem: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFMod: *hasResult = true; *hasResultType = true; break;
+    case Op::OpVectorTimesScalar: *hasResult = true; *hasResultType = true; break;
+    case Op::OpMatrixTimesScalar: *hasResult = true; *hasResultType = true; break;
+    case Op::OpVectorTimesMatrix: *hasResult = true; *hasResultType = true; break;
+    case Op::OpMatrixTimesVector: *hasResult = true; *hasResultType = true; break;
+    case Op::OpMatrixTimesMatrix: *hasResult = true; *hasResultType = true; break;
+    case Op::OpOuterProduct: *hasResult = true; *hasResultType = true; break;
+    case Op::OpDot: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIAddCarry: *hasResult = true; *hasResultType = true; break;
+    case Op::OpISubBorrow: *hasResult = true; *hasResultType = true; break;
+    case Op::OpUMulExtended: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSMulExtended: *hasResult = true; *hasResultType = true; break;
+    case Op::OpAny: *hasResult = true; *hasResultType = true; break;
+    case Op::OpAll: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIsNan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIsInf: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIsFinite: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIsNormal: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSignBitSet: *hasResult = true; *hasResultType = true; break;
+    case Op::OpLessOrGreater: *hasResult = true; *hasResultType = true; break;
+    case Op::OpOrdered: *hasResult = true; *hasResultType = true; break;
+    case Op::OpUnordered: *hasResult = true; *hasResultType = true; break;
+    case Op::OpLogicalEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpLogicalNotEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpLogicalOr: *hasResult = true; *hasResultType = true; break;
+    case Op::OpLogicalAnd: *hasResult = true; *hasResultType = true; break;
+    case Op::OpLogicalNot: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSelect: *hasResult = true; *hasResultType = true; break;
+    case Op::OpIEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpINotEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpUGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpUGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpULessThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSLessThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpULessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSLessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFOrdEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFUnordEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFOrdNotEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFUnordNotEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFOrdLessThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFUnordLessThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFOrdGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFUnordGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFOrdLessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFUnordLessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFOrdGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFUnordGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case Op::OpShiftRightLogical: *hasResult = true; *hasResultType = true; break;
+    case Op::OpShiftRightArithmetic: *hasResult = true; *hasResultType = true; break;
+    case Op::OpShiftLeftLogical: *hasResult = true; *hasResultType = true; break;
+    case Op::OpBitwiseOr: *hasResult = true; *hasResultType = true; break;
+    case Op::OpBitwiseXor: *hasResult = true; *hasResultType = true; break;
+    case Op::OpBitwiseAnd: *hasResult = true; *hasResultType = true; break;
+    case Op::OpNot: *hasResult = true; *hasResultType = true; break;
+    case Op::OpBitFieldInsert: *hasResult = true; *hasResultType = true; break;
+    case Op::OpBitFieldSExtract: *hasResult = true; *hasResultType = true; break;
+    case Op::OpBitFieldUExtract: *hasResult = true; *hasResultType = true; break;
+ case
Op::OpBitReverse: *hasResult = true; *hasResultType = true; break; + case Op::OpBitCount: *hasResult = true; *hasResultType = true; break; + case Op::OpDPdx: *hasResult = true; *hasResultType = true; break; + case Op::OpDPdy: *hasResult = true; *hasResultType = true; break; + case Op::OpFwidth: *hasResult = true; *hasResultType = true; break; + case Op::OpDPdxFine: *hasResult = true; *hasResultType = true; break; + case Op::OpDPdyFine: *hasResult = true; *hasResultType = true; break; + case Op::OpFwidthFine: *hasResult = true; *hasResultType = true; break; + case Op::OpDPdxCoarse: *hasResult = true; *hasResultType = true; break; + case Op::OpDPdyCoarse: *hasResult = true; *hasResultType = true; break; + case Op::OpFwidthCoarse: *hasResult = true; *hasResultType = true; break; + case Op::OpEmitVertex: *hasResult = false; *hasResultType = false; break; + case Op::OpEndPrimitive: *hasResult = false; *hasResultType = false; break; + case Op::OpEmitStreamVertex: *hasResult = false; *hasResultType = false; break; + case Op::OpEndStreamPrimitive: *hasResult = false; *hasResultType = false; break; + case Op::OpControlBarrier: *hasResult = false; *hasResultType = false; break; + case Op::OpMemoryBarrier: *hasResult = false; *hasResultType = false; break; + case Op::OpAtomicLoad: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicStore: *hasResult = false; *hasResultType = false; break; + case Op::OpAtomicExchange: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicCompareExchange: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicCompareExchangeWeak: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicIIncrement: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicIDecrement: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicIAdd: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicISub: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicSMin: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicUMin: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicSMax: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicUMax: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicAnd: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicOr: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicXor: *hasResult = true; *hasResultType = true; break; + case Op::OpPhi: *hasResult = true; *hasResultType = true; break; + case Op::OpLoopMerge: *hasResult = false; *hasResultType = false; break; + case Op::OpSelectionMerge: *hasResult = false; *hasResultType = false; break; + case Op::OpLabel: *hasResult = true; *hasResultType = false; break; + case Op::OpBranch: *hasResult = false; *hasResultType = false; break; + case Op::OpBranchConditional: *hasResult = false; *hasResultType = false; break; + case Op::OpSwitch: *hasResult = false; *hasResultType = false; break; + case Op::OpKill: *hasResult = false; *hasResultType = false; break; + case Op::OpReturn: *hasResult = false; *hasResultType = false; break; + case Op::OpReturnValue: *hasResult = false; *hasResultType = false; break; + case Op::OpUnreachable: *hasResult = false; *hasResultType = false; break; + case Op::OpLifetimeStart: *hasResult = false; *hasResultType = false; break; + case Op::OpLifetimeStop: *hasResult = false; *hasResultType = false; break; + case Op::OpGroupAsyncCopy: *hasResult = true; *hasResultType = true; break; + case 
Op::OpGroupWaitEvents: *hasResult = false; *hasResultType = false; break; + case Op::OpGroupAll: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupAny: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupBroadcast: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupIAdd: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupFAdd: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupFMin: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupUMin: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupSMin: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupFMax: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupUMax: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupSMax: *hasResult = true; *hasResultType = true; break; + case Op::OpReadPipe: *hasResult = true; *hasResultType = true; break; + case Op::OpWritePipe: *hasResult = true; *hasResultType = true; break; + case Op::OpReservedReadPipe: *hasResult = true; *hasResultType = true; break; + case Op::OpReservedWritePipe: *hasResult = true; *hasResultType = true; break; + case Op::OpReserveReadPipePackets: *hasResult = true; *hasResultType = true; break; + case Op::OpReserveWritePipePackets: *hasResult = true; *hasResultType = true; break; + case Op::OpCommitReadPipe: *hasResult = false; *hasResultType = false; break; + case Op::OpCommitWritePipe: *hasResult = false; *hasResultType = false; break; + case Op::OpIsValidReserveId: *hasResult = true; *hasResultType = true; break; + case Op::OpGetNumPipePackets: *hasResult = true; *hasResultType = true; break; + case Op::OpGetMaxPipePackets: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupReserveReadPipePackets: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupReserveWritePipePackets: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupCommitReadPipe: *hasResult = false; *hasResultType = false; break; + case Op::OpGroupCommitWritePipe: *hasResult = false; *hasResultType = false; break; + case Op::OpEnqueueMarker: *hasResult = true; *hasResultType = true; break; + case Op::OpEnqueueKernel: *hasResult = true; *hasResultType = true; break; + case Op::OpGetKernelNDrangeSubGroupCount: *hasResult = true; *hasResultType = true; break; + case Op::OpGetKernelNDrangeMaxSubGroupSize: *hasResult = true; *hasResultType = true; break; + case Op::OpGetKernelWorkGroupSize: *hasResult = true; *hasResultType = true; break; + case Op::OpGetKernelPreferredWorkGroupSizeMultiple: *hasResult = true; *hasResultType = true; break; + case Op::OpRetainEvent: *hasResult = false; *hasResultType = false; break; + case Op::OpReleaseEvent: *hasResult = false; *hasResultType = false; break; + case Op::OpCreateUserEvent: *hasResult = true; *hasResultType = true; break; + case Op::OpIsValidEvent: *hasResult = true; *hasResultType = true; break; + case Op::OpSetUserEventStatus: *hasResult = false; *hasResultType = false; break; + case Op::OpCaptureEventProfilingInfo: *hasResult = false; *hasResultType = false; break; + case Op::OpGetDefaultQueue: *hasResult = true; *hasResultType = true; break; + case Op::OpBuildNDRange: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseSampleImplicitLod: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseSampleExplicitLod: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseSampleDrefImplicitLod: *hasResult = true; *hasResultType = 
true; break; + case Op::OpImageSparseSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseFetch: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseGather: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseDrefGather: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSparseTexelsResident: *hasResult = true; *hasResultType = true; break; + case Op::OpNoLine: *hasResult = false; *hasResultType = false; break; + case Op::OpAtomicFlagTestAndSet: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicFlagClear: *hasResult = false; *hasResultType = false; break; + case Op::OpImageSparseRead: *hasResult = true; *hasResultType = true; break; + case Op::OpSizeOf: *hasResult = true; *hasResultType = true; break; + case Op::OpTypePipeStorage: *hasResult = true; *hasResultType = false; break; + case Op::OpConstantPipeStorage: *hasResult = true; *hasResultType = true; break; + case Op::OpCreatePipeFromPipeStorage: *hasResult = true; *hasResultType = true; break; + case Op::OpGetKernelLocalSizeForSubgroupCount: *hasResult = true; *hasResultType = true; break; + case Op::OpGetKernelMaxNumSubgroups: *hasResult = true; *hasResultType = true; break; + case Op::OpTypeNamedBarrier: *hasResult = true; *hasResultType = false; break; + case Op::OpNamedBarrierInitialize: *hasResult = true; *hasResultType = true; break; + case Op::OpMemoryNamedBarrier: *hasResult = false; *hasResultType = false; break; + case Op::OpModuleProcessed: *hasResult = false; *hasResultType = false; break; + case Op::OpExecutionModeId: *hasResult = false; *hasResultType = false; break; + case Op::OpDecorateId: *hasResult = false; *hasResultType = false; break; + case Op::OpGroupNonUniformElect: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformAll: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformAny: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformAllEqual: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBroadcast: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBroadcastFirst: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBallot: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformInverseBallot: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBallotBitExtract: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBallotBitCount: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBallotFindLSB: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBallotFindMSB: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformShuffle: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformShuffleXor: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformShuffleUp: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformShuffleDown: *hasResult = true; *hasResultType = 
true; break; + case Op::OpGroupNonUniformIAdd: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformFAdd: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformIMul: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformFMul: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformSMin: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformUMin: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformFMin: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformSMax: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformUMax: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformFMax: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBitwiseAnd: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBitwiseOr: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformBitwiseXor: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformLogicalAnd: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformLogicalOr: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformLogicalXor: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformQuadBroadcast: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformQuadSwap: *hasResult = true; *hasResultType = true; break; + case Op::OpCopyLogical: *hasResult = true; *hasResultType = true; break; + case Op::OpPtrEqual: *hasResult = true; *hasResultType = true; break; + case Op::OpPtrNotEqual: *hasResult = true; *hasResultType = true; break; + case Op::OpPtrDiff: *hasResult = true; *hasResultType = true; break; + case Op::OpTerminateInvocation: *hasResult = false; *hasResultType = false; break; + case Op::OpSubgroupBallotKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupFirstInvocationKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAllKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAnyKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAllEqualKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupReadInvocationKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpTraceRayKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpExecuteCallableKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpConvertUToAccelerationStructureKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpIgnoreIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpTerminateRayKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpTypeRayQueryKHR: *hasResult = true; *hasResultType = false; break; + case Op::OpRayQueryInitializeKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpRayQueryTerminateKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpRayQueryGenerateIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpRayQueryConfirmIntersectionKHR: *hasResult = false; *hasResultType = false; break; + case Op::OpRayQueryProceedKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpRayQueryGetIntersectionTypeKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupIAddNonUniformAMD: *hasResult = true; *hasResultType = 
true; break; + case Op::OpGroupFAddNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupFMinNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupUMinNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupSMinNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupFMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupUMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupSMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpFragmentMaskFetchAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpFragmentFetchAMD: *hasResult = true; *hasResultType = true; break; + case Op::OpReadClockKHR: *hasResult = true; *hasResultType = true; break; + case Op::OpImageSampleFootprintNV: *hasResult = true; *hasResultType = true; break; + case Op::OpGroupNonUniformPartitionNV: *hasResult = true; *hasResultType = true; break; + case Op::OpWritePackedPrimitiveIndices4x8NV: *hasResult = false; *hasResultType = false; break; + case Op::OpReportIntersectionNV: *hasResult = true; *hasResultType = true; break; + case Op::OpIgnoreIntersectionNV: *hasResult = false; *hasResultType = false; break; + case Op::OpTerminateRayNV: *hasResult = false; *hasResultType = false; break; + case Op::OpTraceNV: *hasResult = false; *hasResultType = false; break; + case Op::OpTypeAccelerationStructureNV: *hasResult = true; *hasResultType = false; break; + case Op::OpExecuteCallableNV: *hasResult = false; *hasResultType = false; break; + case Op::OpTypeCooperativeMatrixNV: *hasResult = true; *hasResultType = false; break; + case Op::OpCooperativeMatrixLoadNV: *hasResult = true; *hasResultType = true; break; + case Op::OpCooperativeMatrixStoreNV: *hasResult = false; *hasResultType = false; break; + case Op::OpCooperativeMatrixMulAddNV: *hasResult = true; *hasResultType = true; break; + case Op::OpCooperativeMatrixLengthNV: *hasResult = true; *hasResultType = true; break; + case Op::OpBeginInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break; + case Op::OpEndInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break; + case Op::OpDemoteToHelperInvocationEXT: *hasResult = false; *hasResultType = false; break; + case Op::OpIsHelperInvocationEXT: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupShuffleINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupShuffleDownINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupShuffleUpINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupShuffleXorINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupBlockReadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupBlockWriteINTEL: *hasResult = false; *hasResultType = false; break; + case Op::OpSubgroupImageBlockReadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupImageBlockWriteINTEL: *hasResult = false; *hasResultType = false; break; + case Op::OpSubgroupImageMediaBlockReadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupImageMediaBlockWriteINTEL: *hasResult = false; *hasResultType = false; break; + case Op::OpUCountLeadingZerosINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpUCountTrailingZerosINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpAbsISubINTEL: *hasResult = true; 
*hasResultType = true; break; + case Op::OpAbsUSubINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpIAddSatINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpUAddSatINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpIAverageINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpUAverageINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpIAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpUAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpISubSatINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpUSubSatINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpIMul32x16INTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpUMul32x16INTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpConstFunctionPointerINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpFunctionPointerCallINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpAsmTargetINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpAsmINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpAsmCallINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicFMinEXT: *hasResult = true; *hasResultType = true; break; + case Op::OpAtomicFMaxEXT: *hasResult = true; *hasResultType = true; break; + case Op::OpDecorateString: *hasResult = false; *hasResultType = false; break; + case Op::OpMemberDecorateString: *hasResult = false; *hasResultType = false; break; + case Op::OpVmeImageINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpTypeVmeImageINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcImePayloadINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcRefPayloadINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcSicPayloadINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcMcePayloadINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcMceResultINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcImeResultINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcImeResultSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcImeResultDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcImeSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcImeDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcRefResultINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpTypeAvcSicResultINTEL: *hasResult = true; *hasResultType = false; break; + case Op::OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; 
break; + case Op::OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetAcOnlyHaarINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceConvertToImePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceConvertToImeResultINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceConvertToRefPayloadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceConvertToRefResultINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceConvertToSicPayloadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceConvertToSicResultINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetBestInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetInterMajorShapeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetInterMinorShapeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetInterDirectionsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetInterMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetInterReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeSetSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeSetDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeRefWindowSizeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeAdjustRefOffsetINTEL: *hasResult = true; *hasResultType = true; break; + case 
Op::OpSubgroupAvcImeConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeSetMaxMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeSetWeightedSadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeStripDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetBorderReachedINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcFmeInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcBmeInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case 
Op::OpSubgroupAvcRefConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcRefSetBidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcRefSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcRefEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcRefConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicInitializeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicConfigureSkcINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicConfigureIpeLumaINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicConfigureIpeLumaChromaINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetMotionVectorMaskINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicEvaluateIpeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetIpeLumaShapeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetPackedIpeLumaModesINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetIpeChromaModeINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL: *hasResult = true; *hasResultType = true; break; + case Op::OpSubgroupAvcSicGetInterRawSadsINTEL: *hasResult = 
true; *hasResultType = true; break;
+    case Op::OpVariableLengthArrayINTEL: *hasResult = true; *hasResultType = true; break;
+    case Op::OpSaveMemoryINTEL: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRestoreMemoryINTEL: *hasResult = false; *hasResultType = false; break;
+    case Op::OpLoopControlINTEL: *hasResult = false; *hasResultType = false; break;
+    case Op::OpPtrCastToCrossWorkgroupINTEL: *hasResult = true; *hasResultType = true; break;
+    case Op::OpCrossWorkgroupCastToPtrINTEL: *hasResult = true; *hasResultType = true; break;
+    case Op::OpReadPipeBlockingINTEL: *hasResult = true; *hasResultType = true; break;
+    case Op::OpWritePipeBlockingINTEL: *hasResult = true; *hasResultType = true; break;
+    case Op::OpFPGARegINTEL: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetRayTMinKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetRayFlagsKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionTKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionInstanceCustomIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionInstanceIdKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionGeometryIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionPrimitiveIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionBarycentricsKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionFrontFaceKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionCandidateAABBOpaqueKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionObjectRayDirectionKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionObjectRayOriginKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetWorldRayDirectionKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetWorldRayOriginKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionObjectToWorldKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpRayQueryGetIntersectionWorldToObjectKHR: *hasResult = true; *hasResultType = true; break;
+    case Op::OpAtomicFAddEXT: *hasResult = true; *hasResultType = true; break;
+    case Op::OpTypeBufferSurfaceINTEL: *hasResult = true; *hasResultType = false; break;
+    case Op::OpTypeStructContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case Op::OpConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case Op::OpSpecConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    }
+}
+#endif /* SPV_ENABLE_UTILITY_CODE */
+
 // Overload operator| for mask bit combining
 inline ImageOperandsMask operator|(ImageOperandsMask a, ImageOperandsMask b) { return ImageOperandsMask(unsigned(a) | unsigned(b)); }
@@ -1095,6 +2291,8 @@ inline FunctionControlMask operator|(FunctionControlMask a, FunctionControlMask
 inline MemorySemanticsMask operator|(MemorySemanticsMask a, MemorySemanticsMask b) { return MemorySemanticsMask(unsigned(a) | unsigned(b)); }
 inline MemoryAccessMask operator|(MemoryAccessMask a, MemoryAccessMask b) { return MemoryAccessMask(unsigned(a) | unsigned(b)); }
 inline KernelProfilingInfoMask operator|(KernelProfilingInfoMask a, KernelProfilingInfoMask b) { return KernelProfilingInfoMask(unsigned(a) | unsigned(b)); }
+inline RayFlagsMask operator|(RayFlagsMask a, RayFlagsMask b) { return RayFlagsMask(unsigned(a) | unsigned(b)); }
+inline FragmentShadingRateMask operator|(FragmentShadingRateMask a, FragmentShadingRateMask b) { return FragmentShadingRateMask(unsigned(a) | unsigned(b)); }
 
 } // end namespace spv
diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.json b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.json
index a592dfa23..d12eb68da 100755
--- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.json
+++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.json
@@ -6,7 +6,7 @@
   "Comment": [
     [
-      "Copyright (c) 2014-2018 The Khronos Group Inc.",
+      "Copyright (c) 2014-2020 The Khronos Group Inc.",
       "",
       "Permission is hereby granted, free of charge, to any person obtaining a copy",
       "of this software and/or associated documentation files (the \"Materials\"),",
@@ -36,13 +36,16 @@
     ],
     [
       "Enumeration tokens for SPIR-V, in various styles:",
-      "  C, C++, C++11, JSON, Lua, Python",
+      "  C, C++, C++11, JSON, Lua, Python, C#, D",
       "",
      "- C will have tokens with a \"Spv\" prefix, e.g.: SpvSourceLanguageGLSL",
      "- C++ will have tokens in the \"spv\" name space, e.g.: spv::SourceLanguageGLSL",
      "- C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL",
      "- Lua will use tables, e.g.: spv.SourceLanguage.GLSL",
      "- Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL']",
+      "- C# will use enum classes in the Specification class located in the \"Spv\" namespace,",
+      "    e.g.: Spv.Specification.SourceLanguage.GLSL",
+      "- D will have tokens under the \"spv\" module, e.g: spv.SourceLanguage.GLSL",
       "",
       "Some tokens act like mask values, which can be OR'd together,",
       "while others are mutually exclusive.
The mask-like ones have", @@ -51,8 +54,8 @@ ] ], "MagicNumber": 119734787, - "Version": 66304, - "Revision": 1, + "Version": 66816, + "Revision": 4, "OpCodeMask": 65535, "WordCountShift": 16 }, @@ -82,7 +85,21 @@ "Geometry": 3, "Fragment": 4, "GLCompute": 5, - "Kernel": 6 + "Kernel": 6, + "TaskNV": 5267, + "MeshNV": 5268, + "RayGenerationKHR": 5313, + "RayGenerationNV": 5313, + "IntersectionKHR": 5314, + "IntersectionNV": 5314, + "AnyHitKHR": 5315, + "AnyHitNV": 5315, + "ClosestHitKHR": 5316, + "ClosestHitNV": 5316, + "MissKHR": 5317, + "MissNV": 5317, + "CallableKHR": 5318, + "CallableNV": 5318 } }, { @@ -92,7 +109,9 @@ { "Logical": 0, "Physical32": 1, - "Physical64": 2 + "Physical64": 2, + "PhysicalStorageBuffer64": 5348, + "PhysicalStorageBuffer64EXT": 5348 } }, { @@ -102,7 +121,9 @@ { "Simple": 0, "GLSL450": 1, - "OpenCL": 2 + "OpenCL": 2, + "Vulkan": 3, + "VulkanKHR": 3 } }, { @@ -149,7 +170,33 @@ "LocalSizeId": 38, "LocalSizeHintId": 39, "PostDepthCoverage": 4446, - "StencilRefReplacingEXT": 5027 + "DenormPreserve": 4459, + "DenormFlushToZero": 4460, + "SignedZeroInfNanPreserve": 4461, + "RoundingModeRTE": 4462, + "RoundingModeRTZ": 4463, + "StencilRefReplacingEXT": 5027, + "OutputLinesNV": 5269, + "OutputPrimitivesNV": 5270, + "DerivativeGroupQuadsNV": 5289, + "DerivativeGroupLinearNV": 5290, + "OutputTrianglesNV": 5298, + "PixelInterlockOrderedEXT": 5366, + "PixelInterlockUnorderedEXT": 5367, + "SampleInterlockOrderedEXT": 5368, + "SampleInterlockUnorderedEXT": 5369, + "ShadingRateInterlockOrderedEXT": 5370, + "ShadingRateInterlockUnorderedEXT": 5371, + "SharedLocalMemorySizeINTEL": 5618, + "RoundingModeRTPINTEL": 5620, + "RoundingModeRTNINTEL": 5621, + "FloatingPointModeALTINTEL": 5622, + "FloatingPointModeIEEEINTEL": 5623, + "MaxWorkgroupSizeINTEL": 5893, + "MaxWorkDimINTEL": 5894, + "NoGlobalOffsetINTEL": 5895, + "NumSIMDWorkitemsINTEL": 5896, + "SchedulerTargetFmaxMhzINTEL": 5903 } }, { @@ -169,7 +216,24 @@ "PushConstant": 9, "AtomicCounter": 10, "Image": 11, - "StorageBuffer": 12 + "StorageBuffer": 12, + "CallableDataKHR": 5328, + "CallableDataNV": 5328, + "IncomingCallableDataKHR": 5329, + "IncomingCallableDataNV": 5329, + "RayPayloadKHR": 5338, + "RayPayloadNV": 5338, + "HitAttributeKHR": 5339, + "HitAttributeNV": 5339, + "IncomingRayPayloadKHR": 5342, + "IncomingRayPayloadNV": 5342, + "ShaderRecordBufferKHR": 5343, + "ShaderRecordBufferNV": 5343, + "PhysicalStorageBuffer": 5349, + "PhysicalStorageBufferEXT": 5349, + "CodeSectionINTEL": 5605, + "DeviceOnlyINTEL": 5936, + "HostOnlyINTEL": 5937 } }, { @@ -251,7 +315,9 @@ "Rg16ui": 36, "Rg8ui": 37, "R16ui": 38, - "R8ui": 39 + "R8ui": 39, + "R64ui": 40, + "R64i": 41 } }, { @@ -317,7 +383,17 @@ "Offset": 4, "ConstOffsets": 5, "Sample": 6, - "MinLod": 7 + "MinLod": 7, + "MakeTexelAvailable": 8, + "MakeTexelAvailableKHR": 8, + "MakeTexelVisible": 9, + "MakeTexelVisibleKHR": 9, + "NonPrivateTexel": 10, + "NonPrivateTexelKHR": 10, + "VolatileTexel": 11, + "VolatileTexelKHR": 11, + "SignExtend": 12, + "ZeroExtend": 13 } }, { @@ -329,7 +405,9 @@ "NotInf": 1, "NSZ": 2, "AllowRecip": 3, - "Fast": 4 + "Fast": 4, + "AllowContractFastINTEL": 16, + "AllowReassocINTEL": 17 } }, { @@ -408,6 +486,7 @@ "NonWritable": 24, "NonReadable": 25, "Uniform": 26, + "UniformId": 27, "SaturatedConversion": 28, "Stream": 29, "Location": 30, @@ -428,14 +507,62 @@ "MaxByteOffset": 45, "AlignmentId": 46, "MaxByteOffsetId": 47, + "NoSignedWrap": 4469, + "NoUnsignedWrap": 4470, "ExplicitInterpAMD": 4999, "OverrideCoverageNV": 5248, "PassthroughNV": 5250, 
"ViewportRelativeNV": 5252, "SecondaryViewportRelativeNV": 5256, + "PerPrimitiveNV": 5271, + "PerViewNV": 5272, + "PerTaskNV": 5273, + "PerVertexNV": 5285, + "NonUniform": 5300, "NonUniformEXT": 5300, + "RestrictPointer": 5355, + "RestrictPointerEXT": 5355, + "AliasedPointer": 5356, + "AliasedPointerEXT": 5356, + "SIMTCallINTEL": 5599, + "ReferencedIndirectlyINTEL": 5602, + "ClobberINTEL": 5607, + "SideEffectsINTEL": 5608, + "VectorComputeVariableINTEL": 5624, + "FuncParamIOKindINTEL": 5625, + "VectorComputeFunctionINTEL": 5626, + "StackCallINTEL": 5627, + "GlobalVariableOffsetINTEL": 5628, + "CounterBuffer": 5634, "HlslCounterBufferGOOGLE": 5634, - "HlslSemanticGOOGLE": 5635 + "HlslSemanticGOOGLE": 5635, + "UserSemantic": 5635, + "UserTypeGOOGLE": 5636, + "FunctionRoundingModeINTEL": 5822, + "FunctionDenormModeINTEL": 5823, + "RegisterINTEL": 5825, + "MemoryINTEL": 5826, + "NumbanksINTEL": 5827, + "BankwidthINTEL": 5828, + "MaxPrivateCopiesINTEL": 5829, + "SinglepumpINTEL": 5830, + "DoublepumpINTEL": 5831, + "MaxReplicatesINTEL": 5832, + "SimpleDualPortINTEL": 5833, + "MergeINTEL": 5834, + "BankBitsINTEL": 5835, + "ForcePow2DepthINTEL": 5836, + "BurstCoalesceINTEL": 5899, + "CacheSizeINTEL": 5900, + "DontStaticallyCoalesceINTEL": 5901, + "PrefetchINTEL": 5902, + "StallEnableINTEL": 5905, + "FuseLoopsInFunctionINTEL": 5907, + "BufferLocationINTEL": 5921, + "IOPipeStorageINTEL": 5944, + "FunctionFloatingPointModeINTEL": 6080, + "SingleElementVectorINTEL": 6085, + "VectorComputeCallableFunctionINTEL": 6087 } }, { @@ -497,8 +624,10 @@ "BaseVertex": 4424, "BaseInstance": 4425, "DrawIndex": 4426, + "PrimitiveShadingRateKHR": 4432, "DeviceIndex": 4438, "ViewIndex": 4440, + "ShadingRateKHR": 4444, "BaryCoordNoPerspAMD": 4992, "BaryCoordNoPerspCentroidAMD": 4993, "BaryCoordNoPerspSampleAMD": 4994, @@ -512,7 +641,53 @@ "SecondaryViewportMaskNV": 5258, "PositionPerViewNV": 5261, "ViewportMaskPerViewNV": 5262, - "FullyCoveredEXT": 5264 + "FullyCoveredEXT": 5264, + "TaskCountNV": 5274, + "PrimitiveCountNV": 5275, + "PrimitiveIndicesNV": 5276, + "ClipDistancePerViewNV": 5277, + "CullDistancePerViewNV": 5278, + "LayerPerViewNV": 5279, + "MeshViewCountNV": 5280, + "MeshViewIndicesNV": 5281, + "BaryCoordNV": 5286, + "BaryCoordNoPerspNV": 5287, + "FragSizeEXT": 5292, + "FragmentSizeNV": 5292, + "FragInvocationCountEXT": 5293, + "InvocationsPerPixelNV": 5293, + "LaunchIdKHR": 5319, + "LaunchIdNV": 5319, + "LaunchSizeKHR": 5320, + "LaunchSizeNV": 5320, + "WorldRayOriginKHR": 5321, + "WorldRayOriginNV": 5321, + "WorldRayDirectionKHR": 5322, + "WorldRayDirectionNV": 5322, + "ObjectRayOriginKHR": 5323, + "ObjectRayOriginNV": 5323, + "ObjectRayDirectionKHR": 5324, + "ObjectRayDirectionNV": 5324, + "RayTminKHR": 5325, + "RayTminNV": 5325, + "RayTmaxKHR": 5326, + "RayTmaxNV": 5326, + "InstanceCustomIndexKHR": 5327, + "InstanceCustomIndexNV": 5327, + "ObjectToWorldKHR": 5330, + "ObjectToWorldNV": 5330, + "WorldToObjectKHR": 5331, + "WorldToObjectNV": 5331, + "HitTNV": 5332, + "HitKindKHR": 5333, + "HitKindNV": 5333, + "IncomingRayFlagsKHR": 5351, + "IncomingRayFlagsNV": 5351, + "RayGeometryIndexKHR": 5352, + "WarpsPerSMNV": 5374, + "SMCountNV": 5375, + "WarpIDNV": 5376, + "SMIDNV": 5377 } }, { @@ -532,7 +707,20 @@ "Unroll": 0, "DontUnroll": 1, "DependencyInfinite": 2, - "DependencyLength": 3 + "DependencyLength": 3, + "MinIterations": 4, + "MaxIterations": 5, + "IterationMultiple": 6, + "PeelCount": 7, + "PartialCount": 8, + "InitiationIntervalINTEL": 16, + "MaxConcurrencyINTEL": 17, + "DependencyArrayINTEL": 18, + 
"PipelineEnableINTEL": 19, + "LoopCoalesceINTEL": 20, + "MaxInterleavingINTEL": 21, + "SpeculatedIterationsINTEL": 22, + "NoFusionINTEL": 23 } }, { @@ -560,7 +748,14 @@ "WorkgroupMemory": 8, "CrossWorkgroupMemory": 9, "AtomicCounterMemory": 10, - "ImageMemory": 11 + "ImageMemory": 11, + "OutputMemory": 12, + "OutputMemoryKHR": 12, + "MakeAvailable": 13, + "MakeAvailableKHR": 13, + "MakeVisible": 14, + "MakeVisibleKHR": 14, + "Volatile": 15 } }, { @@ -570,7 +765,13 @@ { "Volatile": 0, "Aligned": 1, - "Nontemporal": 2 + "Nontemporal": 2, + "MakePointerAvailable": 3, + "MakePointerAvailableKHR": 3, + "MakePointerVisible": 4, + "MakePointerVisibleKHR": 4, + "NonPrivatePointer": 5, + "NonPrivatePointerKHR": 5 } }, { @@ -582,7 +783,10 @@ "Device": 1, "Workgroup": 2, "Subgroup": 3, - "Invocation": 4 + "Invocation": 4, + "QueueFamily": 5, + "QueueFamilyKHR": 5, + "ShaderCallKHR": 6 } }, { @@ -689,8 +893,14 @@ "GroupNonUniformShuffleRelative": 66, "GroupNonUniformClustered": 67, "GroupNonUniformQuad": 68, + "ShaderLayer": 69, + "ShaderViewportIndex": 70, + "FragmentShadingRateKHR": 4422, "SubgroupBallotKHR": 4423, "DrawParameters": 4427, + "WorkgroupMemoryExplicitLayoutKHR": 4428, + "WorkgroupMemoryExplicitLayout8BitAccessKHR": 4429, + "WorkgroupMemoryExplicitLayout16BitAccessKHR": 4430, "SubgroupVoteKHR": 4431, "StorageBuffer16BitAccess": 4433, "StorageUniformBufferBlock16": 4433, @@ -707,11 +917,22 @@ "StorageBuffer8BitAccess": 4448, "UniformAndStorageBuffer8BitAccess": 4449, "StoragePushConstant8": 4450, + "DenormPreserve": 4464, + "DenormFlushToZero": 4465, + "SignedZeroInfNanPreserve": 4466, + "RoundingModeRTE": 4467, + "RoundingModeRTZ": 4468, + "RayQueryProvisionalKHR": 4471, + "RayQueryKHR": 4472, + "RayTraversalPrimitiveCullingKHR": 4478, + "RayTracingKHR": 4479, "Float16ImageAMD": 5008, "ImageGatherBiasLodAMD": 5009, "FragmentMaskAMD": 5010, "StencilExportEXT": 5013, "ImageReadWriteLodAMD": 5015, + "Int64ImageEXT": 5016, + "ShaderClockKHR": 5055, "SampleMaskOverrideCoverageNV": 5249, "GeometryShaderPassthroughNV": 5251, "ShaderViewportIndexLayerEXT": 5254, @@ -720,22 +941,164 @@ "ShaderStereoViewNV": 5259, "PerViewAttributesNV": 5260, "FragmentFullyCoveredEXT": 5265, + "MeshShadingNV": 5266, + "ImageFootprintNV": 5282, + "FragmentBarycentricNV": 5284, + "ComputeDerivativeGroupQuadsNV": 5288, + "FragmentDensityEXT": 5291, + "ShadingRateNV": 5291, "GroupNonUniformPartitionedNV": 5297, + "ShaderNonUniform": 5301, "ShaderNonUniformEXT": 5301, + "RuntimeDescriptorArray": 5302, "RuntimeDescriptorArrayEXT": 5302, + "InputAttachmentArrayDynamicIndexing": 5303, "InputAttachmentArrayDynamicIndexingEXT": 5303, + "UniformTexelBufferArrayDynamicIndexing": 5304, "UniformTexelBufferArrayDynamicIndexingEXT": 5304, + "StorageTexelBufferArrayDynamicIndexing": 5305, "StorageTexelBufferArrayDynamicIndexingEXT": 5305, + "UniformBufferArrayNonUniformIndexing": 5306, "UniformBufferArrayNonUniformIndexingEXT": 5306, + "SampledImageArrayNonUniformIndexing": 5307, "SampledImageArrayNonUniformIndexingEXT": 5307, + "StorageBufferArrayNonUniformIndexing": 5308, "StorageBufferArrayNonUniformIndexingEXT": 5308, + "StorageImageArrayNonUniformIndexing": 5309, "StorageImageArrayNonUniformIndexingEXT": 5309, + "InputAttachmentArrayNonUniformIndexing": 5310, "InputAttachmentArrayNonUniformIndexingEXT": 5310, + "UniformTexelBufferArrayNonUniformIndexing": 5311, "UniformTexelBufferArrayNonUniformIndexingEXT": 5311, + "StorageTexelBufferArrayNonUniformIndexing": 5312, "StorageTexelBufferArrayNonUniformIndexingEXT": 5312, + 
"RayTracingNV": 5340, + "VulkanMemoryModel": 5345, + "VulkanMemoryModelKHR": 5345, + "VulkanMemoryModelDeviceScope": 5346, + "VulkanMemoryModelDeviceScopeKHR": 5346, + "PhysicalStorageBufferAddresses": 5347, + "PhysicalStorageBufferAddressesEXT": 5347, + "ComputeDerivativeGroupLinearNV": 5350, + "RayTracingProvisionalKHR": 5353, + "CooperativeMatrixNV": 5357, + "FragmentShaderSampleInterlockEXT": 5363, + "FragmentShaderShadingRateInterlockEXT": 5372, + "ShaderSMBuiltinsNV": 5373, + "FragmentShaderPixelInterlockEXT": 5378, + "DemoteToHelperInvocationEXT": 5379, "SubgroupShuffleINTEL": 5568, "SubgroupBufferBlockIOINTEL": 5569, - "SubgroupImageBlockIOINTEL": 5570 + "SubgroupImageBlockIOINTEL": 5570, + "SubgroupImageMediaBlockIOINTEL": 5579, + "RoundToInfinityINTEL": 5582, + "FloatingPointModeINTEL": 5583, + "IntegerFunctions2INTEL": 5584, + "FunctionPointersINTEL": 5603, + "IndirectReferencesINTEL": 5604, + "AsmINTEL": 5606, + "AtomicFloat32MinMaxEXT": 5612, + "AtomicFloat64MinMaxEXT": 5613, + "AtomicFloat16MinMaxEXT": 5616, + "VectorComputeINTEL": 5617, + "VectorAnyINTEL": 5619, + "SubgroupAvcMotionEstimationINTEL": 5696, + "SubgroupAvcMotionEstimationIntraINTEL": 5697, + "SubgroupAvcMotionEstimationChromaINTEL": 5698, + "VariableLengthArrayINTEL": 5817, + "FunctionFloatControlINTEL": 5821, + "FPGAMemoryAttributesINTEL": 5824, + "FPFastMathModeINTEL": 5837, + "ArbitraryPrecisionIntegersINTEL": 5844, + "UnstructuredLoopControlsINTEL": 5886, + "FPGALoopControlsINTEL": 5888, + "KernelAttributesINTEL": 5892, + "FPGAKernelAttributesINTEL": 5897, + "FPGAMemoryAccessesINTEL": 5898, + "FPGAClusterAttributesINTEL": 5904, + "LoopFuseINTEL": 5906, + "FPGABufferLocationINTEL": 5920, + "USMStorageClassesINTEL": 5935, + "IOPipesINTEL": 5943, + "BlockingPipesINTEL": 5945, + "FPGARegINTEL": 5948, + "AtomicFloat32AddEXT": 6033, + "AtomicFloat64AddEXT": 6034, + "LongConstantCompositeINTEL": 6089 + } + }, + { + "Name": "RayFlags", + "Type": "Bit", + "Values": + { + "OpaqueKHR": 0, + "NoOpaqueKHR": 1, + "TerminateOnFirstHitKHR": 2, + "SkipClosestHitShaderKHR": 3, + "CullBackFacingTrianglesKHR": 4, + "CullFrontFacingTrianglesKHR": 5, + "CullOpaqueKHR": 6, + "CullNoOpaqueKHR": 7, + "SkipTrianglesKHR": 8, + "SkipAABBsKHR": 9 + } + }, + { + "Name": "RayQueryIntersection", + "Type": "Value", + "Values": + { + "RayQueryCandidateIntersectionKHR": 0, + "RayQueryCommittedIntersectionKHR": 1 + } + }, + { + "Name": "RayQueryCommittedIntersectionType", + "Type": "Value", + "Values": + { + "RayQueryCommittedIntersectionNoneKHR": 0, + "RayQueryCommittedIntersectionTriangleKHR": 1, + "RayQueryCommittedIntersectionGeneratedKHR": 2 + } + }, + { + "Name": "RayQueryCandidateIntersectionType", + "Type": "Value", + "Values": + { + "RayQueryCandidateIntersectionTriangleKHR": 0, + "RayQueryCandidateIntersectionAABBKHR": 1 + } + }, + { + "Name": "FragmentShadingRate", + "Type": "Bit", + "Values": + { + "Vertical2Pixels": 0, + "Vertical4Pixels": 1, + "Horizontal2Pixels": 2, + "Horizontal4Pixels": 3 + } + }, + { + "Name": "FPDenormMode", + "Type": "Value", + "Values": + { + "Preserve": 0, + "FlushToZero": 1 + } + }, + { + "Name": "FPOperationMode", + "Type": "Value", + "Values": + { + "IEEE": 0, + "ALT": 1 } }, { @@ -1083,12 +1446,29 @@ "OpGroupNonUniformLogicalXor": 364, "OpGroupNonUniformQuadBroadcast": 365, "OpGroupNonUniformQuadSwap": 366, + "OpCopyLogical": 400, + "OpPtrEqual": 401, + "OpPtrNotEqual": 402, + "OpPtrDiff": 403, + "OpTerminateInvocation": 4416, "OpSubgroupBallotKHR": 4421, "OpSubgroupFirstInvocationKHR": 4422, 
"OpSubgroupAllKHR": 4428, "OpSubgroupAnyKHR": 4429, "OpSubgroupAllEqualKHR": 4430, "OpSubgroupReadInvocationKHR": 4432, + "OpTraceRayKHR": 4445, + "OpExecuteCallableKHR": 4446, + "OpConvertUToAccelerationStructureKHR": 4447, + "OpIgnoreIntersectionKHR": 4448, + "OpTerminateRayKHR": 4449, + "OpTypeRayQueryKHR": 4472, + "OpRayQueryInitializeKHR": 4473, + "OpRayQueryTerminateKHR": 4474, + "OpRayQueryGenerateIntersectionKHR": 4475, + "OpRayQueryConfirmIntersectionKHR": 4476, + "OpRayQueryProceedKHR": 4477, + "OpRayQueryGetIntersectionTypeKHR": 4479, "OpGroupIAddNonUniformAMD": 5000, "OpGroupFAddNonUniformAMD": 5001, "OpGroupFMinNonUniformAMD": 5002, @@ -1099,7 +1479,27 @@ "OpGroupSMaxNonUniformAMD": 5007, "OpFragmentMaskFetchAMD": 5011, "OpFragmentFetchAMD": 5012, + "OpReadClockKHR": 5056, + "OpImageSampleFootprintNV": 5283, "OpGroupNonUniformPartitionNV": 5296, + "OpWritePackedPrimitiveIndices4x8NV": 5299, + "OpReportIntersectionKHR": 5334, + "OpReportIntersectionNV": 5334, + "OpIgnoreIntersectionNV": 5335, + "OpTerminateRayNV": 5336, + "OpTraceNV": 5337, + "OpTypeAccelerationStructureKHR": 5341, + "OpTypeAccelerationStructureNV": 5341, + "OpExecuteCallableNV": 5344, + "OpTypeCooperativeMatrixNV": 5358, + "OpCooperativeMatrixLoadNV": 5359, + "OpCooperativeMatrixStoreNV": 5360, + "OpCooperativeMatrixMulAddNV": 5361, + "OpCooperativeMatrixLengthNV": 5362, + "OpBeginInvocationInterlockEXT": 5364, + "OpEndInvocationInterlockEXT": 5365, + "OpDemoteToHelperInvocationEXT": 5380, + "OpIsHelperInvocationEXT": 5381, "OpSubgroupShuffleINTEL": 5571, "OpSubgroupShuffleDownINTEL": 5572, "OpSubgroupShuffleUpINTEL": 5573, @@ -1108,8 +1508,182 @@ "OpSubgroupBlockWriteINTEL": 5576, "OpSubgroupImageBlockReadINTEL": 5577, "OpSubgroupImageBlockWriteINTEL": 5578, + "OpSubgroupImageMediaBlockReadINTEL": 5580, + "OpSubgroupImageMediaBlockWriteINTEL": 5581, + "OpUCountLeadingZerosINTEL": 5585, + "OpUCountTrailingZerosINTEL": 5586, + "OpAbsISubINTEL": 5587, + "OpAbsUSubINTEL": 5588, + "OpIAddSatINTEL": 5589, + "OpUAddSatINTEL": 5590, + "OpIAverageINTEL": 5591, + "OpUAverageINTEL": 5592, + "OpIAverageRoundedINTEL": 5593, + "OpUAverageRoundedINTEL": 5594, + "OpISubSatINTEL": 5595, + "OpUSubSatINTEL": 5596, + "OpIMul32x16INTEL": 5597, + "OpUMul32x16INTEL": 5598, + "OpConstFunctionPointerINTEL": 5600, + "OpFunctionPointerCallINTEL": 5601, + "OpAsmTargetINTEL": 5609, + "OpAsmINTEL": 5610, + "OpAsmCallINTEL": 5611, + "OpAtomicFMinEXT": 5614, + "OpAtomicFMaxEXT": 5615, + "OpDecorateString": 5632, "OpDecorateStringGOOGLE": 5632, - "OpMemberDecorateStringGOOGLE": 5633 + "OpMemberDecorateString": 5633, + "OpMemberDecorateStringGOOGLE": 5633, + "OpVmeImageINTEL": 5699, + "OpTypeVmeImageINTEL": 5700, + "OpTypeAvcImePayloadINTEL": 5701, + "OpTypeAvcRefPayloadINTEL": 5702, + "OpTypeAvcSicPayloadINTEL": 5703, + "OpTypeAvcMcePayloadINTEL": 5704, + "OpTypeAvcMceResultINTEL": 5705, + "OpTypeAvcImeResultINTEL": 5706, + "OpTypeAvcImeResultSingleReferenceStreamoutINTEL": 5707, + "OpTypeAvcImeResultDualReferenceStreamoutINTEL": 5708, + "OpTypeAvcImeSingleReferenceStreaminINTEL": 5709, + "OpTypeAvcImeDualReferenceStreaminINTEL": 5710, + "OpTypeAvcRefResultINTEL": 5711, + "OpTypeAvcSicResultINTEL": 5712, + "OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL": 5713, + "OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL": 5714, + "OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL": 5715, + "OpSubgroupAvcMceSetInterShapePenaltyINTEL": 5716, + "OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL": 5717, + 
"OpSubgroupAvcMceSetInterDirectionPenaltyINTEL": 5718, + "OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL": 5719, + "OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL": 5720, + "OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL": 5721, + "OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL": 5722, + "OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL": 5723, + "OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL": 5724, + "OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL": 5725, + "OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL": 5726, + "OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL": 5727, + "OpSubgroupAvcMceSetAcOnlyHaarINTEL": 5728, + "OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL": 5729, + "OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL": 5730, + "OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL": 5731, + "OpSubgroupAvcMceConvertToImePayloadINTEL": 5732, + "OpSubgroupAvcMceConvertToImeResultINTEL": 5733, + "OpSubgroupAvcMceConvertToRefPayloadINTEL": 5734, + "OpSubgroupAvcMceConvertToRefResultINTEL": 5735, + "OpSubgroupAvcMceConvertToSicPayloadINTEL": 5736, + "OpSubgroupAvcMceConvertToSicResultINTEL": 5737, + "OpSubgroupAvcMceGetMotionVectorsINTEL": 5738, + "OpSubgroupAvcMceGetInterDistortionsINTEL": 5739, + "OpSubgroupAvcMceGetBestInterDistortionsINTEL": 5740, + "OpSubgroupAvcMceGetInterMajorShapeINTEL": 5741, + "OpSubgroupAvcMceGetInterMinorShapeINTEL": 5742, + "OpSubgroupAvcMceGetInterDirectionsINTEL": 5743, + "OpSubgroupAvcMceGetInterMotionVectorCountINTEL": 5744, + "OpSubgroupAvcMceGetInterReferenceIdsINTEL": 5745, + "OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL": 5746, + "OpSubgroupAvcImeInitializeINTEL": 5747, + "OpSubgroupAvcImeSetSingleReferenceINTEL": 5748, + "OpSubgroupAvcImeSetDualReferenceINTEL": 5749, + "OpSubgroupAvcImeRefWindowSizeINTEL": 5750, + "OpSubgroupAvcImeAdjustRefOffsetINTEL": 5751, + "OpSubgroupAvcImeConvertToMcePayloadINTEL": 5752, + "OpSubgroupAvcImeSetMaxMotionVectorCountINTEL": 5753, + "OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL": 5754, + "OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL": 5755, + "OpSubgroupAvcImeSetWeightedSadINTEL": 5756, + "OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL": 5757, + "OpSubgroupAvcImeEvaluateWithDualReferenceINTEL": 5758, + "OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL": 5759, + "OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL": 5760, + "OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL": 5761, + "OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL": 5762, + "OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL": 5763, + "OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL": 5764, + "OpSubgroupAvcImeConvertToMceResultINTEL": 5765, + "OpSubgroupAvcImeGetSingleReferenceStreaminINTEL": 5766, + "OpSubgroupAvcImeGetDualReferenceStreaminINTEL": 5767, + "OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL": 5768, + "OpSubgroupAvcImeStripDualReferenceStreamoutINTEL": 5769, + "OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL": 5770, + "OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL": 5771, + "OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL": 5772, + "OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL": 5773, + "OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL": 5774, + "OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL": 5775, + 
"OpSubgroupAvcImeGetBorderReachedINTEL": 5776, + "OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL": 5777, + "OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL": 5778, + "OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL": 5779, + "OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL": 5780, + "OpSubgroupAvcFmeInitializeINTEL": 5781, + "OpSubgroupAvcBmeInitializeINTEL": 5782, + "OpSubgroupAvcRefConvertToMcePayloadINTEL": 5783, + "OpSubgroupAvcRefSetBidirectionalMixDisableINTEL": 5784, + "OpSubgroupAvcRefSetBilinearFilterEnableINTEL": 5785, + "OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL": 5786, + "OpSubgroupAvcRefEvaluateWithDualReferenceINTEL": 5787, + "OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL": 5788, + "OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL": 5789, + "OpSubgroupAvcRefConvertToMceResultINTEL": 5790, + "OpSubgroupAvcSicInitializeINTEL": 5791, + "OpSubgroupAvcSicConfigureSkcINTEL": 5792, + "OpSubgroupAvcSicConfigureIpeLumaINTEL": 5793, + "OpSubgroupAvcSicConfigureIpeLumaChromaINTEL": 5794, + "OpSubgroupAvcSicGetMotionVectorMaskINTEL": 5795, + "OpSubgroupAvcSicConvertToMcePayloadINTEL": 5796, + "OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL": 5797, + "OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL": 5798, + "OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL": 5799, + "OpSubgroupAvcSicSetBilinearFilterEnableINTEL": 5800, + "OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL": 5801, + "OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL": 5802, + "OpSubgroupAvcSicEvaluateIpeINTEL": 5803, + "OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL": 5804, + "OpSubgroupAvcSicEvaluateWithDualReferenceINTEL": 5805, + "OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL": 5806, + "OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL": 5807, + "OpSubgroupAvcSicConvertToMceResultINTEL": 5808, + "OpSubgroupAvcSicGetIpeLumaShapeINTEL": 5809, + "OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL": 5810, + "OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL": 5811, + "OpSubgroupAvcSicGetPackedIpeLumaModesINTEL": 5812, + "OpSubgroupAvcSicGetIpeChromaModeINTEL": 5813, + "OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL": 5814, + "OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL": 5815, + "OpSubgroupAvcSicGetInterRawSadsINTEL": 5816, + "OpVariableLengthArrayINTEL": 5818, + "OpSaveMemoryINTEL": 5819, + "OpRestoreMemoryINTEL": 5820, + "OpLoopControlINTEL": 5887, + "OpPtrCastToCrossWorkgroupINTEL": 5934, + "OpCrossWorkgroupCastToPtrINTEL": 5938, + "OpReadPipeBlockingINTEL": 5946, + "OpWritePipeBlockingINTEL": 5947, + "OpFPGARegINTEL": 5949, + "OpRayQueryGetRayTMinKHR": 6016, + "OpRayQueryGetRayFlagsKHR": 6017, + "OpRayQueryGetIntersectionTKHR": 6018, + "OpRayQueryGetIntersectionInstanceCustomIndexKHR": 6019, + "OpRayQueryGetIntersectionInstanceIdKHR": 6020, + "OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR": 6021, + "OpRayQueryGetIntersectionGeometryIndexKHR": 6022, + "OpRayQueryGetIntersectionPrimitiveIndexKHR": 6023, + "OpRayQueryGetIntersectionBarycentricsKHR": 6024, + "OpRayQueryGetIntersectionFrontFaceKHR": 6025, + "OpRayQueryGetIntersectionCandidateAABBOpaqueKHR": 6026, + "OpRayQueryGetIntersectionObjectRayDirectionKHR": 6027, + "OpRayQueryGetIntersectionObjectRayOriginKHR": 6028, + "OpRayQueryGetWorldRayDirectionKHR": 6029, + "OpRayQueryGetWorldRayOriginKHR": 6030, + "OpRayQueryGetIntersectionObjectToWorldKHR": 6031, + "OpRayQueryGetIntersectionWorldToObjectKHR": 6032, + "OpAtomicFAddEXT": 6035, + "OpTypeBufferSurfaceINTEL": 6086, + 
"OpTypeStructContinuedINTEL": 6090, + "OpConstantCompositeContinuedINTEL": 6091, + "OpSpecConstantCompositeContinuedINTEL": 6092 } } ] diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.lua b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.lua index 43e9ba5be..b9c8bc471 100755 --- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.lua +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.lua @@ -1,4 +1,4 @@ --- Copyright (c) 2014-2018 The Khronos Group Inc. +-- Copyright (c) 2014-2020 The Khronos Group Inc. -- -- Permission is hereby granted, free of charge, to any person obtaining a copy -- of this software and/or associated documentation files (the "Materials"), @@ -26,13 +26,16 @@ -- the Binary Section of the SPIR-V specification. -- Enumeration tokens for SPIR-V, in various styles: --- C, C++, C++11, JSON, Lua, Python +-- C, C++, C++11, JSON, Lua, Python, C#, D -- -- - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL -- - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL -- - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL -- - Lua will use tables, e.g.: spv.SourceLanguage.GLSL -- - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +-- - C# will use enum classes in the Specification class located in the "Spv" namespace, +-- e.g.: Spv.Specification.SourceLanguage.GLSL +-- - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL -- -- Some tokens act like mask values, which can be OR'd together, -- while others are mutually exclusive. The mask-like ones have @@ -41,8 +44,8 @@ spv = { MagicNumber = 0x07230203, - Version = 0x00010300, - Revision = 1, + Version = 0x00010500, + Revision = 4, OpCodeMask = 0xffff, WordCountShift = 16, @@ -63,18 +66,36 @@ spv = { Fragment = 4, GLCompute = 5, Kernel = 6, + TaskNV = 5267, + MeshNV = 5268, + RayGenerationKHR = 5313, + RayGenerationNV = 5313, + IntersectionKHR = 5314, + IntersectionNV = 5314, + AnyHitKHR = 5315, + AnyHitNV = 5315, + ClosestHitKHR = 5316, + ClosestHitNV = 5316, + MissKHR = 5317, + MissNV = 5317, + CallableKHR = 5318, + CallableNV = 5318, }, AddressingModel = { Logical = 0, Physical32 = 1, Physical64 = 2, + PhysicalStorageBuffer64 = 5348, + PhysicalStorageBuffer64EXT = 5348, }, MemoryModel = { Simple = 0, GLSL450 = 1, OpenCL = 2, + Vulkan = 3, + VulkanKHR = 3, }, ExecutionMode = { @@ -117,7 +138,33 @@ spv = { LocalSizeId = 38, LocalSizeHintId = 39, PostDepthCoverage = 4446, + DenormPreserve = 4459, + DenormFlushToZero = 4460, + SignedZeroInfNanPreserve = 4461, + RoundingModeRTE = 4462, + RoundingModeRTZ = 4463, StencilRefReplacingEXT = 5027, + OutputLinesNV = 5269, + OutputPrimitivesNV = 5270, + DerivativeGroupQuadsNV = 5289, + DerivativeGroupLinearNV = 5290, + OutputTrianglesNV = 5298, + PixelInterlockOrderedEXT = 5366, + PixelInterlockUnorderedEXT = 5367, + SampleInterlockOrderedEXT = 5368, + SampleInterlockUnorderedEXT = 5369, + ShadingRateInterlockOrderedEXT = 5370, + ShadingRateInterlockUnorderedEXT = 5371, + SharedLocalMemorySizeINTEL = 5618, + RoundingModeRTPINTEL = 5620, + RoundingModeRTNINTEL = 5621, + FloatingPointModeALTINTEL = 5622, + FloatingPointModeIEEEINTEL = 5623, + MaxWorkgroupSizeINTEL = 5893, + MaxWorkDimINTEL = 5894, + NoGlobalOffsetINTEL = 5895, + NumSIMDWorkitemsINTEL = 5896, + SchedulerTargetFmaxMhzINTEL = 5903, }, StorageClass = { @@ -134,6 +181,23 @@ spv = { AtomicCounter = 10, Image = 11, StorageBuffer = 
12, + CallableDataKHR = 5328, + CallableDataNV = 5328, + IncomingCallableDataKHR = 5329, + IncomingCallableDataNV = 5329, + RayPayloadKHR = 5338, + RayPayloadNV = 5338, + HitAttributeKHR = 5339, + HitAttributeNV = 5339, + IncomingRayPayloadKHR = 5342, + IncomingRayPayloadNV = 5342, + ShaderRecordBufferKHR = 5343, + ShaderRecordBufferNV = 5343, + PhysicalStorageBuffer = 5349, + PhysicalStorageBufferEXT = 5349, + CodeSectionINTEL = 5605, + DeviceOnlyINTEL = 5936, + HostOnlyINTEL = 5937, }, Dim = { @@ -200,6 +264,8 @@ spv = { Rg8ui = 37, R16ui = 38, R8ui = 39, + R64ui = 40, + R64i = 41, }, ImageChannelOrder = { @@ -254,6 +320,16 @@ spv = { ConstOffsets = 5, Sample = 6, MinLod = 7, + MakeTexelAvailable = 8, + MakeTexelAvailableKHR = 8, + MakeTexelVisible = 9, + MakeTexelVisibleKHR = 9, + NonPrivateTexel = 10, + NonPrivateTexelKHR = 10, + VolatileTexel = 11, + VolatileTexelKHR = 11, + SignExtend = 12, + ZeroExtend = 13, }, ImageOperandsMask = { @@ -266,6 +342,16 @@ spv = { ConstOffsets = 0x00000020, Sample = 0x00000040, MinLod = 0x00000080, + MakeTexelAvailable = 0x00000100, + MakeTexelAvailableKHR = 0x00000100, + MakeTexelVisible = 0x00000200, + MakeTexelVisibleKHR = 0x00000200, + NonPrivateTexel = 0x00000400, + NonPrivateTexelKHR = 0x00000400, + VolatileTexel = 0x00000800, + VolatileTexelKHR = 0x00000800, + SignExtend = 0x00001000, + ZeroExtend = 0x00002000, }, FPFastMathModeShift = { @@ -274,6 +360,8 @@ spv = { NSZ = 2, AllowRecip = 3, Fast = 4, + AllowContractFastINTEL = 16, + AllowReassocINTEL = 17, }, FPFastMathModeMask = { @@ -283,6 +371,8 @@ spv = { NSZ = 0x00000004, AllowRecip = 0x00000008, Fast = 0x00000010, + AllowContractFastINTEL = 0x00010000, + AllowReassocINTEL = 0x00020000, }, FPRoundingMode = { @@ -341,6 +431,7 @@ spv = { NonWritable = 24, NonReadable = 25, Uniform = 26, + UniformId = 27, SaturatedConversion = 28, Stream = 29, Location = 30, @@ -361,14 +452,62 @@ spv = { MaxByteOffset = 45, AlignmentId = 46, MaxByteOffsetId = 47, + NoSignedWrap = 4469, + NoUnsignedWrap = 4470, ExplicitInterpAMD = 4999, OverrideCoverageNV = 5248, PassthroughNV = 5250, ViewportRelativeNV = 5252, SecondaryViewportRelativeNV = 5256, + PerPrimitiveNV = 5271, + PerViewNV = 5272, + PerTaskNV = 5273, + PerVertexNV = 5285, + NonUniform = 5300, NonUniformEXT = 5300, + RestrictPointer = 5355, + RestrictPointerEXT = 5355, + AliasedPointer = 5356, + AliasedPointerEXT = 5356, + SIMTCallINTEL = 5599, + ReferencedIndirectlyINTEL = 5602, + ClobberINTEL = 5607, + SideEffectsINTEL = 5608, + VectorComputeVariableINTEL = 5624, + FuncParamIOKindINTEL = 5625, + VectorComputeFunctionINTEL = 5626, + StackCallINTEL = 5627, + GlobalVariableOffsetINTEL = 5628, + CounterBuffer = 5634, HlslCounterBufferGOOGLE = 5634, HlslSemanticGOOGLE = 5635, + UserSemantic = 5635, + UserTypeGOOGLE = 5636, + FunctionRoundingModeINTEL = 5822, + FunctionDenormModeINTEL = 5823, + RegisterINTEL = 5825, + MemoryINTEL = 5826, + NumbanksINTEL = 5827, + BankwidthINTEL = 5828, + MaxPrivateCopiesINTEL = 5829, + SinglepumpINTEL = 5830, + DoublepumpINTEL = 5831, + MaxReplicatesINTEL = 5832, + SimpleDualPortINTEL = 5833, + MergeINTEL = 5834, + BankBitsINTEL = 5835, + ForcePow2DepthINTEL = 5836, + BurstCoalesceINTEL = 5899, + CacheSizeINTEL = 5900, + DontStaticallyCoalesceINTEL = 5901, + PrefetchINTEL = 5902, + StallEnableINTEL = 5905, + FuseLoopsInFunctionINTEL = 5907, + BufferLocationINTEL = 5921, + IOPipeStorageINTEL = 5944, + FunctionFloatingPointModeINTEL = 6080, + SingleElementVectorINTEL = 6085, + VectorComputeCallableFunctionINTEL = 6087, }, 
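The header comment carried over in both the spirv.lua and spirv.py files in this patch notes that mask-like tokens can be OR'd together while others are mutually exclusive. A minimal Python sketch of that usage, with a hand-copied subset of the MemoryAccessShift/MemoryAccessMask tables from these listings (the `spv` dict here is illustrative, not the full generated file):

    # Minimal sketch: combining mask-like SPIR-V tokens.
    # Values copied from the MemoryAccessShift/MemoryAccessMask tables in
    # this patch; only an illustrative subset is reproduced.
    spv = {
        'MemoryAccessShift': {'Volatile': 0, 'Aligned': 1, 'MakePointerAvailable': 3},
        'MemoryAccessMask':  {'Volatile': 0x1, 'Aligned': 0x2, 'MakePointerAvailable': 0x8},
    }

    # Mask values are single bits, so they can be OR'd into one operand word.
    access = spv['MemoryAccessMask']['Volatile'] | spv['MemoryAccessMask']['Aligned']
    assert access == 0x3

    # Each Mask entry is exactly 1 << the matching Shift entry, which is why
    # the generated headers emit both a *Shift and a *Mask table.
    for name, shift in spv['MemoryAccessShift'].items():
        assert spv['MemoryAccessMask'][name] == 1 << shift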
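The same tables also carry the module-level constants (MagicNumber, Version, OpCodeMask, WordCountShift). A short sketch of how a consumer might use them, assuming the standard SPIR-V packing of the word count in the high 16 bits and the opcode in the low 16 bits of each instruction's first word — which is what OpCodeMask = 0xffff and WordCountShift = 16 encode:

    # Minimal sketch: decoding with the header constants from these tables.
    MAGIC_NUMBER = 0x07230203     # spv.MagicNumber
    VERSION = 0x00010500          # spv.Version, bytes: 0 | major | minor | 0
    OP_CODE_MASK = 0xFFFF         # spv.OpCodeMask
    WORD_COUNT_SHIFT = 16         # spv.WordCountShift

    def version_tuple(version_word):
        """Split the packed version word into (major, minor)."""
        return (version_word >> 16) & 0xFF, (version_word >> 8) & 0xFF

    def decode_first_word(word):
        """Unpack an instruction's first word into (word count, opcode)."""
        return word >> WORD_COUNT_SHIFT, word & OP_CODE_MASK

    assert version_tuple(VERSION) == (1, 5)   # this patch bumps 1.3 -> 1.5
    # 0x00040015 would be a 4-word instruction with opcode 21 (OpTypeInt).
    assert decode_first_word(0x00040015) == (4, 21)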
BuiltIn = { @@ -426,8 +565,10 @@ spv = { BaseVertex = 4424, BaseInstance = 4425, DrawIndex = 4426, + PrimitiveShadingRateKHR = 4432, DeviceIndex = 4438, ViewIndex = 4440, + ShadingRateKHR = 4444, BaryCoordNoPerspAMD = 4992, BaryCoordNoPerspCentroidAMD = 4993, BaryCoordNoPerspSampleAMD = 4994, @@ -442,6 +583,52 @@ spv = { PositionPerViewNV = 5261, ViewportMaskPerViewNV = 5262, FullyCoveredEXT = 5264, + TaskCountNV = 5274, + PrimitiveCountNV = 5275, + PrimitiveIndicesNV = 5276, + ClipDistancePerViewNV = 5277, + CullDistancePerViewNV = 5278, + LayerPerViewNV = 5279, + MeshViewCountNV = 5280, + MeshViewIndicesNV = 5281, + BaryCoordNV = 5286, + BaryCoordNoPerspNV = 5287, + FragSizeEXT = 5292, + FragmentSizeNV = 5292, + FragInvocationCountEXT = 5293, + InvocationsPerPixelNV = 5293, + LaunchIdKHR = 5319, + LaunchIdNV = 5319, + LaunchSizeKHR = 5320, + LaunchSizeNV = 5320, + WorldRayOriginKHR = 5321, + WorldRayOriginNV = 5321, + WorldRayDirectionKHR = 5322, + WorldRayDirectionNV = 5322, + ObjectRayOriginKHR = 5323, + ObjectRayOriginNV = 5323, + ObjectRayDirectionKHR = 5324, + ObjectRayDirectionNV = 5324, + RayTminKHR = 5325, + RayTminNV = 5325, + RayTmaxKHR = 5326, + RayTmaxNV = 5326, + InstanceCustomIndexKHR = 5327, + InstanceCustomIndexNV = 5327, + ObjectToWorldKHR = 5330, + ObjectToWorldNV = 5330, + WorldToObjectKHR = 5331, + WorldToObjectNV = 5331, + HitTNV = 5332, + HitKindKHR = 5333, + HitKindNV = 5333, + IncomingRayFlagsKHR = 5351, + IncomingRayFlagsNV = 5351, + RayGeometryIndexKHR = 5352, + WarpsPerSMNV = 5374, + SMCountNV = 5375, + WarpIDNV = 5376, + SMIDNV = 5377, }, SelectionControlShift = { @@ -460,6 +647,19 @@ spv = { DontUnroll = 1, DependencyInfinite = 2, DependencyLength = 3, + MinIterations = 4, + MaxIterations = 5, + IterationMultiple = 6, + PeelCount = 7, + PartialCount = 8, + InitiationIntervalINTEL = 16, + MaxConcurrencyINTEL = 17, + DependencyArrayINTEL = 18, + PipelineEnableINTEL = 19, + LoopCoalesceINTEL = 20, + MaxInterleavingINTEL = 21, + SpeculatedIterationsINTEL = 22, + NoFusionINTEL = 23, }, LoopControlMask = { @@ -468,6 +668,19 @@ spv = { DontUnroll = 0x00000002, DependencyInfinite = 0x00000004, DependencyLength = 0x00000008, + MinIterations = 0x00000010, + MaxIterations = 0x00000020, + IterationMultiple = 0x00000040, + PeelCount = 0x00000080, + PartialCount = 0x00000100, + InitiationIntervalINTEL = 0x00010000, + MaxConcurrencyINTEL = 0x00020000, + DependencyArrayINTEL = 0x00040000, + PipelineEnableINTEL = 0x00080000, + LoopCoalesceINTEL = 0x00100000, + MaxInterleavingINTEL = 0x00200000, + SpeculatedIterationsINTEL = 0x00400000, + NoFusionINTEL = 0x00800000, }, FunctionControlShift = { @@ -496,6 +709,13 @@ spv = { CrossWorkgroupMemory = 9, AtomicCounterMemory = 10, ImageMemory = 11, + OutputMemory = 12, + OutputMemoryKHR = 12, + MakeAvailable = 13, + MakeAvailableKHR = 13, + MakeVisible = 14, + MakeVisibleKHR = 14, + Volatile = 15, }, MemorySemanticsMask = { @@ -510,12 +730,25 @@ spv = { CrossWorkgroupMemory = 0x00000200, AtomicCounterMemory = 0x00000400, ImageMemory = 0x00000800, + OutputMemory = 0x00001000, + OutputMemoryKHR = 0x00001000, + MakeAvailable = 0x00002000, + MakeAvailableKHR = 0x00002000, + MakeVisible = 0x00004000, + MakeVisibleKHR = 0x00004000, + Volatile = 0x00008000, }, MemoryAccessShift = { Volatile = 0, Aligned = 1, Nontemporal = 2, + MakePointerAvailable = 3, + MakePointerAvailableKHR = 3, + MakePointerVisible = 4, + MakePointerVisibleKHR = 4, + NonPrivatePointer = 5, + NonPrivatePointerKHR = 5, }, MemoryAccessMask = { @@ -523,6 +756,12 @@ spv = { 
Volatile = 0x00000001, Aligned = 0x00000002, Nontemporal = 0x00000004, + MakePointerAvailable = 0x00000008, + MakePointerAvailableKHR = 0x00000008, + MakePointerVisible = 0x00000010, + MakePointerVisibleKHR = 0x00000010, + NonPrivatePointer = 0x00000020, + NonPrivatePointerKHR = 0x00000020, }, Scope = { @@ -531,6 +770,9 @@ spv = { Workgroup = 2, Subgroup = 3, Invocation = 4, + QueueFamily = 5, + QueueFamilyKHR = 5, + ShaderCallKHR = 6, }, GroupOperation = { @@ -626,8 +868,14 @@ spv = { GroupNonUniformShuffleRelative = 66, GroupNonUniformClustered = 67, GroupNonUniformQuad = 68, + ShaderLayer = 69, + ShaderViewportIndex = 70, + FragmentShadingRateKHR = 4422, SubgroupBallotKHR = 4423, DrawParameters = 4427, + WorkgroupMemoryExplicitLayoutKHR = 4428, + WorkgroupMemoryExplicitLayout8BitAccessKHR = 4429, + WorkgroupMemoryExplicitLayout16BitAccessKHR = 4430, SubgroupVoteKHR = 4431, StorageBuffer16BitAccess = 4433, StorageUniformBufferBlock16 = 4433, @@ -644,11 +892,22 @@ spv = { StorageBuffer8BitAccess = 4448, UniformAndStorageBuffer8BitAccess = 4449, StoragePushConstant8 = 4450, + DenormPreserve = 4464, + DenormFlushToZero = 4465, + SignedZeroInfNanPreserve = 4466, + RoundingModeRTE = 4467, + RoundingModeRTZ = 4468, + RayQueryProvisionalKHR = 4471, + RayQueryKHR = 4472, + RayTraversalPrimitiveCullingKHR = 4478, + RayTracingKHR = 4479, Float16ImageAMD = 5008, ImageGatherBiasLodAMD = 5009, FragmentMaskAMD = 5010, StencilExportEXT = 5013, ImageReadWriteLodAMD = 5015, + Int64ImageEXT = 5016, + ShaderClockKHR = 5055, SampleMaskOverrideCoverageNV = 5249, GeometryShaderPassthroughNV = 5251, ShaderViewportIndexLayerEXT = 5254, @@ -657,22 +916,158 @@ spv = { ShaderStereoViewNV = 5259, PerViewAttributesNV = 5260, FragmentFullyCoveredEXT = 5265, + MeshShadingNV = 5266, + ImageFootprintNV = 5282, + FragmentBarycentricNV = 5284, + ComputeDerivativeGroupQuadsNV = 5288, + FragmentDensityEXT = 5291, + ShadingRateNV = 5291, GroupNonUniformPartitionedNV = 5297, + ShaderNonUniform = 5301, ShaderNonUniformEXT = 5301, + RuntimeDescriptorArray = 5302, RuntimeDescriptorArrayEXT = 5302, + InputAttachmentArrayDynamicIndexing = 5303, InputAttachmentArrayDynamicIndexingEXT = 5303, + UniformTexelBufferArrayDynamicIndexing = 5304, UniformTexelBufferArrayDynamicIndexingEXT = 5304, + StorageTexelBufferArrayDynamicIndexing = 5305, StorageTexelBufferArrayDynamicIndexingEXT = 5305, + UniformBufferArrayNonUniformIndexing = 5306, UniformBufferArrayNonUniformIndexingEXT = 5306, + SampledImageArrayNonUniformIndexing = 5307, SampledImageArrayNonUniformIndexingEXT = 5307, + StorageBufferArrayNonUniformIndexing = 5308, StorageBufferArrayNonUniformIndexingEXT = 5308, + StorageImageArrayNonUniformIndexing = 5309, StorageImageArrayNonUniformIndexingEXT = 5309, + InputAttachmentArrayNonUniformIndexing = 5310, InputAttachmentArrayNonUniformIndexingEXT = 5310, + UniformTexelBufferArrayNonUniformIndexing = 5311, UniformTexelBufferArrayNonUniformIndexingEXT = 5311, + StorageTexelBufferArrayNonUniformIndexing = 5312, StorageTexelBufferArrayNonUniformIndexingEXT = 5312, + RayTracingNV = 5340, + VulkanMemoryModel = 5345, + VulkanMemoryModelKHR = 5345, + VulkanMemoryModelDeviceScope = 5346, + VulkanMemoryModelDeviceScopeKHR = 5346, + PhysicalStorageBufferAddresses = 5347, + PhysicalStorageBufferAddressesEXT = 5347, + ComputeDerivativeGroupLinearNV = 5350, + RayTracingProvisionalKHR = 5353, + CooperativeMatrixNV = 5357, + FragmentShaderSampleInterlockEXT = 5363, + FragmentShaderShadingRateInterlockEXT = 5372, + ShaderSMBuiltinsNV = 5373, + 
FragmentShaderPixelInterlockEXT = 5378, + DemoteToHelperInvocationEXT = 5379, SubgroupShuffleINTEL = 5568, SubgroupBufferBlockIOINTEL = 5569, SubgroupImageBlockIOINTEL = 5570, + SubgroupImageMediaBlockIOINTEL = 5579, + RoundToInfinityINTEL = 5582, + FloatingPointModeINTEL = 5583, + IntegerFunctions2INTEL = 5584, + FunctionPointersINTEL = 5603, + IndirectReferencesINTEL = 5604, + AsmINTEL = 5606, + AtomicFloat32MinMaxEXT = 5612, + AtomicFloat64MinMaxEXT = 5613, + AtomicFloat16MinMaxEXT = 5616, + VectorComputeINTEL = 5617, + VectorAnyINTEL = 5619, + SubgroupAvcMotionEstimationINTEL = 5696, + SubgroupAvcMotionEstimationIntraINTEL = 5697, + SubgroupAvcMotionEstimationChromaINTEL = 5698, + VariableLengthArrayINTEL = 5817, + FunctionFloatControlINTEL = 5821, + FPGAMemoryAttributesINTEL = 5824, + FPFastMathModeINTEL = 5837, + ArbitraryPrecisionIntegersINTEL = 5844, + UnstructuredLoopControlsINTEL = 5886, + FPGALoopControlsINTEL = 5888, + KernelAttributesINTEL = 5892, + FPGAKernelAttributesINTEL = 5897, + FPGAMemoryAccessesINTEL = 5898, + FPGAClusterAttributesINTEL = 5904, + LoopFuseINTEL = 5906, + FPGABufferLocationINTEL = 5920, + USMStorageClassesINTEL = 5935, + IOPipesINTEL = 5943, + BlockingPipesINTEL = 5945, + FPGARegINTEL = 5948, + AtomicFloat32AddEXT = 6033, + AtomicFloat64AddEXT = 6034, + LongConstantCompositeINTEL = 6089, + }, + + RayFlagsShift = { + OpaqueKHR = 0, + NoOpaqueKHR = 1, + TerminateOnFirstHitKHR = 2, + SkipClosestHitShaderKHR = 3, + CullBackFacingTrianglesKHR = 4, + CullFrontFacingTrianglesKHR = 5, + CullOpaqueKHR = 6, + CullNoOpaqueKHR = 7, + SkipTrianglesKHR = 8, + SkipAABBsKHR = 9, + }, + + RayFlagsMask = { + MaskNone = 0, + OpaqueKHR = 0x00000001, + NoOpaqueKHR = 0x00000002, + TerminateOnFirstHitKHR = 0x00000004, + SkipClosestHitShaderKHR = 0x00000008, + CullBackFacingTrianglesKHR = 0x00000010, + CullFrontFacingTrianglesKHR = 0x00000020, + CullOpaqueKHR = 0x00000040, + CullNoOpaqueKHR = 0x00000080, + SkipTrianglesKHR = 0x00000100, + SkipAABBsKHR = 0x00000200, + }, + + RayQueryIntersection = { + RayQueryCandidateIntersectionKHR = 0, + RayQueryCommittedIntersectionKHR = 1, + }, + + RayQueryCommittedIntersectionType = { + RayQueryCommittedIntersectionNoneKHR = 0, + RayQueryCommittedIntersectionTriangleKHR = 1, + RayQueryCommittedIntersectionGeneratedKHR = 2, + }, + + RayQueryCandidateIntersectionType = { + RayQueryCandidateIntersectionTriangleKHR = 0, + RayQueryCandidateIntersectionAABBKHR = 1, + }, + + FragmentShadingRateShift = { + Vertical2Pixels = 0, + Vertical4Pixels = 1, + Horizontal2Pixels = 2, + Horizontal4Pixels = 3, + }, + + FragmentShadingRateMask = { + MaskNone = 0, + Vertical2Pixels = 0x00000001, + Vertical4Pixels = 0x00000002, + Horizontal2Pixels = 0x00000004, + Horizontal4Pixels = 0x00000008, + }, + + FPDenormMode = { + Preserve = 0, + FlushToZero = 1, + }, + + FPOperationMode = { + IEEE = 0, + ALT = 1, }, Op = { @@ -1016,12 +1411,29 @@ spv = { OpGroupNonUniformLogicalXor = 364, OpGroupNonUniformQuadBroadcast = 365, OpGroupNonUniformQuadSwap = 366, + OpCopyLogical = 400, + OpPtrEqual = 401, + OpPtrNotEqual = 402, + OpPtrDiff = 403, + OpTerminateInvocation = 4416, OpSubgroupBallotKHR = 4421, OpSubgroupFirstInvocationKHR = 4422, OpSubgroupAllKHR = 4428, OpSubgroupAnyKHR = 4429, OpSubgroupAllEqualKHR = 4430, OpSubgroupReadInvocationKHR = 4432, + OpTraceRayKHR = 4445, + OpExecuteCallableKHR = 4446, + OpConvertUToAccelerationStructureKHR = 4447, + OpIgnoreIntersectionKHR = 4448, + OpTerminateRayKHR = 4449, + OpTypeRayQueryKHR = 4472, + OpRayQueryInitializeKHR = 
4473, + OpRayQueryTerminateKHR = 4474, + OpRayQueryGenerateIntersectionKHR = 4475, + OpRayQueryConfirmIntersectionKHR = 4476, + OpRayQueryProceedKHR = 4477, + OpRayQueryGetIntersectionTypeKHR = 4479, OpGroupIAddNonUniformAMD = 5000, OpGroupFAddNonUniformAMD = 5001, OpGroupFMinNonUniformAMD = 5002, @@ -1032,7 +1444,27 @@ spv = { OpGroupSMaxNonUniformAMD = 5007, OpFragmentMaskFetchAMD = 5011, OpFragmentFetchAMD = 5012, + OpReadClockKHR = 5056, + OpImageSampleFootprintNV = 5283, OpGroupNonUniformPartitionNV = 5296, + OpWritePackedPrimitiveIndices4x8NV = 5299, + OpReportIntersectionKHR = 5334, + OpReportIntersectionNV = 5334, + OpIgnoreIntersectionNV = 5335, + OpTerminateRayNV = 5336, + OpTraceNV = 5337, + OpTypeAccelerationStructureKHR = 5341, + OpTypeAccelerationStructureNV = 5341, + OpExecuteCallableNV = 5344, + OpTypeCooperativeMatrixNV = 5358, + OpCooperativeMatrixLoadNV = 5359, + OpCooperativeMatrixStoreNV = 5360, + OpCooperativeMatrixMulAddNV = 5361, + OpCooperativeMatrixLengthNV = 5362, + OpBeginInvocationInterlockEXT = 5364, + OpEndInvocationInterlockEXT = 5365, + OpDemoteToHelperInvocationEXT = 5380, + OpIsHelperInvocationEXT = 5381, OpSubgroupShuffleINTEL = 5571, OpSubgroupShuffleDownINTEL = 5572, OpSubgroupShuffleUpINTEL = 5573, @@ -1041,8 +1473,182 @@ spv = { OpSubgroupBlockWriteINTEL = 5576, OpSubgroupImageBlockReadINTEL = 5577, OpSubgroupImageBlockWriteINTEL = 5578, + OpSubgroupImageMediaBlockReadINTEL = 5580, + OpSubgroupImageMediaBlockWriteINTEL = 5581, + OpUCountLeadingZerosINTEL = 5585, + OpUCountTrailingZerosINTEL = 5586, + OpAbsISubINTEL = 5587, + OpAbsUSubINTEL = 5588, + OpIAddSatINTEL = 5589, + OpUAddSatINTEL = 5590, + OpIAverageINTEL = 5591, + OpUAverageINTEL = 5592, + OpIAverageRoundedINTEL = 5593, + OpUAverageRoundedINTEL = 5594, + OpISubSatINTEL = 5595, + OpUSubSatINTEL = 5596, + OpIMul32x16INTEL = 5597, + OpUMul32x16INTEL = 5598, + OpConstFunctionPointerINTEL = 5600, + OpFunctionPointerCallINTEL = 5601, + OpAsmTargetINTEL = 5609, + OpAsmINTEL = 5610, + OpAsmCallINTEL = 5611, + OpAtomicFMinEXT = 5614, + OpAtomicFMaxEXT = 5615, + OpDecorateString = 5632, OpDecorateStringGOOGLE = 5632, + OpMemberDecorateString = 5633, OpMemberDecorateStringGOOGLE = 5633, + OpVmeImageINTEL = 5699, + OpTypeVmeImageINTEL = 5700, + OpTypeAvcImePayloadINTEL = 5701, + OpTypeAvcRefPayloadINTEL = 5702, + OpTypeAvcSicPayloadINTEL = 5703, + OpTypeAvcMcePayloadINTEL = 5704, + OpTypeAvcMceResultINTEL = 5705, + OpTypeAvcImeResultINTEL = 5706, + OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707, + OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708, + OpTypeAvcImeSingleReferenceStreaminINTEL = 5709, + OpTypeAvcImeDualReferenceStreaminINTEL = 5710, + OpTypeAvcRefResultINTEL = 5711, + OpTypeAvcSicResultINTEL = 5712, + OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713, + OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714, + OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715, + OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716, + OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717, + OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718, + OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719, + OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720, + OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721, + OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722, + OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723, + OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724, + 
OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725, + OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726, + OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727, + OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728, + OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729, + OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730, + OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731, + OpSubgroupAvcMceConvertToImePayloadINTEL = 5732, + OpSubgroupAvcMceConvertToImeResultINTEL = 5733, + OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734, + OpSubgroupAvcMceConvertToRefResultINTEL = 5735, + OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736, + OpSubgroupAvcMceConvertToSicResultINTEL = 5737, + OpSubgroupAvcMceGetMotionVectorsINTEL = 5738, + OpSubgroupAvcMceGetInterDistortionsINTEL = 5739, + OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740, + OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741, + OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742, + OpSubgroupAvcMceGetInterDirectionsINTEL = 5743, + OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744, + OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745, + OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746, + OpSubgroupAvcImeInitializeINTEL = 5747, + OpSubgroupAvcImeSetSingleReferenceINTEL = 5748, + OpSubgroupAvcImeSetDualReferenceINTEL = 5749, + OpSubgroupAvcImeRefWindowSizeINTEL = 5750, + OpSubgroupAvcImeAdjustRefOffsetINTEL = 5751, + OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752, + OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753, + OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754, + OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755, + OpSubgroupAvcImeSetWeightedSadINTEL = 5756, + OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757, + OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761, + OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764, + OpSubgroupAvcImeConvertToMceResultINTEL = 5765, + OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766, + OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767, + OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768, + OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775, + OpSubgroupAvcImeGetBorderReachedINTEL = 5776, + OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777, + OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778, + OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779, + OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780, + OpSubgroupAvcFmeInitializeINTEL = 5781, + OpSubgroupAvcBmeInitializeINTEL = 5782, + OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783, + OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784, + 
OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785, + OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786, + OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787, + OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788, + OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789, + OpSubgroupAvcRefConvertToMceResultINTEL = 5790, + OpSubgroupAvcSicInitializeINTEL = 5791, + OpSubgroupAvcSicConfigureSkcINTEL = 5792, + OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793, + OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794, + OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795, + OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796, + OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797, + OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798, + OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799, + OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800, + OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801, + OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802, + OpSubgroupAvcSicEvaluateIpeINTEL = 5803, + OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804, + OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805, + OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806, + OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807, + OpSubgroupAvcSicConvertToMceResultINTEL = 5808, + OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809, + OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810, + OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811, + OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812, + OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813, + OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, + OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, + OpSubgroupAvcSicGetInterRawSadsINTEL = 5816, + OpVariableLengthArrayINTEL = 5818, + OpSaveMemoryINTEL = 5819, + OpRestoreMemoryINTEL = 5820, + OpLoopControlINTEL = 5887, + OpPtrCastToCrossWorkgroupINTEL = 5934, + OpCrossWorkgroupCastToPtrINTEL = 5938, + OpReadPipeBlockingINTEL = 5946, + OpWritePipeBlockingINTEL = 5947, + OpFPGARegINTEL = 5949, + OpRayQueryGetRayTMinKHR = 6016, + OpRayQueryGetRayFlagsKHR = 6017, + OpRayQueryGetIntersectionTKHR = 6018, + OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019, + OpRayQueryGetIntersectionInstanceIdKHR = 6020, + OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021, + OpRayQueryGetIntersectionGeometryIndexKHR = 6022, + OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023, + OpRayQueryGetIntersectionBarycentricsKHR = 6024, + OpRayQueryGetIntersectionFrontFaceKHR = 6025, + OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026, + OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027, + OpRayQueryGetIntersectionObjectRayOriginKHR = 6028, + OpRayQueryGetWorldRayDirectionKHR = 6029, + OpRayQueryGetWorldRayOriginKHR = 6030, + OpRayQueryGetIntersectionObjectToWorldKHR = 6031, + OpRayQueryGetIntersectionWorldToObjectKHR = 6032, + OpAtomicFAddEXT = 6035, + OpTypeBufferSurfaceINTEL = 6086, + OpTypeStructContinuedINTEL = 6090, + OpConstantCompositeContinuedINTEL = 6091, + OpSpecConstantCompositeContinuedINTEL = 6092, }, } diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.py b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.py index cb3775ff9..0829dc1a5 100755 --- a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.py +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spirv.py @@ -1,4 +1,4 @@ -# Copyright (c) 2014-2018 The Khronos Group Inc. 
+# Copyright (c) 2014-2020 The Khronos Group Inc. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and/or associated documentation files (the "Materials"), @@ -26,13 +26,16 @@ # the Binary Section of the SPIR-V specification. # Enumeration tokens for SPIR-V, in various styles: -# C, C++, C++11, JSON, Lua, Python +# C, C++, C++11, JSON, Lua, Python, C#, D # # - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL # - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL # - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL # - Lua will use tables, e.g.: spv.SourceLanguage.GLSL # - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +# - C# will use enum classes in the Specification class located in the "Spv" namespace, +# e.g.: Spv.Specification.SourceLanguage.GLSL +# - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL # # Some tokens act like mask values, which can be OR'd together, # while others are mutually exclusive. The mask-like ones have @@ -41,8 +44,8 @@ spv = { 'MagicNumber' : 0x07230203, - 'Version' : 0x00010300, - 'Revision' : 1, + 'Version' : 0x00010500, + 'Revision' : 4, 'OpCodeMask' : 0xffff, 'WordCountShift' : 16, @@ -63,18 +66,36 @@ spv = { 'Fragment' : 4, 'GLCompute' : 5, 'Kernel' : 6, + 'TaskNV' : 5267, + 'MeshNV' : 5268, + 'RayGenerationKHR' : 5313, + 'RayGenerationNV' : 5313, + 'IntersectionKHR' : 5314, + 'IntersectionNV' : 5314, + 'AnyHitKHR' : 5315, + 'AnyHitNV' : 5315, + 'ClosestHitKHR' : 5316, + 'ClosestHitNV' : 5316, + 'MissKHR' : 5317, + 'MissNV' : 5317, + 'CallableKHR' : 5318, + 'CallableNV' : 5318, }, 'AddressingModel' : { 'Logical' : 0, 'Physical32' : 1, 'Physical64' : 2, + 'PhysicalStorageBuffer64' : 5348, + 'PhysicalStorageBuffer64EXT' : 5348, }, 'MemoryModel' : { 'Simple' : 0, 'GLSL450' : 1, 'OpenCL' : 2, + 'Vulkan' : 3, + 'VulkanKHR' : 3, }, 'ExecutionMode' : { @@ -117,7 +138,33 @@ spv = { 'LocalSizeId' : 38, 'LocalSizeHintId' : 39, 'PostDepthCoverage' : 4446, + 'DenormPreserve' : 4459, + 'DenormFlushToZero' : 4460, + 'SignedZeroInfNanPreserve' : 4461, + 'RoundingModeRTE' : 4462, + 'RoundingModeRTZ' : 4463, 'StencilRefReplacingEXT' : 5027, + 'OutputLinesNV' : 5269, + 'OutputPrimitivesNV' : 5270, + 'DerivativeGroupQuadsNV' : 5289, + 'DerivativeGroupLinearNV' : 5290, + 'OutputTrianglesNV' : 5298, + 'PixelInterlockOrderedEXT' : 5366, + 'PixelInterlockUnorderedEXT' : 5367, + 'SampleInterlockOrderedEXT' : 5368, + 'SampleInterlockUnorderedEXT' : 5369, + 'ShadingRateInterlockOrderedEXT' : 5370, + 'ShadingRateInterlockUnorderedEXT' : 5371, + 'SharedLocalMemorySizeINTEL' : 5618, + 'RoundingModeRTPINTEL' : 5620, + 'RoundingModeRTNINTEL' : 5621, + 'FloatingPointModeALTINTEL' : 5622, + 'FloatingPointModeIEEEINTEL' : 5623, + 'MaxWorkgroupSizeINTEL' : 5893, + 'MaxWorkDimINTEL' : 5894, + 'NoGlobalOffsetINTEL' : 5895, + 'NumSIMDWorkitemsINTEL' : 5896, + 'SchedulerTargetFmaxMhzINTEL' : 5903, }, 'StorageClass' : { @@ -134,6 +181,23 @@ spv = { 'AtomicCounter' : 10, 'Image' : 11, 'StorageBuffer' : 12, + 'CallableDataKHR' : 5328, + 'CallableDataNV' : 5328, + 'IncomingCallableDataKHR' : 5329, + 'IncomingCallableDataNV' : 5329, + 'RayPayloadKHR' : 5338, + 'RayPayloadNV' : 5338, + 'HitAttributeKHR' : 5339, + 'HitAttributeNV' : 5339, + 'IncomingRayPayloadKHR' : 5342, + 'IncomingRayPayloadNV' : 5342, + 'ShaderRecordBufferKHR' : 5343, + 'ShaderRecordBufferNV' : 5343, + 'PhysicalStorageBuffer' : 5349, + 'PhysicalStorageBufferEXT' : 
5349, + 'CodeSectionINTEL' : 5605, + 'DeviceOnlyINTEL' : 5936, + 'HostOnlyINTEL' : 5937, }, 'Dim' : { @@ -200,6 +264,8 @@ spv = { 'Rg8ui' : 37, 'R16ui' : 38, 'R8ui' : 39, + 'R64ui' : 40, + 'R64i' : 41, }, 'ImageChannelOrder' : { @@ -254,6 +320,16 @@ spv = { 'ConstOffsets' : 5, 'Sample' : 6, 'MinLod' : 7, + 'MakeTexelAvailable' : 8, + 'MakeTexelAvailableKHR' : 8, + 'MakeTexelVisible' : 9, + 'MakeTexelVisibleKHR' : 9, + 'NonPrivateTexel' : 10, + 'NonPrivateTexelKHR' : 10, + 'VolatileTexel' : 11, + 'VolatileTexelKHR' : 11, + 'SignExtend' : 12, + 'ZeroExtend' : 13, }, 'ImageOperandsMask' : { @@ -266,6 +342,16 @@ spv = { 'ConstOffsets' : 0x00000020, 'Sample' : 0x00000040, 'MinLod' : 0x00000080, + 'MakeTexelAvailable' : 0x00000100, + 'MakeTexelAvailableKHR' : 0x00000100, + 'MakeTexelVisible' : 0x00000200, + 'MakeTexelVisibleKHR' : 0x00000200, + 'NonPrivateTexel' : 0x00000400, + 'NonPrivateTexelKHR' : 0x00000400, + 'VolatileTexel' : 0x00000800, + 'VolatileTexelKHR' : 0x00000800, + 'SignExtend' : 0x00001000, + 'ZeroExtend' : 0x00002000, }, 'FPFastMathModeShift' : { @@ -274,6 +360,8 @@ spv = { 'NSZ' : 2, 'AllowRecip' : 3, 'Fast' : 4, + 'AllowContractFastINTEL' : 16, + 'AllowReassocINTEL' : 17, }, 'FPFastMathModeMask' : { @@ -283,6 +371,8 @@ spv = { 'NSZ' : 0x00000004, 'AllowRecip' : 0x00000008, 'Fast' : 0x00000010, + 'AllowContractFastINTEL' : 0x00010000, + 'AllowReassocINTEL' : 0x00020000, }, 'FPRoundingMode' : { @@ -341,6 +431,7 @@ spv = { 'NonWritable' : 24, 'NonReadable' : 25, 'Uniform' : 26, + 'UniformId' : 27, 'SaturatedConversion' : 28, 'Stream' : 29, 'Location' : 30, @@ -361,14 +452,62 @@ spv = { 'MaxByteOffset' : 45, 'AlignmentId' : 46, 'MaxByteOffsetId' : 47, + 'NoSignedWrap' : 4469, + 'NoUnsignedWrap' : 4470, 'ExplicitInterpAMD' : 4999, 'OverrideCoverageNV' : 5248, 'PassthroughNV' : 5250, 'ViewportRelativeNV' : 5252, 'SecondaryViewportRelativeNV' : 5256, + 'PerPrimitiveNV' : 5271, + 'PerViewNV' : 5272, + 'PerTaskNV' : 5273, + 'PerVertexNV' : 5285, + 'NonUniform' : 5300, 'NonUniformEXT' : 5300, + 'RestrictPointer' : 5355, + 'RestrictPointerEXT' : 5355, + 'AliasedPointer' : 5356, + 'AliasedPointerEXT' : 5356, + 'SIMTCallINTEL' : 5599, + 'ReferencedIndirectlyINTEL' : 5602, + 'ClobberINTEL' : 5607, + 'SideEffectsINTEL' : 5608, + 'VectorComputeVariableINTEL' : 5624, + 'FuncParamIOKindINTEL' : 5625, + 'VectorComputeFunctionINTEL' : 5626, + 'StackCallINTEL' : 5627, + 'GlobalVariableOffsetINTEL' : 5628, + 'CounterBuffer' : 5634, 'HlslCounterBufferGOOGLE' : 5634, 'HlslSemanticGOOGLE' : 5635, + 'UserSemantic' : 5635, + 'UserTypeGOOGLE' : 5636, + 'FunctionRoundingModeINTEL' : 5822, + 'FunctionDenormModeINTEL' : 5823, + 'RegisterINTEL' : 5825, + 'MemoryINTEL' : 5826, + 'NumbanksINTEL' : 5827, + 'BankwidthINTEL' : 5828, + 'MaxPrivateCopiesINTEL' : 5829, + 'SinglepumpINTEL' : 5830, + 'DoublepumpINTEL' : 5831, + 'MaxReplicatesINTEL' : 5832, + 'SimpleDualPortINTEL' : 5833, + 'MergeINTEL' : 5834, + 'BankBitsINTEL' : 5835, + 'ForcePow2DepthINTEL' : 5836, + 'BurstCoalesceINTEL' : 5899, + 'CacheSizeINTEL' : 5900, + 'DontStaticallyCoalesceINTEL' : 5901, + 'PrefetchINTEL' : 5902, + 'StallEnableINTEL' : 5905, + 'FuseLoopsInFunctionINTEL' : 5907, + 'BufferLocationINTEL' : 5921, + 'IOPipeStorageINTEL' : 5944, + 'FunctionFloatingPointModeINTEL' : 6080, + 'SingleElementVectorINTEL' : 6085, + 'VectorComputeCallableFunctionINTEL' : 6087, }, 'BuiltIn' : { @@ -426,8 +565,10 @@ spv = { 'BaseVertex' : 4424, 'BaseInstance' : 4425, 'DrawIndex' : 4426, + 'PrimitiveShadingRateKHR' : 4432, 'DeviceIndex' : 4438, 'ViewIndex' : 
4440, + 'ShadingRateKHR' : 4444, 'BaryCoordNoPerspAMD' : 4992, 'BaryCoordNoPerspCentroidAMD' : 4993, 'BaryCoordNoPerspSampleAMD' : 4994, @@ -442,6 +583,52 @@ spv = { 'PositionPerViewNV' : 5261, 'ViewportMaskPerViewNV' : 5262, 'FullyCoveredEXT' : 5264, + 'TaskCountNV' : 5274, + 'PrimitiveCountNV' : 5275, + 'PrimitiveIndicesNV' : 5276, + 'ClipDistancePerViewNV' : 5277, + 'CullDistancePerViewNV' : 5278, + 'LayerPerViewNV' : 5279, + 'MeshViewCountNV' : 5280, + 'MeshViewIndicesNV' : 5281, + 'BaryCoordNV' : 5286, + 'BaryCoordNoPerspNV' : 5287, + 'FragSizeEXT' : 5292, + 'FragmentSizeNV' : 5292, + 'FragInvocationCountEXT' : 5293, + 'InvocationsPerPixelNV' : 5293, + 'LaunchIdKHR' : 5319, + 'LaunchIdNV' : 5319, + 'LaunchSizeKHR' : 5320, + 'LaunchSizeNV' : 5320, + 'WorldRayOriginKHR' : 5321, + 'WorldRayOriginNV' : 5321, + 'WorldRayDirectionKHR' : 5322, + 'WorldRayDirectionNV' : 5322, + 'ObjectRayOriginKHR' : 5323, + 'ObjectRayOriginNV' : 5323, + 'ObjectRayDirectionKHR' : 5324, + 'ObjectRayDirectionNV' : 5324, + 'RayTminKHR' : 5325, + 'RayTminNV' : 5325, + 'RayTmaxKHR' : 5326, + 'RayTmaxNV' : 5326, + 'InstanceCustomIndexKHR' : 5327, + 'InstanceCustomIndexNV' : 5327, + 'ObjectToWorldKHR' : 5330, + 'ObjectToWorldNV' : 5330, + 'WorldToObjectKHR' : 5331, + 'WorldToObjectNV' : 5331, + 'HitTNV' : 5332, + 'HitKindKHR' : 5333, + 'HitKindNV' : 5333, + 'IncomingRayFlagsKHR' : 5351, + 'IncomingRayFlagsNV' : 5351, + 'RayGeometryIndexKHR' : 5352, + 'WarpsPerSMNV' : 5374, + 'SMCountNV' : 5375, + 'WarpIDNV' : 5376, + 'SMIDNV' : 5377, }, 'SelectionControlShift' : { @@ -460,6 +647,19 @@ spv = { 'DontUnroll' : 1, 'DependencyInfinite' : 2, 'DependencyLength' : 3, + 'MinIterations' : 4, + 'MaxIterations' : 5, + 'IterationMultiple' : 6, + 'PeelCount' : 7, + 'PartialCount' : 8, + 'InitiationIntervalINTEL' : 16, + 'MaxConcurrencyINTEL' : 17, + 'DependencyArrayINTEL' : 18, + 'PipelineEnableINTEL' : 19, + 'LoopCoalesceINTEL' : 20, + 'MaxInterleavingINTEL' : 21, + 'SpeculatedIterationsINTEL' : 22, + 'NoFusionINTEL' : 23, }, 'LoopControlMask' : { @@ -468,6 +668,19 @@ spv = { 'DontUnroll' : 0x00000002, 'DependencyInfinite' : 0x00000004, 'DependencyLength' : 0x00000008, + 'MinIterations' : 0x00000010, + 'MaxIterations' : 0x00000020, + 'IterationMultiple' : 0x00000040, + 'PeelCount' : 0x00000080, + 'PartialCount' : 0x00000100, + 'InitiationIntervalINTEL' : 0x00010000, + 'MaxConcurrencyINTEL' : 0x00020000, + 'DependencyArrayINTEL' : 0x00040000, + 'PipelineEnableINTEL' : 0x00080000, + 'LoopCoalesceINTEL' : 0x00100000, + 'MaxInterleavingINTEL' : 0x00200000, + 'SpeculatedIterationsINTEL' : 0x00400000, + 'NoFusionINTEL' : 0x00800000, }, 'FunctionControlShift' : { @@ -496,6 +709,13 @@ spv = { 'CrossWorkgroupMemory' : 9, 'AtomicCounterMemory' : 10, 'ImageMemory' : 11, + 'OutputMemory' : 12, + 'OutputMemoryKHR' : 12, + 'MakeAvailable' : 13, + 'MakeAvailableKHR' : 13, + 'MakeVisible' : 14, + 'MakeVisibleKHR' : 14, + 'Volatile' : 15, }, 'MemorySemanticsMask' : { @@ -510,12 +730,25 @@ spv = { 'CrossWorkgroupMemory' : 0x00000200, 'AtomicCounterMemory' : 0x00000400, 'ImageMemory' : 0x00000800, + 'OutputMemory' : 0x00001000, + 'OutputMemoryKHR' : 0x00001000, + 'MakeAvailable' : 0x00002000, + 'MakeAvailableKHR' : 0x00002000, + 'MakeVisible' : 0x00004000, + 'MakeVisibleKHR' : 0x00004000, + 'Volatile' : 0x00008000, }, 'MemoryAccessShift' : { 'Volatile' : 0, 'Aligned' : 1, 'Nontemporal' : 2, + 'MakePointerAvailable' : 3, + 'MakePointerAvailableKHR' : 3, + 'MakePointerVisible' : 4, + 'MakePointerVisibleKHR' : 4, + 'NonPrivatePointer' : 5, + 
'NonPrivatePointerKHR' : 5, }, 'MemoryAccessMask' : { @@ -523,6 +756,12 @@ spv = { 'Volatile' : 0x00000001, 'Aligned' : 0x00000002, 'Nontemporal' : 0x00000004, + 'MakePointerAvailable' : 0x00000008, + 'MakePointerAvailableKHR' : 0x00000008, + 'MakePointerVisible' : 0x00000010, + 'MakePointerVisibleKHR' : 0x00000010, + 'NonPrivatePointer' : 0x00000020, + 'NonPrivatePointerKHR' : 0x00000020, }, 'Scope' : { @@ -531,6 +770,9 @@ spv = { 'Workgroup' : 2, 'Subgroup' : 3, 'Invocation' : 4, + 'QueueFamily' : 5, + 'QueueFamilyKHR' : 5, + 'ShaderCallKHR' : 6, }, 'GroupOperation' : { @@ -626,8 +868,14 @@ spv = { 'GroupNonUniformShuffleRelative' : 66, 'GroupNonUniformClustered' : 67, 'GroupNonUniformQuad' : 68, + 'ShaderLayer' : 69, + 'ShaderViewportIndex' : 70, + 'FragmentShadingRateKHR' : 4422, 'SubgroupBallotKHR' : 4423, 'DrawParameters' : 4427, + 'WorkgroupMemoryExplicitLayoutKHR' : 4428, + 'WorkgroupMemoryExplicitLayout8BitAccessKHR' : 4429, + 'WorkgroupMemoryExplicitLayout16BitAccessKHR' : 4430, 'SubgroupVoteKHR' : 4431, 'StorageBuffer16BitAccess' : 4433, 'StorageUniformBufferBlock16' : 4433, @@ -644,11 +892,22 @@ spv = { 'StorageBuffer8BitAccess' : 4448, 'UniformAndStorageBuffer8BitAccess' : 4449, 'StoragePushConstant8' : 4450, + 'DenormPreserve' : 4464, + 'DenormFlushToZero' : 4465, + 'SignedZeroInfNanPreserve' : 4466, + 'RoundingModeRTE' : 4467, + 'RoundingModeRTZ' : 4468, + 'RayQueryProvisionalKHR' : 4471, + 'RayQueryKHR' : 4472, + 'RayTraversalPrimitiveCullingKHR' : 4478, + 'RayTracingKHR' : 4479, 'Float16ImageAMD' : 5008, 'ImageGatherBiasLodAMD' : 5009, 'FragmentMaskAMD' : 5010, 'StencilExportEXT' : 5013, 'ImageReadWriteLodAMD' : 5015, + 'Int64ImageEXT' : 5016, + 'ShaderClockKHR' : 5055, 'SampleMaskOverrideCoverageNV' : 5249, 'GeometryShaderPassthroughNV' : 5251, 'ShaderViewportIndexLayerEXT' : 5254, @@ -657,22 +916,158 @@ spv = { 'ShaderStereoViewNV' : 5259, 'PerViewAttributesNV' : 5260, 'FragmentFullyCoveredEXT' : 5265, + 'MeshShadingNV' : 5266, + 'ImageFootprintNV' : 5282, + 'FragmentBarycentricNV' : 5284, + 'ComputeDerivativeGroupQuadsNV' : 5288, + 'FragmentDensityEXT' : 5291, + 'ShadingRateNV' : 5291, 'GroupNonUniformPartitionedNV' : 5297, + 'ShaderNonUniform' : 5301, 'ShaderNonUniformEXT' : 5301, + 'RuntimeDescriptorArray' : 5302, 'RuntimeDescriptorArrayEXT' : 5302, + 'InputAttachmentArrayDynamicIndexing' : 5303, 'InputAttachmentArrayDynamicIndexingEXT' : 5303, + 'UniformTexelBufferArrayDynamicIndexing' : 5304, 'UniformTexelBufferArrayDynamicIndexingEXT' : 5304, + 'StorageTexelBufferArrayDynamicIndexing' : 5305, 'StorageTexelBufferArrayDynamicIndexingEXT' : 5305, + 'UniformBufferArrayNonUniformIndexing' : 5306, 'UniformBufferArrayNonUniformIndexingEXT' : 5306, + 'SampledImageArrayNonUniformIndexing' : 5307, 'SampledImageArrayNonUniformIndexingEXT' : 5307, + 'StorageBufferArrayNonUniformIndexing' : 5308, 'StorageBufferArrayNonUniformIndexingEXT' : 5308, + 'StorageImageArrayNonUniformIndexing' : 5309, 'StorageImageArrayNonUniformIndexingEXT' : 5309, + 'InputAttachmentArrayNonUniformIndexing' : 5310, 'InputAttachmentArrayNonUniformIndexingEXT' : 5310, + 'UniformTexelBufferArrayNonUniformIndexing' : 5311, 'UniformTexelBufferArrayNonUniformIndexingEXT' : 5311, + 'StorageTexelBufferArrayNonUniformIndexing' : 5312, 'StorageTexelBufferArrayNonUniformIndexingEXT' : 5312, + 'RayTracingNV' : 5340, + 'VulkanMemoryModel' : 5345, + 'VulkanMemoryModelKHR' : 5345, + 'VulkanMemoryModelDeviceScope' : 5346, + 'VulkanMemoryModelDeviceScopeKHR' : 5346, + 'PhysicalStorageBufferAddresses' : 5347, + 
'PhysicalStorageBufferAddressesEXT' : 5347, + 'ComputeDerivativeGroupLinearNV' : 5350, + 'RayTracingProvisionalKHR' : 5353, + 'CooperativeMatrixNV' : 5357, + 'FragmentShaderSampleInterlockEXT' : 5363, + 'FragmentShaderShadingRateInterlockEXT' : 5372, + 'ShaderSMBuiltinsNV' : 5373, + 'FragmentShaderPixelInterlockEXT' : 5378, + 'DemoteToHelperInvocationEXT' : 5379, 'SubgroupShuffleINTEL' : 5568, 'SubgroupBufferBlockIOINTEL' : 5569, 'SubgroupImageBlockIOINTEL' : 5570, + 'SubgroupImageMediaBlockIOINTEL' : 5579, + 'RoundToInfinityINTEL' : 5582, + 'FloatingPointModeINTEL' : 5583, + 'IntegerFunctions2INTEL' : 5584, + 'FunctionPointersINTEL' : 5603, + 'IndirectReferencesINTEL' : 5604, + 'AsmINTEL' : 5606, + 'AtomicFloat32MinMaxEXT' : 5612, + 'AtomicFloat64MinMaxEXT' : 5613, + 'AtomicFloat16MinMaxEXT' : 5616, + 'VectorComputeINTEL' : 5617, + 'VectorAnyINTEL' : 5619, + 'SubgroupAvcMotionEstimationINTEL' : 5696, + 'SubgroupAvcMotionEstimationIntraINTEL' : 5697, + 'SubgroupAvcMotionEstimationChromaINTEL' : 5698, + 'VariableLengthArrayINTEL' : 5817, + 'FunctionFloatControlINTEL' : 5821, + 'FPGAMemoryAttributesINTEL' : 5824, + 'FPFastMathModeINTEL' : 5837, + 'ArbitraryPrecisionIntegersINTEL' : 5844, + 'UnstructuredLoopControlsINTEL' : 5886, + 'FPGALoopControlsINTEL' : 5888, + 'KernelAttributesINTEL' : 5892, + 'FPGAKernelAttributesINTEL' : 5897, + 'FPGAMemoryAccessesINTEL' : 5898, + 'FPGAClusterAttributesINTEL' : 5904, + 'LoopFuseINTEL' : 5906, + 'FPGABufferLocationINTEL' : 5920, + 'USMStorageClassesINTEL' : 5935, + 'IOPipesINTEL' : 5943, + 'BlockingPipesINTEL' : 5945, + 'FPGARegINTEL' : 5948, + 'AtomicFloat32AddEXT' : 6033, + 'AtomicFloat64AddEXT' : 6034, + 'LongConstantCompositeINTEL' : 6089, + }, + + 'RayFlagsShift' : { + 'OpaqueKHR' : 0, + 'NoOpaqueKHR' : 1, + 'TerminateOnFirstHitKHR' : 2, + 'SkipClosestHitShaderKHR' : 3, + 'CullBackFacingTrianglesKHR' : 4, + 'CullFrontFacingTrianglesKHR' : 5, + 'CullOpaqueKHR' : 6, + 'CullNoOpaqueKHR' : 7, + 'SkipTrianglesKHR' : 8, + 'SkipAABBsKHR' : 9, + }, + + 'RayFlagsMask' : { + 'MaskNone' : 0, + 'OpaqueKHR' : 0x00000001, + 'NoOpaqueKHR' : 0x00000002, + 'TerminateOnFirstHitKHR' : 0x00000004, + 'SkipClosestHitShaderKHR' : 0x00000008, + 'CullBackFacingTrianglesKHR' : 0x00000010, + 'CullFrontFacingTrianglesKHR' : 0x00000020, + 'CullOpaqueKHR' : 0x00000040, + 'CullNoOpaqueKHR' : 0x00000080, + 'SkipTrianglesKHR' : 0x00000100, + 'SkipAABBsKHR' : 0x00000200, + }, + + 'RayQueryIntersection' : { + 'RayQueryCandidateIntersectionKHR' : 0, + 'RayQueryCommittedIntersectionKHR' : 1, + }, + + 'RayQueryCommittedIntersectionType' : { + 'RayQueryCommittedIntersectionNoneKHR' : 0, + 'RayQueryCommittedIntersectionTriangleKHR' : 1, + 'RayQueryCommittedIntersectionGeneratedKHR' : 2, + }, + + 'RayQueryCandidateIntersectionType' : { + 'RayQueryCandidateIntersectionTriangleKHR' : 0, + 'RayQueryCandidateIntersectionAABBKHR' : 1, + }, + + 'FragmentShadingRateShift' : { + 'Vertical2Pixels' : 0, + 'Vertical4Pixels' : 1, + 'Horizontal2Pixels' : 2, + 'Horizontal4Pixels' : 3, + }, + + 'FragmentShadingRateMask' : { + 'MaskNone' : 0, + 'Vertical2Pixels' : 0x00000001, + 'Vertical4Pixels' : 0x00000002, + 'Horizontal2Pixels' : 0x00000004, + 'Horizontal4Pixels' : 0x00000008, + }, + + 'FPDenormMode' : { + 'Preserve' : 0, + 'FlushToZero' : 1, + }, + + 'FPOperationMode' : { + 'IEEE' : 0, + 'ALT' : 1, }, 'Op' : { @@ -1016,12 +1411,29 @@ spv = { 'OpGroupNonUniformLogicalXor' : 364, 'OpGroupNonUniformQuadBroadcast' : 365, 'OpGroupNonUniformQuadSwap' : 366, + 'OpCopyLogical' : 400, + 'OpPtrEqual' : 401, + 
'OpPtrNotEqual' : 402, + 'OpPtrDiff' : 403, + 'OpTerminateInvocation' : 4416, 'OpSubgroupBallotKHR' : 4421, 'OpSubgroupFirstInvocationKHR' : 4422, 'OpSubgroupAllKHR' : 4428, 'OpSubgroupAnyKHR' : 4429, 'OpSubgroupAllEqualKHR' : 4430, 'OpSubgroupReadInvocationKHR' : 4432, + 'OpTraceRayKHR' : 4445, + 'OpExecuteCallableKHR' : 4446, + 'OpConvertUToAccelerationStructureKHR' : 4447, + 'OpIgnoreIntersectionKHR' : 4448, + 'OpTerminateRayKHR' : 4449, + 'OpTypeRayQueryKHR' : 4472, + 'OpRayQueryInitializeKHR' : 4473, + 'OpRayQueryTerminateKHR' : 4474, + 'OpRayQueryGenerateIntersectionKHR' : 4475, + 'OpRayQueryConfirmIntersectionKHR' : 4476, + 'OpRayQueryProceedKHR' : 4477, + 'OpRayQueryGetIntersectionTypeKHR' : 4479, 'OpGroupIAddNonUniformAMD' : 5000, 'OpGroupFAddNonUniformAMD' : 5001, 'OpGroupFMinNonUniformAMD' : 5002, @@ -1032,7 +1444,27 @@ spv = { 'OpGroupSMaxNonUniformAMD' : 5007, 'OpFragmentMaskFetchAMD' : 5011, 'OpFragmentFetchAMD' : 5012, + 'OpReadClockKHR' : 5056, + 'OpImageSampleFootprintNV' : 5283, 'OpGroupNonUniformPartitionNV' : 5296, + 'OpWritePackedPrimitiveIndices4x8NV' : 5299, + 'OpReportIntersectionKHR' : 5334, + 'OpReportIntersectionNV' : 5334, + 'OpIgnoreIntersectionNV' : 5335, + 'OpTerminateRayNV' : 5336, + 'OpTraceNV' : 5337, + 'OpTypeAccelerationStructureKHR' : 5341, + 'OpTypeAccelerationStructureNV' : 5341, + 'OpExecuteCallableNV' : 5344, + 'OpTypeCooperativeMatrixNV' : 5358, + 'OpCooperativeMatrixLoadNV' : 5359, + 'OpCooperativeMatrixStoreNV' : 5360, + 'OpCooperativeMatrixMulAddNV' : 5361, + 'OpCooperativeMatrixLengthNV' : 5362, + 'OpBeginInvocationInterlockEXT' : 5364, + 'OpEndInvocationInterlockEXT' : 5365, + 'OpDemoteToHelperInvocationEXT' : 5380, + 'OpIsHelperInvocationEXT' : 5381, 'OpSubgroupShuffleINTEL' : 5571, 'OpSubgroupShuffleDownINTEL' : 5572, 'OpSubgroupShuffleUpINTEL' : 5573, @@ -1041,8 +1473,182 @@ spv = { 'OpSubgroupBlockWriteINTEL' : 5576, 'OpSubgroupImageBlockReadINTEL' : 5577, 'OpSubgroupImageBlockWriteINTEL' : 5578, + 'OpSubgroupImageMediaBlockReadINTEL' : 5580, + 'OpSubgroupImageMediaBlockWriteINTEL' : 5581, + 'OpUCountLeadingZerosINTEL' : 5585, + 'OpUCountTrailingZerosINTEL' : 5586, + 'OpAbsISubINTEL' : 5587, + 'OpAbsUSubINTEL' : 5588, + 'OpIAddSatINTEL' : 5589, + 'OpUAddSatINTEL' : 5590, + 'OpIAverageINTEL' : 5591, + 'OpUAverageINTEL' : 5592, + 'OpIAverageRoundedINTEL' : 5593, + 'OpUAverageRoundedINTEL' : 5594, + 'OpISubSatINTEL' : 5595, + 'OpUSubSatINTEL' : 5596, + 'OpIMul32x16INTEL' : 5597, + 'OpUMul32x16INTEL' : 5598, + 'OpConstFunctionPointerINTEL' : 5600, + 'OpFunctionPointerCallINTEL' : 5601, + 'OpAsmTargetINTEL' : 5609, + 'OpAsmINTEL' : 5610, + 'OpAsmCallINTEL' : 5611, + 'OpAtomicFMinEXT' : 5614, + 'OpAtomicFMaxEXT' : 5615, + 'OpDecorateString' : 5632, 'OpDecorateStringGOOGLE' : 5632, + 'OpMemberDecorateString' : 5633, 'OpMemberDecorateStringGOOGLE' : 5633, + 'OpVmeImageINTEL' : 5699, + 'OpTypeVmeImageINTEL' : 5700, + 'OpTypeAvcImePayloadINTEL' : 5701, + 'OpTypeAvcRefPayloadINTEL' : 5702, + 'OpTypeAvcSicPayloadINTEL' : 5703, + 'OpTypeAvcMcePayloadINTEL' : 5704, + 'OpTypeAvcMceResultINTEL' : 5705, + 'OpTypeAvcImeResultINTEL' : 5706, + 'OpTypeAvcImeResultSingleReferenceStreamoutINTEL' : 5707, + 'OpTypeAvcImeResultDualReferenceStreamoutINTEL' : 5708, + 'OpTypeAvcImeSingleReferenceStreaminINTEL' : 5709, + 'OpTypeAvcImeDualReferenceStreaminINTEL' : 5710, + 'OpTypeAvcRefResultINTEL' : 5711, + 'OpTypeAvcSicResultINTEL' : 5712, + 'OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL' : 5713, + 
'OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL' : 5714, + 'OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL' : 5715, + 'OpSubgroupAvcMceSetInterShapePenaltyINTEL' : 5716, + 'OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL' : 5717, + 'OpSubgroupAvcMceSetInterDirectionPenaltyINTEL' : 5718, + 'OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL' : 5719, + 'OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL' : 5720, + 'OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL' : 5721, + 'OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL' : 5722, + 'OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL' : 5723, + 'OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL' : 5724, + 'OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL' : 5725, + 'OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL' : 5726, + 'OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL' : 5727, + 'OpSubgroupAvcMceSetAcOnlyHaarINTEL' : 5728, + 'OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL' : 5729, + 'OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL' : 5730, + 'OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL' : 5731, + 'OpSubgroupAvcMceConvertToImePayloadINTEL' : 5732, + 'OpSubgroupAvcMceConvertToImeResultINTEL' : 5733, + 'OpSubgroupAvcMceConvertToRefPayloadINTEL' : 5734, + 'OpSubgroupAvcMceConvertToRefResultINTEL' : 5735, + 'OpSubgroupAvcMceConvertToSicPayloadINTEL' : 5736, + 'OpSubgroupAvcMceConvertToSicResultINTEL' : 5737, + 'OpSubgroupAvcMceGetMotionVectorsINTEL' : 5738, + 'OpSubgroupAvcMceGetInterDistortionsINTEL' : 5739, + 'OpSubgroupAvcMceGetBestInterDistortionsINTEL' : 5740, + 'OpSubgroupAvcMceGetInterMajorShapeINTEL' : 5741, + 'OpSubgroupAvcMceGetInterMinorShapeINTEL' : 5742, + 'OpSubgroupAvcMceGetInterDirectionsINTEL' : 5743, + 'OpSubgroupAvcMceGetInterMotionVectorCountINTEL' : 5744, + 'OpSubgroupAvcMceGetInterReferenceIdsINTEL' : 5745, + 'OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL' : 5746, + 'OpSubgroupAvcImeInitializeINTEL' : 5747, + 'OpSubgroupAvcImeSetSingleReferenceINTEL' : 5748, + 'OpSubgroupAvcImeSetDualReferenceINTEL' : 5749, + 'OpSubgroupAvcImeRefWindowSizeINTEL' : 5750, + 'OpSubgroupAvcImeAdjustRefOffsetINTEL' : 5751, + 'OpSubgroupAvcImeConvertToMcePayloadINTEL' : 5752, + 'OpSubgroupAvcImeSetMaxMotionVectorCountINTEL' : 5753, + 'OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL' : 5754, + 'OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL' : 5755, + 'OpSubgroupAvcImeSetWeightedSadINTEL' : 5756, + 'OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL' : 5757, + 'OpSubgroupAvcImeEvaluateWithDualReferenceINTEL' : 5758, + 'OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL' : 5759, + 'OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL' : 5760, + 'OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL' : 5761, + 'OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL' : 5762, + 'OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL' : 5763, + 'OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL' : 5764, + 'OpSubgroupAvcImeConvertToMceResultINTEL' : 5765, + 'OpSubgroupAvcImeGetSingleReferenceStreaminINTEL' : 5766, + 'OpSubgroupAvcImeGetDualReferenceStreaminINTEL' : 5767, + 'OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL' : 5768, + 'OpSubgroupAvcImeStripDualReferenceStreamoutINTEL' : 5769, + 'OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL' : 5770, + 'OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL' : 5771, + 
'OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL' : 5772, + 'OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL' : 5773, + 'OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL' : 5774, + 'OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL' : 5775, + 'OpSubgroupAvcImeGetBorderReachedINTEL' : 5776, + 'OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL' : 5777, + 'OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL' : 5778, + 'OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL' : 5779, + 'OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL' : 5780, + 'OpSubgroupAvcFmeInitializeINTEL' : 5781, + 'OpSubgroupAvcBmeInitializeINTEL' : 5782, + 'OpSubgroupAvcRefConvertToMcePayloadINTEL' : 5783, + 'OpSubgroupAvcRefSetBidirectionalMixDisableINTEL' : 5784, + 'OpSubgroupAvcRefSetBilinearFilterEnableINTEL' : 5785, + 'OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL' : 5786, + 'OpSubgroupAvcRefEvaluateWithDualReferenceINTEL' : 5787, + 'OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL' : 5788, + 'OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL' : 5789, + 'OpSubgroupAvcRefConvertToMceResultINTEL' : 5790, + 'OpSubgroupAvcSicInitializeINTEL' : 5791, + 'OpSubgroupAvcSicConfigureSkcINTEL' : 5792, + 'OpSubgroupAvcSicConfigureIpeLumaINTEL' : 5793, + 'OpSubgroupAvcSicConfigureIpeLumaChromaINTEL' : 5794, + 'OpSubgroupAvcSicGetMotionVectorMaskINTEL' : 5795, + 'OpSubgroupAvcSicConvertToMcePayloadINTEL' : 5796, + 'OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL' : 5797, + 'OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL' : 5798, + 'OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL' : 5799, + 'OpSubgroupAvcSicSetBilinearFilterEnableINTEL' : 5800, + 'OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL' : 5801, + 'OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL' : 5802, + 'OpSubgroupAvcSicEvaluateIpeINTEL' : 5803, + 'OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL' : 5804, + 'OpSubgroupAvcSicEvaluateWithDualReferenceINTEL' : 5805, + 'OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL' : 5806, + 'OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL' : 5807, + 'OpSubgroupAvcSicConvertToMceResultINTEL' : 5808, + 'OpSubgroupAvcSicGetIpeLumaShapeINTEL' : 5809, + 'OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL' : 5810, + 'OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL' : 5811, + 'OpSubgroupAvcSicGetPackedIpeLumaModesINTEL' : 5812, + 'OpSubgroupAvcSicGetIpeChromaModeINTEL' : 5813, + 'OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL' : 5814, + 'OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL' : 5815, + 'OpSubgroupAvcSicGetInterRawSadsINTEL' : 5816, + 'OpVariableLengthArrayINTEL' : 5818, + 'OpSaveMemoryINTEL' : 5819, + 'OpRestoreMemoryINTEL' : 5820, + 'OpLoopControlINTEL' : 5887, + 'OpPtrCastToCrossWorkgroupINTEL' : 5934, + 'OpCrossWorkgroupCastToPtrINTEL' : 5938, + 'OpReadPipeBlockingINTEL' : 5946, + 'OpWritePipeBlockingINTEL' : 5947, + 'OpFPGARegINTEL' : 5949, + 'OpRayQueryGetRayTMinKHR' : 6016, + 'OpRayQueryGetRayFlagsKHR' : 6017, + 'OpRayQueryGetIntersectionTKHR' : 6018, + 'OpRayQueryGetIntersectionInstanceCustomIndexKHR' : 6019, + 'OpRayQueryGetIntersectionInstanceIdKHR' : 6020, + 'OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR' : 6021, + 'OpRayQueryGetIntersectionGeometryIndexKHR' : 6022, + 'OpRayQueryGetIntersectionPrimitiveIndexKHR' : 6023, + 'OpRayQueryGetIntersectionBarycentricsKHR' : 6024, + 'OpRayQueryGetIntersectionFrontFaceKHR' : 6025, + 'OpRayQueryGetIntersectionCandidateAABBOpaqueKHR' 
: 6026, + 'OpRayQueryGetIntersectionObjectRayDirectionKHR' : 6027, + 'OpRayQueryGetIntersectionObjectRayOriginKHR' : 6028, + 'OpRayQueryGetWorldRayDirectionKHR' : 6029, + 'OpRayQueryGetWorldRayOriginKHR' : 6030, + 'OpRayQueryGetIntersectionObjectToWorldKHR' : 6031, + 'OpRayQueryGetIntersectionWorldToObjectKHR' : 6032, + 'OpAtomicFAddEXT' : 6035, + 'OpTypeBufferSurfaceINTEL' : 6086, + 'OpTypeStructContinuedINTEL' : 6090, + 'OpConstantCompositeContinuedINTEL' : 6091, + 'OpSpecConstantCompositeContinuedINTEL' : 6092, }, } diff --git a/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spv.d b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spv.d new file mode 100755 index 000000000..e1db29ba4 --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/include/spirv/unified1/spv.d @@ -0,0 +1,1709 @@ +/+ + + Copyright (c) 2014-2020 The Khronos Group Inc. + + + + Permission is hereby granted, free of charge, to any person obtaining a copy + + of this software and/or associated documentation files (the "Materials"), + + to deal in the Materials without restriction, including without limitation + + the rights to use, copy, modify, merge, publish, distribute, sublicense, + + and/or sell copies of the Materials, and to permit persons to whom the + + Materials are furnished to do so, subject to the following conditions: + + + + The above copyright notice and this permission notice shall be included in + + all copies or substantial portions of the Materials. + + + + MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS + + STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND + + HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ + + + + THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + + FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS + + IN THE MATERIALS. + +/ + +/+ + + This header is automatically generated by the same tool that creates + + the Binary Section of the SPIR-V specification. + +/ + +/+ + + Enumeration tokens for SPIR-V, in various styles: + + C, C++, C++11, JSON, Lua, Python, C#, D + + + + - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL + + - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL + + - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL + + - Lua will use tables, e.g.: spv.SourceLanguage.GLSL + + - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] + + - C# will use enum classes in the Specification class located in the "Spv" namespace, + + e.g.: Spv.Specification.SourceLanguage.GLSL + + - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL + + + + Some tokens act like mask values, which can be OR'd together, + + while others are mutually exclusive. The mask-like ones have + + "Mask" in their name, and a parallel enum that has the shift + + amount (1 << x) for each corresponding enumerant. 
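+ + + + For example, spv.RayFlagsMask.TerminateOnFirstHitKHR (0x00000004) is + + 1 << spv.RayFlagsShift.TerminateOnFirstHitKHR (2), and the mask-like + + enumerants may be OR'd together, e.g.: + + spv.RayFlagsMask.OpaqueKHR | spv.RayFlagsMask.TerminateOnFirstHitKHR.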
+ +/ + +module spv; + +enum uint MagicNumber = 0x07230203; +enum uint Version = 0x00010500; +enum uint Revision = 4; +enum uint OpCodeMask = 0xffff; +enum uint WordCountShift = 16; + +enum SourceLanguage : uint +{ + Unknown = 0, + ESSL = 1, + GLSL = 2, + OpenCL_C = 3, + OpenCL_CPP = 4, + HLSL = 5, +} + +enum ExecutionModel : uint +{ + Vertex = 0, + TessellationControl = 1, + TessellationEvaluation = 2, + Geometry = 3, + Fragment = 4, + GLCompute = 5, + Kernel = 6, + TaskNV = 5267, + MeshNV = 5268, + RayGenerationKHR = 5313, + RayGenerationNV = 5313, + IntersectionKHR = 5314, + IntersectionNV = 5314, + AnyHitKHR = 5315, + AnyHitNV = 5315, + ClosestHitKHR = 5316, + ClosestHitNV = 5316, + MissKHR = 5317, + MissNV = 5317, + CallableKHR = 5318, + CallableNV = 5318, +} + +enum AddressingModel : uint +{ + Logical = 0, + Physical32 = 1, + Physical64 = 2, + PhysicalStorageBuffer64 = 5348, + PhysicalStorageBuffer64EXT = 5348, +} + +enum MemoryModel : uint +{ + Simple = 0, + GLSL450 = 1, + OpenCL = 2, + Vulkan = 3, + VulkanKHR = 3, +} + +enum ExecutionMode : uint +{ + Invocations = 0, + SpacingEqual = 1, + SpacingFractionalEven = 2, + SpacingFractionalOdd = 3, + VertexOrderCw = 4, + VertexOrderCcw = 5, + PixelCenterInteger = 6, + OriginUpperLeft = 7, + OriginLowerLeft = 8, + EarlyFragmentTests = 9, + PointMode = 10, + Xfb = 11, + DepthReplacing = 12, + DepthGreater = 14, + DepthLess = 15, + DepthUnchanged = 16, + LocalSize = 17, + LocalSizeHint = 18, + InputPoints = 19, + InputLines = 20, + InputLinesAdjacency = 21, + Triangles = 22, + InputTrianglesAdjacency = 23, + Quads = 24, + Isolines = 25, + OutputVertices = 26, + OutputPoints = 27, + OutputLineStrip = 28, + OutputTriangleStrip = 29, + VecTypeHint = 30, + ContractionOff = 31, + Initializer = 33, + Finalizer = 34, + SubgroupSize = 35, + SubgroupsPerWorkgroup = 36, + SubgroupsPerWorkgroupId = 37, + LocalSizeId = 38, + LocalSizeHintId = 39, + PostDepthCoverage = 4446, + DenormPreserve = 4459, + DenormFlushToZero = 4460, + SignedZeroInfNanPreserve = 4461, + RoundingModeRTE = 4462, + RoundingModeRTZ = 4463, + StencilRefReplacingEXT = 5027, + OutputLinesNV = 5269, + OutputPrimitivesNV = 5270, + DerivativeGroupQuadsNV = 5289, + DerivativeGroupLinearNV = 5290, + OutputTrianglesNV = 5298, + PixelInterlockOrderedEXT = 5366, + PixelInterlockUnorderedEXT = 5367, + SampleInterlockOrderedEXT = 5368, + SampleInterlockUnorderedEXT = 5369, + ShadingRateInterlockOrderedEXT = 5370, + ShadingRateInterlockUnorderedEXT = 5371, + SharedLocalMemorySizeINTEL = 5618, + RoundingModeRTPINTEL = 5620, + RoundingModeRTNINTEL = 5621, + FloatingPointModeALTINTEL = 5622, + FloatingPointModeIEEEINTEL = 5623, + MaxWorkgroupSizeINTEL = 5893, + MaxWorkDimINTEL = 5894, + NoGlobalOffsetINTEL = 5895, + NumSIMDWorkitemsINTEL = 5896, + SchedulerTargetFmaxMhzINTEL = 5903, +} + +enum StorageClass : uint +{ + UniformConstant = 0, + Input = 1, + Uniform = 2, + Output = 3, + Workgroup = 4, + CrossWorkgroup = 5, + Private = 6, + Function = 7, + Generic = 8, + PushConstant = 9, + AtomicCounter = 10, + Image = 11, + StorageBuffer = 12, + CallableDataKHR = 5328, + CallableDataNV = 5328, + IncomingCallableDataKHR = 5329, + IncomingCallableDataNV = 5329, + RayPayloadKHR = 5338, + RayPayloadNV = 5338, + HitAttributeKHR = 5339, + HitAttributeNV = 5339, + IncomingRayPayloadKHR = 5342, + IncomingRayPayloadNV = 5342, + ShaderRecordBufferKHR = 5343, + ShaderRecordBufferNV = 5343, + PhysicalStorageBuffer = 5349, + PhysicalStorageBufferEXT = 5349, + CodeSectionINTEL = 5605, + DeviceOnlyINTEL = 5936, + 
HostOnlyINTEL = 5937, +} + +enum Dim : uint +{ + _1D = 0, + _2D = 1, + _3D = 2, + Cube = 3, + Rect = 4, + Buffer = 5, + SubpassData = 6, +} + +enum SamplerAddressingMode : uint +{ + None = 0, + ClampToEdge = 1, + Clamp = 2, + Repeat = 3, + RepeatMirrored = 4, +} + +enum SamplerFilterMode : uint +{ + Nearest = 0, + Linear = 1, +} + +enum ImageFormat : uint +{ + Unknown = 0, + Rgba32f = 1, + Rgba16f = 2, + R32f = 3, + Rgba8 = 4, + Rgba8Snorm = 5, + Rg32f = 6, + Rg16f = 7, + R11fG11fB10f = 8, + R16f = 9, + Rgba16 = 10, + Rgb10A2 = 11, + Rg16 = 12, + Rg8 = 13, + R16 = 14, + R8 = 15, + Rgba16Snorm = 16, + Rg16Snorm = 17, + Rg8Snorm = 18, + R16Snorm = 19, + R8Snorm = 20, + Rgba32i = 21, + Rgba16i = 22, + Rgba8i = 23, + R32i = 24, + Rg32i = 25, + Rg16i = 26, + Rg8i = 27, + R16i = 28, + R8i = 29, + Rgba32ui = 30, + Rgba16ui = 31, + Rgba8ui = 32, + R32ui = 33, + Rgb10a2ui = 34, + Rg32ui = 35, + Rg16ui = 36, + Rg8ui = 37, + R16ui = 38, + R8ui = 39, + R64ui = 40, + R64i = 41, +} + +enum ImageChannelOrder : uint +{ + R = 0, + A = 1, + RG = 2, + RA = 3, + RGB = 4, + RGBA = 5, + BGRA = 6, + ARGB = 7, + Intensity = 8, + Luminance = 9, + Rx = 10, + RGx = 11, + RGBx = 12, + Depth = 13, + DepthStencil = 14, + sRGB = 15, + sRGBx = 16, + sRGBA = 17, + sBGRA = 18, + ABGR = 19, +} + +enum ImageChannelDataType : uint +{ + SnormInt8 = 0, + SnormInt16 = 1, + UnormInt8 = 2, + UnormInt16 = 3, + UnormShort565 = 4, + UnormShort555 = 5, + UnormInt101010 = 6, + SignedInt8 = 7, + SignedInt16 = 8, + SignedInt32 = 9, + UnsignedInt8 = 10, + UnsignedInt16 = 11, + UnsignedInt32 = 12, + HalfFloat = 13, + Float = 14, + UnormInt24 = 15, + UnormInt101010_2 = 16, +} + +enum ImageOperandsShift : uint +{ + Bias = 0, + Lod = 1, + Grad = 2, + ConstOffset = 3, + Offset = 4, + ConstOffsets = 5, + Sample = 6, + MinLod = 7, + MakeTexelAvailable = 8, + MakeTexelAvailableKHR = 8, + MakeTexelVisible = 9, + MakeTexelVisibleKHR = 9, + NonPrivateTexel = 10, + NonPrivateTexelKHR = 10, + VolatileTexel = 11, + VolatileTexelKHR = 11, + SignExtend = 12, + ZeroExtend = 13, +} + +enum ImageOperandsMask : uint +{ + MaskNone = 0, + Bias = 0x00000001, + Lod = 0x00000002, + Grad = 0x00000004, + ConstOffset = 0x00000008, + Offset = 0x00000010, + ConstOffsets = 0x00000020, + Sample = 0x00000040, + MinLod = 0x00000080, + MakeTexelAvailable = 0x00000100, + MakeTexelAvailableKHR = 0x00000100, + MakeTexelVisible = 0x00000200, + MakeTexelVisibleKHR = 0x00000200, + NonPrivateTexel = 0x00000400, + NonPrivateTexelKHR = 0x00000400, + VolatileTexel = 0x00000800, + VolatileTexelKHR = 0x00000800, + SignExtend = 0x00001000, + ZeroExtend = 0x00002000, +} + +enum FPFastMathModeShift : uint +{ + NotNaN = 0, + NotInf = 1, + NSZ = 2, + AllowRecip = 3, + Fast = 4, + AllowContractFastINTEL = 16, + AllowReassocINTEL = 17, +} + +enum FPFastMathModeMask : uint +{ + MaskNone = 0, + NotNaN = 0x00000001, + NotInf = 0x00000002, + NSZ = 0x00000004, + AllowRecip = 0x00000008, + Fast = 0x00000010, + AllowContractFastINTEL = 0x00010000, + AllowReassocINTEL = 0x00020000, +} + +enum FPRoundingMode : uint +{ + RTE = 0, + RTZ = 1, + RTP = 2, + RTN = 3, +} + +enum LinkageType : uint +{ + Export = 0, + Import = 1, +} + +enum AccessQualifier : uint +{ + ReadOnly = 0, + WriteOnly = 1, + ReadWrite = 2, +} + +enum FunctionParameterAttribute : uint +{ + Zext = 0, + Sext = 1, + ByVal = 2, + Sret = 3, + NoAlias = 4, + NoCapture = 5, + NoWrite = 6, + NoReadWrite = 7, +} + +enum Decoration : uint +{ + RelaxedPrecision = 0, + SpecId = 1, + Block = 2, + BufferBlock = 3, + RowMajor = 4, + ColMajor = 5, + 
ArrayStride = 6, + MatrixStride = 7, + GLSLShared = 8, + GLSLPacked = 9, + CPacked = 10, + BuiltIn = 11, + NoPerspective = 13, + Flat = 14, + Patch = 15, + Centroid = 16, + Sample = 17, + Invariant = 18, + Restrict = 19, + Aliased = 20, + Volatile = 21, + Constant = 22, + Coherent = 23, + NonWritable = 24, + NonReadable = 25, + Uniform = 26, + UniformId = 27, + SaturatedConversion = 28, + Stream = 29, + Location = 30, + Component = 31, + Index = 32, + Binding = 33, + DescriptorSet = 34, + Offset = 35, + XfbBuffer = 36, + XfbStride = 37, + FuncParamAttr = 38, + FPRoundingMode = 39, + FPFastMathMode = 40, + LinkageAttributes = 41, + NoContraction = 42, + InputAttachmentIndex = 43, + Alignment = 44, + MaxByteOffset = 45, + AlignmentId = 46, + MaxByteOffsetId = 47, + NoSignedWrap = 4469, + NoUnsignedWrap = 4470, + ExplicitInterpAMD = 4999, + OverrideCoverageNV = 5248, + PassthroughNV = 5250, + ViewportRelativeNV = 5252, + SecondaryViewportRelativeNV = 5256, + PerPrimitiveNV = 5271, + PerViewNV = 5272, + PerTaskNV = 5273, + PerVertexNV = 5285, + NonUniform = 5300, + NonUniformEXT = 5300, + RestrictPointer = 5355, + RestrictPointerEXT = 5355, + AliasedPointer = 5356, + AliasedPointerEXT = 5356, + SIMTCallINTEL = 5599, + ReferencedIndirectlyINTEL = 5602, + ClobberINTEL = 5607, + SideEffectsINTEL = 5608, + VectorComputeVariableINTEL = 5624, + FuncParamIOKindINTEL = 5625, + VectorComputeFunctionINTEL = 5626, + StackCallINTEL = 5627, + GlobalVariableOffsetINTEL = 5628, + CounterBuffer = 5634, + HlslCounterBufferGOOGLE = 5634, + HlslSemanticGOOGLE = 5635, + UserSemantic = 5635, + UserTypeGOOGLE = 5636, + FunctionRoundingModeINTEL = 5822, + FunctionDenormModeINTEL = 5823, + RegisterINTEL = 5825, + MemoryINTEL = 5826, + NumbanksINTEL = 5827, + BankwidthINTEL = 5828, + MaxPrivateCopiesINTEL = 5829, + SinglepumpINTEL = 5830, + DoublepumpINTEL = 5831, + MaxReplicatesINTEL = 5832, + SimpleDualPortINTEL = 5833, + MergeINTEL = 5834, + BankBitsINTEL = 5835, + ForcePow2DepthINTEL = 5836, + BurstCoalesceINTEL = 5899, + CacheSizeINTEL = 5900, + DontStaticallyCoalesceINTEL = 5901, + PrefetchINTEL = 5902, + StallEnableINTEL = 5905, + FuseLoopsInFunctionINTEL = 5907, + BufferLocationINTEL = 5921, + IOPipeStorageINTEL = 5944, + FunctionFloatingPointModeINTEL = 6080, + SingleElementVectorINTEL = 6085, + VectorComputeCallableFunctionINTEL = 6087, +} + +enum BuiltIn : uint +{ + Position = 0, + PointSize = 1, + ClipDistance = 3, + CullDistance = 4, + VertexId = 5, + InstanceId = 6, + PrimitiveId = 7, + InvocationId = 8, + Layer = 9, + ViewportIndex = 10, + TessLevelOuter = 11, + TessLevelInner = 12, + TessCoord = 13, + PatchVertices = 14, + FragCoord = 15, + PointCoord = 16, + FrontFacing = 17, + SampleId = 18, + SamplePosition = 19, + SampleMask = 20, + FragDepth = 22, + HelperInvocation = 23, + NumWorkgroups = 24, + WorkgroupSize = 25, + WorkgroupId = 26, + LocalInvocationId = 27, + GlobalInvocationId = 28, + LocalInvocationIndex = 29, + WorkDim = 30, + GlobalSize = 31, + EnqueuedWorkgroupSize = 32, + GlobalOffset = 33, + GlobalLinearId = 34, + SubgroupSize = 36, + SubgroupMaxSize = 37, + NumSubgroups = 38, + NumEnqueuedSubgroups = 39, + SubgroupId = 40, + SubgroupLocalInvocationId = 41, + VertexIndex = 42, + InstanceIndex = 43, + SubgroupEqMask = 4416, + SubgroupEqMaskKHR = 4416, + SubgroupGeMask = 4417, + SubgroupGeMaskKHR = 4417, + SubgroupGtMask = 4418, + SubgroupGtMaskKHR = 4418, + SubgroupLeMask = 4419, + SubgroupLeMaskKHR = 4419, + SubgroupLtMask = 4420, + SubgroupLtMaskKHR = 4420, + BaseVertex = 4424, + 
BaseInstance = 4425, + DrawIndex = 4426, + PrimitiveShadingRateKHR = 4432, + DeviceIndex = 4438, + ViewIndex = 4440, + ShadingRateKHR = 4444, + BaryCoordNoPerspAMD = 4992, + BaryCoordNoPerspCentroidAMD = 4993, + BaryCoordNoPerspSampleAMD = 4994, + BaryCoordSmoothAMD = 4995, + BaryCoordSmoothCentroidAMD = 4996, + BaryCoordSmoothSampleAMD = 4997, + BaryCoordPullModelAMD = 4998, + FragStencilRefEXT = 5014, + ViewportMaskNV = 5253, + SecondaryPositionNV = 5257, + SecondaryViewportMaskNV = 5258, + PositionPerViewNV = 5261, + ViewportMaskPerViewNV = 5262, + FullyCoveredEXT = 5264, + TaskCountNV = 5274, + PrimitiveCountNV = 5275, + PrimitiveIndicesNV = 5276, + ClipDistancePerViewNV = 5277, + CullDistancePerViewNV = 5278, + LayerPerViewNV = 5279, + MeshViewCountNV = 5280, + MeshViewIndicesNV = 5281, + BaryCoordNV = 5286, + BaryCoordNoPerspNV = 5287, + FragSizeEXT = 5292, + FragmentSizeNV = 5292, + FragInvocationCountEXT = 5293, + InvocationsPerPixelNV = 5293, + LaunchIdKHR = 5319, + LaunchIdNV = 5319, + LaunchSizeKHR = 5320, + LaunchSizeNV = 5320, + WorldRayOriginKHR = 5321, + WorldRayOriginNV = 5321, + WorldRayDirectionKHR = 5322, + WorldRayDirectionNV = 5322, + ObjectRayOriginKHR = 5323, + ObjectRayOriginNV = 5323, + ObjectRayDirectionKHR = 5324, + ObjectRayDirectionNV = 5324, + RayTminKHR = 5325, + RayTminNV = 5325, + RayTmaxKHR = 5326, + RayTmaxNV = 5326, + InstanceCustomIndexKHR = 5327, + InstanceCustomIndexNV = 5327, + ObjectToWorldKHR = 5330, + ObjectToWorldNV = 5330, + WorldToObjectKHR = 5331, + WorldToObjectNV = 5331, + HitTNV = 5332, + HitKindKHR = 5333, + HitKindNV = 5333, + IncomingRayFlagsKHR = 5351, + IncomingRayFlagsNV = 5351, + RayGeometryIndexKHR = 5352, + WarpsPerSMNV = 5374, + SMCountNV = 5375, + WarpIDNV = 5376, + SMIDNV = 5377, +} + +enum SelectionControlShift : uint +{ + Flatten = 0, + DontFlatten = 1, +} + +enum SelectionControlMask : uint +{ + MaskNone = 0, + Flatten = 0x00000001, + DontFlatten = 0x00000002, +} + +enum LoopControlShift : uint +{ + Unroll = 0, + DontUnroll = 1, + DependencyInfinite = 2, + DependencyLength = 3, + MinIterations = 4, + MaxIterations = 5, + IterationMultiple = 6, + PeelCount = 7, + PartialCount = 8, + InitiationIntervalINTEL = 16, + MaxConcurrencyINTEL = 17, + DependencyArrayINTEL = 18, + PipelineEnableINTEL = 19, + LoopCoalesceINTEL = 20, + MaxInterleavingINTEL = 21, + SpeculatedIterationsINTEL = 22, + NoFusionINTEL = 23, +} + +enum LoopControlMask : uint +{ + MaskNone = 0, + Unroll = 0x00000001, + DontUnroll = 0x00000002, + DependencyInfinite = 0x00000004, + DependencyLength = 0x00000008, + MinIterations = 0x00000010, + MaxIterations = 0x00000020, + IterationMultiple = 0x00000040, + PeelCount = 0x00000080, + PartialCount = 0x00000100, + InitiationIntervalINTEL = 0x00010000, + MaxConcurrencyINTEL = 0x00020000, + DependencyArrayINTEL = 0x00040000, + PipelineEnableINTEL = 0x00080000, + LoopCoalesceINTEL = 0x00100000, + MaxInterleavingINTEL = 0x00200000, + SpeculatedIterationsINTEL = 0x00400000, + NoFusionINTEL = 0x00800000, +} + +enum FunctionControlShift : uint +{ + Inline = 0, + DontInline = 1, + Pure = 2, + Const = 3, +} + +enum FunctionControlMask : uint +{ + MaskNone = 0, + Inline = 0x00000001, + DontInline = 0x00000002, + Pure = 0x00000004, + Const = 0x00000008, +} + +enum MemorySemanticsShift : uint +{ + Acquire = 1, + Release = 2, + AcquireRelease = 3, + SequentiallyConsistent = 4, + UniformMemory = 6, + SubgroupMemory = 7, + WorkgroupMemory = 8, + CrossWorkgroupMemory = 9, + AtomicCounterMemory = 10, + ImageMemory = 11, + OutputMemory = 
12, + OutputMemoryKHR = 12, + MakeAvailable = 13, + MakeAvailableKHR = 13, + MakeVisible = 14, + MakeVisibleKHR = 14, + Volatile = 15, +} + +enum MemorySemanticsMask : uint +{ + MaskNone = 0, + Acquire = 0x00000002, + Release = 0x00000004, + AcquireRelease = 0x00000008, + SequentiallyConsistent = 0x00000010, + UniformMemory = 0x00000040, + SubgroupMemory = 0x00000080, + WorkgroupMemory = 0x00000100, + CrossWorkgroupMemory = 0x00000200, + AtomicCounterMemory = 0x00000400, + ImageMemory = 0x00000800, + OutputMemory = 0x00001000, + OutputMemoryKHR = 0x00001000, + MakeAvailable = 0x00002000, + MakeAvailableKHR = 0x00002000, + MakeVisible = 0x00004000, + MakeVisibleKHR = 0x00004000, + Volatile = 0x00008000, +} + +enum MemoryAccessShift : uint +{ + Volatile = 0, + Aligned = 1, + Nontemporal = 2, + MakePointerAvailable = 3, + MakePointerAvailableKHR = 3, + MakePointerVisible = 4, + MakePointerVisibleKHR = 4, + NonPrivatePointer = 5, + NonPrivatePointerKHR = 5, +} + +enum MemoryAccessMask : uint +{ + MaskNone = 0, + Volatile = 0x00000001, + Aligned = 0x00000002, + Nontemporal = 0x00000004, + MakePointerAvailable = 0x00000008, + MakePointerAvailableKHR = 0x00000008, + MakePointerVisible = 0x00000010, + MakePointerVisibleKHR = 0x00000010, + NonPrivatePointer = 0x00000020, + NonPrivatePointerKHR = 0x00000020, +} + +enum Scope : uint +{ + CrossDevice = 0, + Device = 1, + Workgroup = 2, + Subgroup = 3, + Invocation = 4, + QueueFamily = 5, + QueueFamilyKHR = 5, + ShaderCallKHR = 6, +} + +enum GroupOperation : uint +{ + Reduce = 0, + InclusiveScan = 1, + ExclusiveScan = 2, + ClusteredReduce = 3, + PartitionedReduceNV = 6, + PartitionedInclusiveScanNV = 7, + PartitionedExclusiveScanNV = 8, +} + +enum KernelEnqueueFlags : uint +{ + NoWait = 0, + WaitKernel = 1, + WaitWorkGroup = 2, +} + +enum KernelProfilingInfoShift : uint +{ + CmdExecTime = 0, +} + +enum KernelProfilingInfoMask : uint +{ + MaskNone = 0, + CmdExecTime = 0x00000001, +} + +enum Capability : uint +{ + Matrix = 0, + Shader = 1, + Geometry = 2, + Tessellation = 3, + Addresses = 4, + Linkage = 5, + Kernel = 6, + Vector16 = 7, + Float16Buffer = 8, + Float16 = 9, + Float64 = 10, + Int64 = 11, + Int64Atomics = 12, + ImageBasic = 13, + ImageReadWrite = 14, + ImageMipmap = 15, + Pipes = 17, + Groups = 18, + DeviceEnqueue = 19, + LiteralSampler = 20, + AtomicStorage = 21, + Int16 = 22, + TessellationPointSize = 23, + GeometryPointSize = 24, + ImageGatherExtended = 25, + StorageImageMultisample = 27, + UniformBufferArrayDynamicIndexing = 28, + SampledImageArrayDynamicIndexing = 29, + StorageBufferArrayDynamicIndexing = 30, + StorageImageArrayDynamicIndexing = 31, + ClipDistance = 32, + CullDistance = 33, + ImageCubeArray = 34, + SampleRateShading = 35, + ImageRect = 36, + SampledRect = 37, + GenericPointer = 38, + Int8 = 39, + InputAttachment = 40, + SparseResidency = 41, + MinLod = 42, + Sampled1D = 43, + Image1D = 44, + SampledCubeArray = 45, + SampledBuffer = 46, + ImageBuffer = 47, + ImageMSArray = 48, + StorageImageExtendedFormats = 49, + ImageQuery = 50, + DerivativeControl = 51, + InterpolationFunction = 52, + TransformFeedback = 53, + GeometryStreams = 54, + StorageImageReadWithoutFormat = 55, + StorageImageWriteWithoutFormat = 56, + MultiViewport = 57, + SubgroupDispatch = 58, + NamedBarrier = 59, + PipeStorage = 60, + GroupNonUniform = 61, + GroupNonUniformVote = 62, + GroupNonUniformArithmetic = 63, + GroupNonUniformBallot = 64, + GroupNonUniformShuffle = 65, + GroupNonUniformShuffleRelative = 66, + GroupNonUniformClustered = 67, + 
GroupNonUniformQuad = 68, + ShaderLayer = 69, + ShaderViewportIndex = 70, + FragmentShadingRateKHR = 4422, + SubgroupBallotKHR = 4423, + DrawParameters = 4427, + WorkgroupMemoryExplicitLayoutKHR = 4428, + WorkgroupMemoryExplicitLayout8BitAccessKHR = 4429, + WorkgroupMemoryExplicitLayout16BitAccessKHR = 4430, + SubgroupVoteKHR = 4431, + StorageBuffer16BitAccess = 4433, + StorageUniformBufferBlock16 = 4433, + StorageUniform16 = 4434, + UniformAndStorageBuffer16BitAccess = 4434, + StoragePushConstant16 = 4435, + StorageInputOutput16 = 4436, + DeviceGroup = 4437, + MultiView = 4439, + VariablePointersStorageBuffer = 4441, + VariablePointers = 4442, + AtomicStorageOps = 4445, + SampleMaskPostDepthCoverage = 4447, + StorageBuffer8BitAccess = 4448, + UniformAndStorageBuffer8BitAccess = 4449, + StoragePushConstant8 = 4450, + DenormPreserve = 4464, + DenormFlushToZero = 4465, + SignedZeroInfNanPreserve = 4466, + RoundingModeRTE = 4467, + RoundingModeRTZ = 4468, + RayQueryProvisionalKHR = 4471, + RayQueryKHR = 4472, + RayTraversalPrimitiveCullingKHR = 4478, + RayTracingKHR = 4479, + Float16ImageAMD = 5008, + ImageGatherBiasLodAMD = 5009, + FragmentMaskAMD = 5010, + StencilExportEXT = 5013, + ImageReadWriteLodAMD = 5015, + Int64ImageEXT = 5016, + ShaderClockKHR = 5055, + SampleMaskOverrideCoverageNV = 5249, + GeometryShaderPassthroughNV = 5251, + ShaderViewportIndexLayerEXT = 5254, + ShaderViewportIndexLayerNV = 5254, + ShaderViewportMaskNV = 5255, + ShaderStereoViewNV = 5259, + PerViewAttributesNV = 5260, + FragmentFullyCoveredEXT = 5265, + MeshShadingNV = 5266, + ImageFootprintNV = 5282, + FragmentBarycentricNV = 5284, + ComputeDerivativeGroupQuadsNV = 5288, + FragmentDensityEXT = 5291, + ShadingRateNV = 5291, + GroupNonUniformPartitionedNV = 5297, + ShaderNonUniform = 5301, + ShaderNonUniformEXT = 5301, + RuntimeDescriptorArray = 5302, + RuntimeDescriptorArrayEXT = 5302, + InputAttachmentArrayDynamicIndexing = 5303, + InputAttachmentArrayDynamicIndexingEXT = 5303, + UniformTexelBufferArrayDynamicIndexing = 5304, + UniformTexelBufferArrayDynamicIndexingEXT = 5304, + StorageTexelBufferArrayDynamicIndexing = 5305, + StorageTexelBufferArrayDynamicIndexingEXT = 5305, + UniformBufferArrayNonUniformIndexing = 5306, + UniformBufferArrayNonUniformIndexingEXT = 5306, + SampledImageArrayNonUniformIndexing = 5307, + SampledImageArrayNonUniformIndexingEXT = 5307, + StorageBufferArrayNonUniformIndexing = 5308, + StorageBufferArrayNonUniformIndexingEXT = 5308, + StorageImageArrayNonUniformIndexing = 5309, + StorageImageArrayNonUniformIndexingEXT = 5309, + InputAttachmentArrayNonUniformIndexing = 5310, + InputAttachmentArrayNonUniformIndexingEXT = 5310, + UniformTexelBufferArrayNonUniformIndexing = 5311, + UniformTexelBufferArrayNonUniformIndexingEXT = 5311, + StorageTexelBufferArrayNonUniformIndexing = 5312, + StorageTexelBufferArrayNonUniformIndexingEXT = 5312, + RayTracingNV = 5340, + VulkanMemoryModel = 5345, + VulkanMemoryModelKHR = 5345, + VulkanMemoryModelDeviceScope = 5346, + VulkanMemoryModelDeviceScopeKHR = 5346, + PhysicalStorageBufferAddresses = 5347, + PhysicalStorageBufferAddressesEXT = 5347, + ComputeDerivativeGroupLinearNV = 5350, + RayTracingProvisionalKHR = 5353, + CooperativeMatrixNV = 5357, + FragmentShaderSampleInterlockEXT = 5363, + FragmentShaderShadingRateInterlockEXT = 5372, + ShaderSMBuiltinsNV = 5373, + FragmentShaderPixelInterlockEXT = 5378, + DemoteToHelperInvocationEXT = 5379, + SubgroupShuffleINTEL = 5568, + SubgroupBufferBlockIOINTEL = 5569, + SubgroupImageBlockIOINTEL = 5570, + 
SubgroupImageMediaBlockIOINTEL = 5579, + RoundToInfinityINTEL = 5582, + FloatingPointModeINTEL = 5583, + IntegerFunctions2INTEL = 5584, + FunctionPointersINTEL = 5603, + IndirectReferencesINTEL = 5604, + AsmINTEL = 5606, + AtomicFloat32MinMaxEXT = 5612, + AtomicFloat64MinMaxEXT = 5613, + AtomicFloat16MinMaxEXT = 5616, + VectorComputeINTEL = 5617, + VectorAnyINTEL = 5619, + SubgroupAvcMotionEstimationINTEL = 5696, + SubgroupAvcMotionEstimationIntraINTEL = 5697, + SubgroupAvcMotionEstimationChromaINTEL = 5698, + VariableLengthArrayINTEL = 5817, + FunctionFloatControlINTEL = 5821, + FPGAMemoryAttributesINTEL = 5824, + FPFastMathModeINTEL = 5837, + ArbitraryPrecisionIntegersINTEL = 5844, + UnstructuredLoopControlsINTEL = 5886, + FPGALoopControlsINTEL = 5888, + KernelAttributesINTEL = 5892, + FPGAKernelAttributesINTEL = 5897, + FPGAMemoryAccessesINTEL = 5898, + FPGAClusterAttributesINTEL = 5904, + LoopFuseINTEL = 5906, + FPGABufferLocationINTEL = 5920, + USMStorageClassesINTEL = 5935, + IOPipesINTEL = 5943, + BlockingPipesINTEL = 5945, + FPGARegINTEL = 5948, + AtomicFloat32AddEXT = 6033, + AtomicFloat64AddEXT = 6034, + LongConstantCompositeINTEL = 6089, +} + +enum RayFlagsShift : uint +{ + OpaqueKHR = 0, + NoOpaqueKHR = 1, + TerminateOnFirstHitKHR = 2, + SkipClosestHitShaderKHR = 3, + CullBackFacingTrianglesKHR = 4, + CullFrontFacingTrianglesKHR = 5, + CullOpaqueKHR = 6, + CullNoOpaqueKHR = 7, + SkipTrianglesKHR = 8, + SkipAABBsKHR = 9, +} + +enum RayFlagsMask : uint +{ + MaskNone = 0, + OpaqueKHR = 0x00000001, + NoOpaqueKHR = 0x00000002, + TerminateOnFirstHitKHR = 0x00000004, + SkipClosestHitShaderKHR = 0x00000008, + CullBackFacingTrianglesKHR = 0x00000010, + CullFrontFacingTrianglesKHR = 0x00000020, + CullOpaqueKHR = 0x00000040, + CullNoOpaqueKHR = 0x00000080, + SkipTrianglesKHR = 0x00000100, + SkipAABBsKHR = 0x00000200, +} + +enum RayQueryIntersection : uint +{ + RayQueryCandidateIntersectionKHR = 0, + RayQueryCommittedIntersectionKHR = 1, +} + +enum RayQueryCommittedIntersectionType : uint +{ + RayQueryCommittedIntersectionNoneKHR = 0, + RayQueryCommittedIntersectionTriangleKHR = 1, + RayQueryCommittedIntersectionGeneratedKHR = 2, +} + +enum RayQueryCandidateIntersectionType : uint +{ + RayQueryCandidateIntersectionTriangleKHR = 0, + RayQueryCandidateIntersectionAABBKHR = 1, +} + +enum FragmentShadingRateShift : uint +{ + Vertical2Pixels = 0, + Vertical4Pixels = 1, + Horizontal2Pixels = 2, + Horizontal4Pixels = 3, +} + +enum FragmentShadingRateMask : uint +{ + MaskNone = 0, + Vertical2Pixels = 0x00000001, + Vertical4Pixels = 0x00000002, + Horizontal2Pixels = 0x00000004, + Horizontal4Pixels = 0x00000008, +} + +enum FPDenormMode : uint +{ + Preserve = 0, + FlushToZero = 1, +} + +enum FPOperationMode : uint +{ + IEEE = 0, + ALT = 1, +} + +enum Op : uint +{ + OpNop = 0, + OpUndef = 1, + OpSourceContinued = 2, + OpSource = 3, + OpSourceExtension = 4, + OpName = 5, + OpMemberName = 6, + OpString = 7, + OpLine = 8, + OpExtension = 10, + OpExtInstImport = 11, + OpExtInst = 12, + OpMemoryModel = 14, + OpEntryPoint = 15, + OpExecutionMode = 16, + OpCapability = 17, + OpTypeVoid = 19, + OpTypeBool = 20, + OpTypeInt = 21, + OpTypeFloat = 22, + OpTypeVector = 23, + OpTypeMatrix = 24, + OpTypeImage = 25, + OpTypeSampler = 26, + OpTypeSampledImage = 27, + OpTypeArray = 28, + OpTypeRuntimeArray = 29, + OpTypeStruct = 30, + OpTypeOpaque = 31, + OpTypePointer = 32, + OpTypeFunction = 33, + OpTypeEvent = 34, + OpTypeDeviceEvent = 35, + OpTypeReserveId = 36, + OpTypeQueue = 37, + OpTypePipe = 38, + 
OpTypeForwardPointer = 39, + OpConstantTrue = 41, + OpConstantFalse = 42, + OpConstant = 43, + OpConstantComposite = 44, + OpConstantSampler = 45, + OpConstantNull = 46, + OpSpecConstantTrue = 48, + OpSpecConstantFalse = 49, + OpSpecConstant = 50, + OpSpecConstantComposite = 51, + OpSpecConstantOp = 52, + OpFunction = 54, + OpFunctionParameter = 55, + OpFunctionEnd = 56, + OpFunctionCall = 57, + OpVariable = 59, + OpImageTexelPointer = 60, + OpLoad = 61, + OpStore = 62, + OpCopyMemory = 63, + OpCopyMemorySized = 64, + OpAccessChain = 65, + OpInBoundsAccessChain = 66, + OpPtrAccessChain = 67, + OpArrayLength = 68, + OpGenericPtrMemSemantics = 69, + OpInBoundsPtrAccessChain = 70, + OpDecorate = 71, + OpMemberDecorate = 72, + OpDecorationGroup = 73, + OpGroupDecorate = 74, + OpGroupMemberDecorate = 75, + OpVectorExtractDynamic = 77, + OpVectorInsertDynamic = 78, + OpVectorShuffle = 79, + OpCompositeConstruct = 80, + OpCompositeExtract = 81, + OpCompositeInsert = 82, + OpCopyObject = 83, + OpTranspose = 84, + OpSampledImage = 86, + OpImageSampleImplicitLod = 87, + OpImageSampleExplicitLod = 88, + OpImageSampleDrefImplicitLod = 89, + OpImageSampleDrefExplicitLod = 90, + OpImageSampleProjImplicitLod = 91, + OpImageSampleProjExplicitLod = 92, + OpImageSampleProjDrefImplicitLod = 93, + OpImageSampleProjDrefExplicitLod = 94, + OpImageFetch = 95, + OpImageGather = 96, + OpImageDrefGather = 97, + OpImageRead = 98, + OpImageWrite = 99, + OpImage = 100, + OpImageQueryFormat = 101, + OpImageQueryOrder = 102, + OpImageQuerySizeLod = 103, + OpImageQuerySize = 104, + OpImageQueryLod = 105, + OpImageQueryLevels = 106, + OpImageQuerySamples = 107, + OpConvertFToU = 109, + OpConvertFToS = 110, + OpConvertSToF = 111, + OpConvertUToF = 112, + OpUConvert = 113, + OpSConvert = 114, + OpFConvert = 115, + OpQuantizeToF16 = 116, + OpConvertPtrToU = 117, + OpSatConvertSToU = 118, + OpSatConvertUToS = 119, + OpConvertUToPtr = 120, + OpPtrCastToGeneric = 121, + OpGenericCastToPtr = 122, + OpGenericCastToPtrExplicit = 123, + OpBitcast = 124, + OpSNegate = 126, + OpFNegate = 127, + OpIAdd = 128, + OpFAdd = 129, + OpISub = 130, + OpFSub = 131, + OpIMul = 132, + OpFMul = 133, + OpUDiv = 134, + OpSDiv = 135, + OpFDiv = 136, + OpUMod = 137, + OpSRem = 138, + OpSMod = 139, + OpFRem = 140, + OpFMod = 141, + OpVectorTimesScalar = 142, + OpMatrixTimesScalar = 143, + OpVectorTimesMatrix = 144, + OpMatrixTimesVector = 145, + OpMatrixTimesMatrix = 146, + OpOuterProduct = 147, + OpDot = 148, + OpIAddCarry = 149, + OpISubBorrow = 150, + OpUMulExtended = 151, + OpSMulExtended = 152, + OpAny = 154, + OpAll = 155, + OpIsNan = 156, + OpIsInf = 157, + OpIsFinite = 158, + OpIsNormal = 159, + OpSignBitSet = 160, + OpLessOrGreater = 161, + OpOrdered = 162, + OpUnordered = 163, + OpLogicalEqual = 164, + OpLogicalNotEqual = 165, + OpLogicalOr = 166, + OpLogicalAnd = 167, + OpLogicalNot = 168, + OpSelect = 169, + OpIEqual = 170, + OpINotEqual = 171, + OpUGreaterThan = 172, + OpSGreaterThan = 173, + OpUGreaterThanEqual = 174, + OpSGreaterThanEqual = 175, + OpULessThan = 176, + OpSLessThan = 177, + OpULessThanEqual = 178, + OpSLessThanEqual = 179, + OpFOrdEqual = 180, + OpFUnordEqual = 181, + OpFOrdNotEqual = 182, + OpFUnordNotEqual = 183, + OpFOrdLessThan = 184, + OpFUnordLessThan = 185, + OpFOrdGreaterThan = 186, + OpFUnordGreaterThan = 187, + OpFOrdLessThanEqual = 188, + OpFUnordLessThanEqual = 189, + OpFOrdGreaterThanEqual = 190, + OpFUnordGreaterThanEqual = 191, + OpShiftRightLogical = 194, + OpShiftRightArithmetic = 195, + 
OpShiftLeftLogical = 196, + OpBitwiseOr = 197, + OpBitwiseXor = 198, + OpBitwiseAnd = 199, + OpNot = 200, + OpBitFieldInsert = 201, + OpBitFieldSExtract = 202, + OpBitFieldUExtract = 203, + OpBitReverse = 204, + OpBitCount = 205, + OpDPdx = 207, + OpDPdy = 208, + OpFwidth = 209, + OpDPdxFine = 210, + OpDPdyFine = 211, + OpFwidthFine = 212, + OpDPdxCoarse = 213, + OpDPdyCoarse = 214, + OpFwidthCoarse = 215, + OpEmitVertex = 218, + OpEndPrimitive = 219, + OpEmitStreamVertex = 220, + OpEndStreamPrimitive = 221, + OpControlBarrier = 224, + OpMemoryBarrier = 225, + OpAtomicLoad = 227, + OpAtomicStore = 228, + OpAtomicExchange = 229, + OpAtomicCompareExchange = 230, + OpAtomicCompareExchangeWeak = 231, + OpAtomicIIncrement = 232, + OpAtomicIDecrement = 233, + OpAtomicIAdd = 234, + OpAtomicISub = 235, + OpAtomicSMin = 236, + OpAtomicUMin = 237, + OpAtomicSMax = 238, + OpAtomicUMax = 239, + OpAtomicAnd = 240, + OpAtomicOr = 241, + OpAtomicXor = 242, + OpPhi = 245, + OpLoopMerge = 246, + OpSelectionMerge = 247, + OpLabel = 248, + OpBranch = 249, + OpBranchConditional = 250, + OpSwitch = 251, + OpKill = 252, + OpReturn = 253, + OpReturnValue = 254, + OpUnreachable = 255, + OpLifetimeStart = 256, + OpLifetimeStop = 257, + OpGroupAsyncCopy = 259, + OpGroupWaitEvents = 260, + OpGroupAll = 261, + OpGroupAny = 262, + OpGroupBroadcast = 263, + OpGroupIAdd = 264, + OpGroupFAdd = 265, + OpGroupFMin = 266, + OpGroupUMin = 267, + OpGroupSMin = 268, + OpGroupFMax = 269, + OpGroupUMax = 270, + OpGroupSMax = 271, + OpReadPipe = 274, + OpWritePipe = 275, + OpReservedReadPipe = 276, + OpReservedWritePipe = 277, + OpReserveReadPipePackets = 278, + OpReserveWritePipePackets = 279, + OpCommitReadPipe = 280, + OpCommitWritePipe = 281, + OpIsValidReserveId = 282, + OpGetNumPipePackets = 283, + OpGetMaxPipePackets = 284, + OpGroupReserveReadPipePackets = 285, + OpGroupReserveWritePipePackets = 286, + OpGroupCommitReadPipe = 287, + OpGroupCommitWritePipe = 288, + OpEnqueueMarker = 291, + OpEnqueueKernel = 292, + OpGetKernelNDrangeSubGroupCount = 293, + OpGetKernelNDrangeMaxSubGroupSize = 294, + OpGetKernelWorkGroupSize = 295, + OpGetKernelPreferredWorkGroupSizeMultiple = 296, + OpRetainEvent = 297, + OpReleaseEvent = 298, + OpCreateUserEvent = 299, + OpIsValidEvent = 300, + OpSetUserEventStatus = 301, + OpCaptureEventProfilingInfo = 302, + OpGetDefaultQueue = 303, + OpBuildNDRange = 304, + OpImageSparseSampleImplicitLod = 305, + OpImageSparseSampleExplicitLod = 306, + OpImageSparseSampleDrefImplicitLod = 307, + OpImageSparseSampleDrefExplicitLod = 308, + OpImageSparseSampleProjImplicitLod = 309, + OpImageSparseSampleProjExplicitLod = 310, + OpImageSparseSampleProjDrefImplicitLod = 311, + OpImageSparseSampleProjDrefExplicitLod = 312, + OpImageSparseFetch = 313, + OpImageSparseGather = 314, + OpImageSparseDrefGather = 315, + OpImageSparseTexelsResident = 316, + OpNoLine = 317, + OpAtomicFlagTestAndSet = 318, + OpAtomicFlagClear = 319, + OpImageSparseRead = 320, + OpSizeOf = 321, + OpTypePipeStorage = 322, + OpConstantPipeStorage = 323, + OpCreatePipeFromPipeStorage = 324, + OpGetKernelLocalSizeForSubgroupCount = 325, + OpGetKernelMaxNumSubgroups = 326, + OpTypeNamedBarrier = 327, + OpNamedBarrierInitialize = 328, + OpMemoryNamedBarrier = 329, + OpModuleProcessed = 330, + OpExecutionModeId = 331, + OpDecorateId = 332, + OpGroupNonUniformElect = 333, + OpGroupNonUniformAll = 334, + OpGroupNonUniformAny = 335, + OpGroupNonUniformAllEqual = 336, + OpGroupNonUniformBroadcast = 337, + OpGroupNonUniformBroadcastFirst = 338, + 
OpGroupNonUniformBallot = 339, + OpGroupNonUniformInverseBallot = 340, + OpGroupNonUniformBallotBitExtract = 341, + OpGroupNonUniformBallotBitCount = 342, + OpGroupNonUniformBallotFindLSB = 343, + OpGroupNonUniformBallotFindMSB = 344, + OpGroupNonUniformShuffle = 345, + OpGroupNonUniformShuffleXor = 346, + OpGroupNonUniformShuffleUp = 347, + OpGroupNonUniformShuffleDown = 348, + OpGroupNonUniformIAdd = 349, + OpGroupNonUniformFAdd = 350, + OpGroupNonUniformIMul = 351, + OpGroupNonUniformFMul = 352, + OpGroupNonUniformSMin = 353, + OpGroupNonUniformUMin = 354, + OpGroupNonUniformFMin = 355, + OpGroupNonUniformSMax = 356, + OpGroupNonUniformUMax = 357, + OpGroupNonUniformFMax = 358, + OpGroupNonUniformBitwiseAnd = 359, + OpGroupNonUniformBitwiseOr = 360, + OpGroupNonUniformBitwiseXor = 361, + OpGroupNonUniformLogicalAnd = 362, + OpGroupNonUniformLogicalOr = 363, + OpGroupNonUniformLogicalXor = 364, + OpGroupNonUniformQuadBroadcast = 365, + OpGroupNonUniformQuadSwap = 366, + OpCopyLogical = 400, + OpPtrEqual = 401, + OpPtrNotEqual = 402, + OpPtrDiff = 403, + OpTerminateInvocation = 4416, + OpSubgroupBallotKHR = 4421, + OpSubgroupFirstInvocationKHR = 4422, + OpSubgroupAllKHR = 4428, + OpSubgroupAnyKHR = 4429, + OpSubgroupAllEqualKHR = 4430, + OpSubgroupReadInvocationKHR = 4432, + OpTraceRayKHR = 4445, + OpExecuteCallableKHR = 4446, + OpConvertUToAccelerationStructureKHR = 4447, + OpIgnoreIntersectionKHR = 4448, + OpTerminateRayKHR = 4449, + OpTypeRayQueryKHR = 4472, + OpRayQueryInitializeKHR = 4473, + OpRayQueryTerminateKHR = 4474, + OpRayQueryGenerateIntersectionKHR = 4475, + OpRayQueryConfirmIntersectionKHR = 4476, + OpRayQueryProceedKHR = 4477, + OpRayQueryGetIntersectionTypeKHR = 4479, + OpGroupIAddNonUniformAMD = 5000, + OpGroupFAddNonUniformAMD = 5001, + OpGroupFMinNonUniformAMD = 5002, + OpGroupUMinNonUniformAMD = 5003, + OpGroupSMinNonUniformAMD = 5004, + OpGroupFMaxNonUniformAMD = 5005, + OpGroupUMaxNonUniformAMD = 5006, + OpGroupSMaxNonUniformAMD = 5007, + OpFragmentMaskFetchAMD = 5011, + OpFragmentFetchAMD = 5012, + OpReadClockKHR = 5056, + OpImageSampleFootprintNV = 5283, + OpGroupNonUniformPartitionNV = 5296, + OpWritePackedPrimitiveIndices4x8NV = 5299, + OpReportIntersectionKHR = 5334, + OpReportIntersectionNV = 5334, + OpIgnoreIntersectionNV = 5335, + OpTerminateRayNV = 5336, + OpTraceNV = 5337, + OpTypeAccelerationStructureKHR = 5341, + OpTypeAccelerationStructureNV = 5341, + OpExecuteCallableNV = 5344, + OpTypeCooperativeMatrixNV = 5358, + OpCooperativeMatrixLoadNV = 5359, + OpCooperativeMatrixStoreNV = 5360, + OpCooperativeMatrixMulAddNV = 5361, + OpCooperativeMatrixLengthNV = 5362, + OpBeginInvocationInterlockEXT = 5364, + OpEndInvocationInterlockEXT = 5365, + OpDemoteToHelperInvocationEXT = 5380, + OpIsHelperInvocationEXT = 5381, + OpSubgroupShuffleINTEL = 5571, + OpSubgroupShuffleDownINTEL = 5572, + OpSubgroupShuffleUpINTEL = 5573, + OpSubgroupShuffleXorINTEL = 5574, + OpSubgroupBlockReadINTEL = 5575, + OpSubgroupBlockWriteINTEL = 5576, + OpSubgroupImageBlockReadINTEL = 5577, + OpSubgroupImageBlockWriteINTEL = 5578, + OpSubgroupImageMediaBlockReadINTEL = 5580, + OpSubgroupImageMediaBlockWriteINTEL = 5581, + OpUCountLeadingZerosINTEL = 5585, + OpUCountTrailingZerosINTEL = 5586, + OpAbsISubINTEL = 5587, + OpAbsUSubINTEL = 5588, + OpIAddSatINTEL = 5589, + OpUAddSatINTEL = 5590, + OpIAverageINTEL = 5591, + OpUAverageINTEL = 5592, + OpIAverageRoundedINTEL = 5593, + OpUAverageRoundedINTEL = 5594, + OpISubSatINTEL = 5595, + OpUSubSatINTEL = 5596, + OpIMul32x16INTEL = 5597, + 
OpUMul32x16INTEL = 5598, + OpConstFunctionPointerINTEL = 5600, + OpFunctionPointerCallINTEL = 5601, + OpAsmTargetINTEL = 5609, + OpAsmINTEL = 5610, + OpAsmCallINTEL = 5611, + OpAtomicFMinEXT = 5614, + OpAtomicFMaxEXT = 5615, + OpDecorateString = 5632, + OpDecorateStringGOOGLE = 5632, + OpMemberDecorateString = 5633, + OpMemberDecorateStringGOOGLE = 5633, + OpVmeImageINTEL = 5699, + OpTypeVmeImageINTEL = 5700, + OpTypeAvcImePayloadINTEL = 5701, + OpTypeAvcRefPayloadINTEL = 5702, + OpTypeAvcSicPayloadINTEL = 5703, + OpTypeAvcMcePayloadINTEL = 5704, + OpTypeAvcMceResultINTEL = 5705, + OpTypeAvcImeResultINTEL = 5706, + OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707, + OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708, + OpTypeAvcImeSingleReferenceStreaminINTEL = 5709, + OpTypeAvcImeDualReferenceStreaminINTEL = 5710, + OpTypeAvcRefResultINTEL = 5711, + OpTypeAvcSicResultINTEL = 5712, + OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713, + OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714, + OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715, + OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716, + OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717, + OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718, + OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719, + OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720, + OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721, + OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722, + OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723, + OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724, + OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725, + OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726, + OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727, + OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728, + OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729, + OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730, + OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731, + OpSubgroupAvcMceConvertToImePayloadINTEL = 5732, + OpSubgroupAvcMceConvertToImeResultINTEL = 5733, + OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734, + OpSubgroupAvcMceConvertToRefResultINTEL = 5735, + OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736, + OpSubgroupAvcMceConvertToSicResultINTEL = 5737, + OpSubgroupAvcMceGetMotionVectorsINTEL = 5738, + OpSubgroupAvcMceGetInterDistortionsINTEL = 5739, + OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740, + OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741, + OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742, + OpSubgroupAvcMceGetInterDirectionsINTEL = 5743, + OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744, + OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745, + OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746, + OpSubgroupAvcImeInitializeINTEL = 5747, + OpSubgroupAvcImeSetSingleReferenceINTEL = 5748, + OpSubgroupAvcImeSetDualReferenceINTEL = 5749, + OpSubgroupAvcImeRefWindowSizeINTEL = 5750, + OpSubgroupAvcImeAdjustRefOffsetINTEL = 5751, + OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752, + OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753, + OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754, + OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755, + OpSubgroupAvcImeSetWeightedSadINTEL = 5756, + OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757, + OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758, + 
OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761, + OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762, + OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763, + OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764, + OpSubgroupAvcImeConvertToMceResultINTEL = 5765, + OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766, + OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767, + OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768, + OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771, + OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774, + OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775, + OpSubgroupAvcImeGetBorderReachedINTEL = 5776, + OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777, + OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778, + OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779, + OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780, + OpSubgroupAvcFmeInitializeINTEL = 5781, + OpSubgroupAvcBmeInitializeINTEL = 5782, + OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783, + OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784, + OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785, + OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786, + OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787, + OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788, + OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789, + OpSubgroupAvcRefConvertToMceResultINTEL = 5790, + OpSubgroupAvcSicInitializeINTEL = 5791, + OpSubgroupAvcSicConfigureSkcINTEL = 5792, + OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793, + OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794, + OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795, + OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796, + OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797, + OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798, + OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799, + OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800, + OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801, + OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802, + OpSubgroupAvcSicEvaluateIpeINTEL = 5803, + OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804, + OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805, + OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806, + OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807, + OpSubgroupAvcSicConvertToMceResultINTEL = 5808, + OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809, + OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810, + OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811, + OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812, + OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813, + OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, + OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, + OpSubgroupAvcSicGetInterRawSadsINTEL = 5816, + OpVariableLengthArrayINTEL = 5818, + OpSaveMemoryINTEL = 5819, + OpRestoreMemoryINTEL = 5820, + OpLoopControlINTEL = 5887, + 
OpPtrCastToCrossWorkgroupINTEL = 5934, + OpCrossWorkgroupCastToPtrINTEL = 5938, + OpReadPipeBlockingINTEL = 5946, + OpWritePipeBlockingINTEL = 5947, + OpFPGARegINTEL = 5949, + OpRayQueryGetRayTMinKHR = 6016, + OpRayQueryGetRayFlagsKHR = 6017, + OpRayQueryGetIntersectionTKHR = 6018, + OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019, + OpRayQueryGetIntersectionInstanceIdKHR = 6020, + OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021, + OpRayQueryGetIntersectionGeometryIndexKHR = 6022, + OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023, + OpRayQueryGetIntersectionBarycentricsKHR = 6024, + OpRayQueryGetIntersectionFrontFaceKHR = 6025, + OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026, + OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027, + OpRayQueryGetIntersectionObjectRayOriginKHR = 6028, + OpRayQueryGetWorldRayDirectionKHR = 6029, + OpRayQueryGetWorldRayOriginKHR = 6030, + OpRayQueryGetIntersectionObjectToWorldKHR = 6031, + OpRayQueryGetIntersectionWorldToObjectKHR = 6032, + OpAtomicFAddEXT = 6035, + OpTypeBufferSurfaceINTEL = 6086, + OpTypeStructContinuedINTEL = 6090, + OpConstantCompositeContinuedINTEL = 6091, + OpSpecConstantCompositeContinuedINTEL = 6092, +} + + diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/generate_language_headers.py b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/generate_language_headers.py new file mode 100755 index 000000000..c56780c1c --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/generate_language_headers.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2020 Google LLC +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and/or associated documentation files (the +# "Materials"), to deal in the Materials without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Materials, and to +# permit persons to whom the Materials are furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Materials. +# +# MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +# KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +# SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT +# https://www.khronos.org/registry/ +# +# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. + +"""Generates a C language header from a SPIR-V JSON grammar file""" + +import errno +import json +import os.path +import re + +DEFAULT_COPYRIGHT="""Copyright (c) 2020 The Khronos Group Inc.
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and/or associated documentation files (the +"Materials"), to deal in the Materials without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Materials, and to +permit persons to whom the Materials are furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Materials. + +MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS +KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS +SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT + https://www.khronos.org/registry/ + +THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +""".split('\n') + +def make_path_to_file(f): + """Makes all ancestor directories to the given file, if they + don't yet exist. + + Arguments: + f: The file whose ancestor directories are to be created. + """ + dir = os.path.dirname(os.path.abspath(f)) + try: + os.makedirs(dir) + except OSError as e: + if e.errno == errno.EEXIST and os.path.isdir(dir): + pass + else: + raise + +class ExtInstGrammar: + """The grammar for an extended instruction set""" + + def __init__(self, name, copyright, instructions, operand_kinds, version = None, revision = None): + self.name = name + self.copyright = copyright + self.instructions = instructions + self.operand_kinds = operand_kinds + self.version = version + self.revision = revision + + +class LangGenerator: + """A language-specific generator""" + + def __init__(self): + self.upper_case_initial = re.compile('^[A-Z]') + + def comment_prefix(self): + return "" + + def namespace_prefix(self): + return "" + + def uses_guards(self): + return False + + def cpp_guard_preamble(self): + return "" + + def cpp_guard_postamble(self): + return "" + + def enum_value(self, prefix, name, value): + if self.upper_case_initial.match(name): + use_name = name + else: + use_name = '_' + name + + return " {}{} = {},".format(prefix, use_name, value) + + def generate(self, grammar): + """Returns a string that is the language-specific header for the given grammar""" + + parts = [] + if grammar.copyright: + parts.extend(["{}{}".format(self.comment_prefix(), f) for f in grammar.copyright]) + parts.append('') + + guard = 'SPIRV_UNIFIED1_{}_H_'.format(grammar.name) + if self.uses_guards(): + parts.append('#ifndef {}'.format(guard)) + parts.append('#define {}'.format(guard)) + parts.append('') + + parts.append(self.cpp_guard_preamble()) + + if grammar.version: + parts.append(self.const_definition(grammar.name, 'Version', grammar.version)) + + if grammar.revision is not None: + parts.append(self.const_definition(grammar.name, 'Revision', grammar.revision)) + + parts.append('') + + if grammar.instructions: + parts.append(self.enum_prefix(grammar.name, 'Instructions')) + for inst in grammar.instructions: + parts.append(self.enum_value(grammar.name, inst['opname'], inst['opcode'])) + 
parts.append(self.enum_end(grammar.name, 'Instructions')) + parts.append('') + + if grammar.operand_kinds: + for kind in grammar.operand_kinds: + parts.append(self.enum_prefix(grammar.name, kind['kind'])) + for e in kind['enumerants']: + parts.append(self.enum_value(grammar.name, e['enumerant'], e['value'])) + parts.append(self.enum_end(grammar.name, kind['kind'])) + parts.append('') + + parts.append(self.cpp_guard_postamble()) + + if self.uses_guards(): + parts.append('#endif // {}'.format(guard)) + + # Ensure the file ends with a final newline + parts.append('') + + return '\n'.join(parts) + + +class CLikeGenerator(LangGenerator): + def uses_guards(self): + return True + + def comment_prefix(self): + return "// " + + def const_definition(self, prefix, var, value): + # Use an anonymous enum. Don't use a static const int variable because + # that can bloat binary size. + return 'enum {0}{1}{2}{3} = {4},{1}{2}{3}_BitWidthPadding = 0x7fffffff{5};'.format( + '{', '\n ', prefix, var, value, '\n}') + + def enum_prefix(self, prefix, name): + return 'enum {}{} {}'.format(prefix, name, '{') + + def enum_end(self, prefix, enum): + return ' {}{}Max = 0x7fffffff\n{};\n'.format(prefix, enum, '}') + + def cpp_guard_preamble(self): + return '#ifdef __cplusplus\nextern "C" {\n#endif\n' + + def cpp_guard_postamble(self): + return '#ifdef __cplusplus\n}\n#endif\n' + + +class CGenerator(CLikeGenerator): + pass + + +def main(): + import argparse + parser = argparse.ArgumentParser(description='Generate language headers from a JSON grammar') + + parser.add_argument('--extinst-name', + type=str, required=True, + help='The name to use in tokens') + parser.add_argument('--extinst-grammar', metavar='', + type=str, required=True, + help='input JSON grammar file for extended instruction set') + parser.add_argument('--extinst-output-base', metavar='', + type=str, required=True, + help='Basename of the language-specific output file.') + args = parser.parse_args() + + with open(args.extinst_grammar) as json_file: + grammar_json = json.loads(json_file.read()) + if 'copyright' in grammar_json: + copyright = grammar_json['copyright'] + else: + copyright = DEFAULT_COPYRIGHT + if 'version' in grammar_json: + version = grammar_json['version'] + else: + version = 0 + if 'operand_kinds' in grammar_json: + operand_kinds = grammar_json['operand_kinds'] + else: + operand_kinds = [] + + grammar = ExtInstGrammar(name = args.extinst_name, + copyright = copyright, + instructions = grammar_json['instructions'], + operand_kinds = operand_kinds, + version = version, + revision = grammar_json['revision']) + make_path_to_file(args.extinst_output_base) + with open(args.extinst_output_base + '.h', 'w') as f: + f.write(CGenerator().generate(grammar)) + + +if __name__ == '__main__': + main() diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeExtinstHeaders.py b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeExtinstHeaders.py new file mode 100755 index 000000000..9359747cc --- /dev/null +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeExtinstHeaders.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 +"""Generate C headers for certain extended instruction sets""" + +import subprocess +import os + +# Assume we are running from the tools/buildHeaders directory +os.chdir('../../include/spirv/unified1') + +def mk_extinst(name, grammar_file): + """Generate one C header from a grammar""" + script = '../../../tools/buildHeaders/bin/generate_language_headers.py' +
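+ # For example, mk_extinst('DebugInfo', 'extinst.debuginfo.grammar.json')
+ # below runs, from the unified1 directory selected by os.chdir above:
+ #   python3 ../../../tools/buildHeaders/bin/generate_language_headers.py \
+ #       --extinst-name=DebugInfo \
+ #       --extinst-grammar=extinst.debuginfo.grammar.json \
+ #       --extinst-output-base=DebugInfo
+ # and dos2unix then normalizes the resulting DebugInfo.h.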
subprocess.check_call(['python3', + script, + '--extinst-name=' + name, + '--extinst-grammar=' + grammar_file, + '--extinst-output-base=' + name]) + subprocess.check_call(['dos2unix', name + '.h']) + + +mk_extinst('DebugInfo', 'extinst.debuginfo.grammar.json') +mk_extinst('OpenCLDebugInfo100', 'extinst.opencl.debuginfo.100.grammar.json') +mk_extinst('AMD_gcn_shader', 'extinst.spv-amd-gcn-shader.grammar.json') +mk_extinst('AMD_shader_ballot', 'extinst.spv-amd-shader-ballot.grammar.json') +mk_extinst('AMD_shader_explicit_vertex_parameter', 'extinst.spv-amd-shader-explicit-vertex-parameter.grammar.json') +mk_extinst('AMD_shader_trinary_minmax', 'extinst.spv-amd-shader-trinary-minmax.grammar.json') +mk_extinst('NonSemanticDebugPrintf', 'extinst.nonsemantic.debugprintf.grammar.json') +mk_extinst('NonSemanticClspvReflection', 'extinst.nonsemantic.clspvreflection.grammar.json') diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeHeaders b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeHeaders index bf2c61515..0ca0b2f25 100755 --- a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeHeaders +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/bin/makeHeaders @@ -1,5 +1,7 @@ #!/usr/bin/env bash +python3 bin/makeExtinstHeaders.py + cd ../../include/spirv/unified1 ../../../tools/buildHeaders/build/install/bin/buildSpvHeaders -H spirv.core.grammar.json -dos2unix spirv.* +dos2unix spirv.* SpirV.* spv.* diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.cpp b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.cpp index b8b227f3b..926905e9c 100755 --- a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.cpp +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.cpp @@ -1,19 +1,19 @@ -// Copyright (c) 2014-2018 The Khronos Group Inc. -// +// Copyright (c) 2014-2020 The Khronos Group Inc. +// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and/or associated documentation files (the "Materials"), // to deal in the Materials without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Materials, and to permit persons to whom the // Materials are furnished to do so, subject to the following conditions: -// +// // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Materials. -// +// // MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS // STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -// +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// // THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL @@ -42,6 +42,7 @@ #include #include #include +#include <set> #include "jsoncpp/dist/json/json.h" @@ -68,9 +69,9 @@ namespace { TPrinter(); static const int DocMagicNumber = 0x07230203; - static const int DocVersion = 0x00010300; - static const int DocRevision = 1; - #define DocRevisionString "1" + static const int DocVersion = 0x00010500; + static const int DocRevision = 4; + #define DocRevisionString "4" static const std::string DocCopyright; static const std::string DocComment1; static const std::string DocComment2; @@ -97,7 +98,8 @@ namespace { virtual void printEpilogue(std::ostream&) const { } virtual void printMeta(std::ostream&) const; virtual void printTypes(std::ostream&) const { } - + virtual void printHasResultType(std::ostream&) const { }; + virtual std::string escapeComment(const std::string& s) const; // Default printComments() uses these comment strings @@ -107,7 +109,7 @@ namespace { virtual std::string commentEOL(bool isLast) const { return ""; } typedef std::pair<unsigned, std::string> valpair_t; - + // for printing enum values virtual std::string enumBeg(const std::string&, enumStyle_t) const { return ""; } virtual std::string enumEnd(const std::string&, enumStyle_t, bool isLast = false) const { @@ -126,7 +128,7 @@ namespace { const char* fmt, bool isLast = false) const { return ""; } - + std::vector<valpair_t> getSortedVals(const Json::Value&) const; virtual std::string indent(int count = 1) const { @@ -149,7 +151,7 @@ namespace { } void addComment(Json::Value& node, const std::string& str); - + Json::Value spvRoot; // JSON SPIR-V data }; @@ -167,7 +169,7 @@ namespace { } const std::string TPrinter::DocCopyright = - "Copyright (c) 2014-2018 The Khronos Group Inc.\n" + "Copyright (c) 2014-2020 The Khronos Group Inc.\n" "\n" "Permission is hereby granted, free of charge, to any person obtaining a copy\n" "of this software and/or associated documentation files (the \"Materials\"),\n" @@ -197,13 +199,16 @@ namespace { const std::string TPrinter::DocComment2 = "Enumeration tokens for SPIR-V, in various styles:\n" - " C, C++, C++11, JSON, Lua, Python\n" + " C, C++, C++11, JSON, Lua, Python, C#, D\n" "\n" "- C will have tokens with a \"Spv\" prefix, e.g.: SpvSourceLanguageGLSL\n" "- C++ will have tokens in the \"spv\" name space, e.g.: spv::SourceLanguageGLSL\n" "- C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL\n" "- Lua will use tables, e.g.: spv.SourceLanguage.GLSL\n" "- Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL']\n" + "- C# will use enum classes in the Specification class located in the \"Spv\" namespace,\n" + " e.g.: Spv.Specification.SourceLanguage.GLSL\n" + "- D will have tokens under the \"spv\" module, e.g.: spv.SourceLanguage.GLSL\n" "\n" "Some tokens act like mask values, which can be OR'd together,\n" "while others are mutually exclusive.
The mask-like ones have\n" @@ -290,7 +295,7 @@ namespace { { const int commentCount = spvRoot["spv"]["meta"]["Comment"].size(); int commentNum = 0; - + for (const auto& comment : spvRoot["spv"]["meta"]["Comment"]) { out << commentBeg(); @@ -322,7 +327,7 @@ namespace { void TPrinter::printDefs(std::ostream& out) const { const Json::Value& enums = spvRoot["spv"]["enum"]; - + for (auto opClass = enums.begin(); opClass != enums.end(); ++opClass) { const bool isMask = (*opClass)["Type"].asString() == "Bit"; const auto opName = (*opClass)["Name"].asString(); @@ -337,13 +342,13 @@ namespace { out << enumFmt(opPrefix, valpair_t(0, "MaskNone"), enumNoMask); const auto sorted = getSortedVals((*opClass)["Values"]); - - std::string maxEnum = maxEnumFmt(opName, valpair_t(0x7FFFFFFF, "Max"), enumHex); + + std::string maxEnum = maxEnumFmt(opName, valpair_t(0x7FFFFFFF, "Max"), enumHex); bool printMax = (style != enumMask && maxEnum.size() > 0); for (const auto& v : sorted) - out << enumFmt(opPrefix, v, style, !printMax && v.first == sorted.back().first); + out << enumFmt(opPrefix, v, style, !printMax && v.second == sorted.back().second); if (printMax) out << maxEnum; @@ -361,6 +366,7 @@ namespace { printTypes(out); printMeta(out); printDefs(out); + printHasResultType(out); printEpilogue(out); } @@ -390,7 +396,7 @@ namespace { } return newStr; } - + std::string fmtConstInt(unsigned val, const std::string& name, const char* fmt, bool isLast) const override { return indent(3) + '"' + name + "\": " + fmtNum("%d", val) + (isLast ? "\n" : ",\n"); @@ -475,7 +481,7 @@ namespace { } virtual void printEpilogue(std::ostream& out) const override { - out << "#endif // #ifndef spirv_" << headerGuardSuffix() << std::endl; + out << "#endif" << std::endl; } virtual void printTypes(std::ostream& out) const override { @@ -488,9 +494,48 @@ namespace { return std::string("static const unsigned int ") + pre() + name + " = " + fmtNum(fmt, val) + (isLast ? ";\n\n" : ";\n"); } - + virtual std::string pre() const { return ""; } // C name prefix virtual std::string headerGuardSuffix() const = 0; + + virtual std::string fmtEnumUse(const std::string& opPrefix, const std::string& name) const { return pre() + name; } + + virtual void printHasResultType(std::ostream& out) const + { + const Json::Value& enums = spvRoot["spv"]["enum"]; + + std::set<unsigned> seenValues; + + for (auto opClass = enums.begin(); opClass != enums.end(); ++opClass) { + const auto opName = (*opClass)["Name"].asString(); + if (opName != "Op") { + continue; + } + + out << "#ifdef SPV_ENABLE_UTILITY_CODE" << std::endl; + out << "inline void " << pre() << "HasResultAndType(" << pre() << opName << " opcode, bool *hasResult, bool *hasResultType) {" << std::endl; + out << " *hasResult = *hasResultType = false;" << std::endl; + out << " switch (opcode) {" << std::endl; + out << " default: /* unknown opcode */ break;" << std::endl; + + for (auto& inst : spv::InstructionDesc) { + + // Filter out duplicate enum values, which would break the switch statement. + // These are probably just extension enums promoted to core. + if (seenValues.find(inst.value) != seenValues.end()) { + continue; + } + seenValues.insert(inst.value); + + std::string name = inst.name; + out << " case " << fmtEnumUse("Op", name) << ": *hasResult = " << (inst.hasResult() ? "true" : "false") << "; *hasResultType = " << (inst.hasType() ?
"true" : "false") << "; break;" << std::endl; + } + + out << " }" << std::endl; + out << "}" << std::endl; + out << "#endif /* SPV_ENABLE_UTILITY_CODE */" << std::endl << std::endl; + } + } }; // C printer @@ -542,19 +587,19 @@ namespace { if (isMask) { const auto typeName = opName + styleStr(enumMask); - + out << "inline " + typeName + " operator|(" + typeName + " a, " + typeName + " b) { return " + typeName + "(unsigned(a) | unsigned(b)); }\n"; } } out << "\n} // end namespace spv\n\n"; - TPrinterCBase::printEpilogue(out); + out << "#endif // #ifndef spirv_" << headerGuardSuffix() << std::endl; } std::string commentBOL() const override { return "// "; } - + virtual std::string enumBeg(const std::string& s, enumStyle_t style) const override { return std::string("enum ") + s + styleStr(style) + " {\n"; } @@ -597,6 +642,9 @@ namespace { return enumFmt(s, v, style, true); } + // Add type prefix for scoped enum + virtual std::string fmtEnumUse(const std::string& opPrefix, const std::string& name) const { return opPrefix + "::" + name; } + std::string headerGuardSuffix() const override { return "HPP"; } }; @@ -658,6 +706,76 @@ namespace { } }; + // C# printer + class TPrinterCSharp final : public TPrinter { + private: + std::string commentBOL() const override { return "// "; } + + void printPrologue(std::ostream& out) const override { + out << "namespace Spv\n{\n\n"; + out << indent() << "public static class Specification\n"; + out << indent() << "{\n"; + } + + void printEpilogue(std::ostream& out) const override { + out << indent() << "}\n"; + out << "}\n"; + } + + std::string enumBeg(const std::string& s, enumStyle_t style) const override { + return indent(2) + "public enum " + s + styleStr(style) + "\n" + indent(2) + "{\n"; + } + + std::string enumEnd(const std::string& s, enumStyle_t style, bool isLast) const override { + return indent(2) + "}" + + (isLast ? "\n" : "\n\n"); + } + + std::string enumFmt(const std::string& s, const valpair_t& v, + enumStyle_t style, bool isLast) const override { + return indent(3) + prependIfDigit(s, v.second) + " = " + fmtStyleVal(v.first, style) + ",\n"; + } + + std::string fmtConstInt(unsigned val, const std::string& name, + const char* fmt, bool isLast) const override { + return indent(2) + std::string("public const uint ") + name + + " = " + fmtNum(fmt, val) + (isLast ? ";\n\n" : ";\n"); + } + }; + + // D printer + class TPrinterD final : public TPrinter { + private: + std::string commentBeg() const override { return "/+\n"; } + std::string commentBOL() const override { return " + "; } + std::string commentEnd(bool isLast) const override { return " +/\n"; } + + void printPrologue(std::ostream& out) const override { + out << "module spv;\n\n"; + } + + void printEpilogue(std::ostream& out) const override { + } + + std::string enumBeg(const std::string& s, enumStyle_t style) const override { + return "enum " + s + styleStr(style) + " : uint\n{\n"; + } + + std::string enumEnd(const std::string& s, enumStyle_t style, bool isLast) const override { + return std::string("}\n\n"); + } + + std::string enumFmt(const std::string& s, const valpair_t& v, + enumStyle_t style, bool isLast) const override { + return indent() + prependIfDigit("_", v.second) + " = " + fmtStyleVal(v.first, style) + ",\n"; + } + + std::string fmtConstInt(unsigned val, const std::string& name, + const char* fmt, bool isLast) const override { + return std::string("enum uint ") + name + + " = " + fmtNum(fmt, val) + (isLast ? 
";\n\n" : ";\n"); + } + }; + } // namespace namespace spv { @@ -672,6 +790,8 @@ namespace spv { langInfo.push_back(std::make_pair(ELangJSON, "spirv.json")); langInfo.push_back(std::make_pair(ELangLua, "spirv.lua")); langInfo.push_back(std::make_pair(ELangPython, "spirv.py")); + langInfo.push_back(std::make_pair(ELangCSharp, "spirv.cs")); + langInfo.push_back(std::make_pair(ELangD, "spv.d")); for (const auto& lang : langInfo) { std::ofstream out(lang.second, std::ios::out); @@ -697,6 +817,8 @@ namespace spv { case ELangJSON: p = TPrinterPtr(new TPrinterJSON); break; case ELangLua: p = TPrinterPtr(new TPrinterLua); break; case ELangPython: p = TPrinterPtr(new TPrinterPython); break; + case ELangCSharp: p = TPrinterPtr(new TPrinterCSharp); break; + case ELangD: p = TPrinterPtr(new TPrinterD); break; case ELangAll: PrintAllHeaders(); break; default: std::cerr << "Unknown language." << std::endl; diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.h b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.h index 5a0952db2..9c34b2112 100755 --- a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.h +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/header.h @@ -1,19 +1,19 @@ -// Copyright (c) 2014-2018 The Khronos Group Inc. -// +// Copyright (c) 2014-2019 The Khronos Group Inc. +// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and/or associated documentation files (the "Materials"), // to deal in the Materials without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Materials, and to permit persons to whom the // Materials are furnished to do so, subject to the following conditions: -// +// // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Materials. -// +// // MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS // STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -// +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// // THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL @@ -41,6 +41,8 @@ namespace spv { ELangJSON, // JSON ELangLua, // Lua ELangPython, // Python + ELangCSharp, // CSharp + ELangD, // D ELangAll, // print headers in all languages to files }; diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.cpp b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.cpp index bb325661e..17d2ea47c 100755 --- a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.cpp +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.cpp @@ -1,4 +1,4 @@ -// Copyright (c) 2014-2018 The Khronos Group Inc. +// Copyright (c) 2014-2020 The Khronos Group Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and/or associated documentation files (the "Materials"), @@ -25,8 +25,10 @@ #include #include #include +#include #include #include +#include #include #include @@ -40,6 +42,10 @@ namespace spv { // parameterization information. 
InstructionValues InstructionDesc; +// The ordered list (in printing order) of printing classes +// (specification subsections). +PrintingClasses InstructionPrintingClasses; + // Note: There is no entry for OperandOpcode. Use InstructionDesc instead. EnumDefinition OperandClassParams[OperandOpcode]; EnumValues SourceLanguageParams; @@ -56,6 +62,8 @@ EnumValues ImageChannelDataTypeParams; EnumValues ImageOperandsParams; EnumValues FPFastMathParams; EnumValues FPRoundingModeParams; +EnumValues FPDenormModeParams; +EnumValues FPOperationModeParams; EnumValues LinkageTypeParams; EnumValues DecorationParams; EnumValues BuiltInParams; @@ -72,6 +80,11 @@ EnumValues ScopeParams; EnumValues KernelEnqueueFlagsParams; EnumValues KernelProfilingInfoParams; EnumValues CapabilityParams; +EnumValues RayFlagsParams; +EnumValues RayQueryIntersectionParams; +EnumValues RayQueryCommittedIntersectionTypeParams; +EnumValues RayQueryCandidateIntersectionTypeParams; +EnumValues FragmentShadingRateParams; std::pair ReadFile(const std::string& path) { @@ -119,8 +132,7 @@ ClassOptionality ToOperandClassAndOptionality(const std::string& operandKind, co else if (quantifier == "?") return {OperandLiteralString, true}; else { - assert(0 && "this case should not exist"); - return {OperandNone, false}; + return {OperandOptionalLiteralStrings, false}; } } else if (operandKind == "PairLiteralIntegerIdRef") { // Used by OpSwitch in the grammar @@ -142,7 +154,7 @@ ClassOptionality ToOperandClassAndOptionality(const std::string& operandKind, co } else if (operandKind == "LiteralSpecConstantOpInteger") { type = OperandLiteralNumber; } else if (operandKind == "LiteralContextDependentNumber") { - type = OperandVariableLiterals; + type = OperandAnySizeLiteralNumber; } else if (operandKind == "SourceLanguage") { type = OperandSource; } else if (operandKind == "ExecutionModel") { @@ -169,6 +181,10 @@ ClassOptionality ToOperandClassAndOptionality(const std::string& operandKind, co type = OperandImageChannelDataType; } else if (operandKind == "FPRoundingMode") { type = OperandFPRoundingMode; + } else if (operandKind == "FPDenormMode") { + type = OperandFPDenormMode; + } else if (operandKind == "FPOperationMode") { + type = OperandFPOperationMode; } else if (operandKind == "LinkageType") { type = OperandLinkageType; } else if (operandKind == "AccessQualifier") { @@ -198,7 +214,17 @@ ClassOptionality ToOperandClassAndOptionality(const std::string& operandKind, co } else if (operandKind == "FunctionControl") { type = OperandFunction; } else if (operandKind == "MemoryAccess") { - type = OperandMemoryAccess; + type = OperandMemoryOperands; + } else if (operandKind == "RayFlags") { + type = OperandRayFlags; + } else if (operandKind == "RayQueryIntersection") { + type = OperandRayQueryIntersection; + } else if (operandKind == "RayQueryCommittedIntersectionType") { + type = OperandRayQueryCommittedIntersectionType; + } else if (operandKind == "RayQueryCandidateIntersectionType") { + type = OperandRayQueryCandidateIntersectionType; + } else if (operandKind == "FragmentShadingRate") { + type = OperandFragmentShadingRate; } if (type == OperandNone) { @@ -230,7 +256,7 @@ unsigned int NumberStringToBit(const std::string& str) return bit; } -void jsonToSpirv(const std::string& jsonPath) +void jsonToSpirv(const std::string& jsonPath, bool buildingHeaders) { // only do this once. 
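+ // buildingHeaders keeps every instruction, including those whose printing
+ // class is "@exclude"; the specification build passes false, which filters
+ // the "@exclude" instructions out below.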
static bool initialized = false; @@ -285,12 +311,55 @@ void jsonToSpirv(const std::string& jsonPath) return result; }; + // set up the printing classes + std::unordered_set<std::string> tags; // short-lived local for error checking below + const Json::Value printingClasses = root["instruction_printing_class"]; + for (const auto& printingClass : printingClasses) { + if (printingClass["tag"].asString().size() > 0) + tags.insert(printingClass["tag"].asString()); // just for error checking + else + std::cerr << "Error: each instruction_printing_class requires a non-empty \"tag\"" << std::endl; + if (buildingHeaders || printingClass["tag"].asString() != "@exclude") { + InstructionPrintingClasses.push_back({printingClass["tag"].asString(), + printingClass["heading"].asString()}); + } + } + + // process the instructions const Json::Value insts = root["instructions"]; + unsigned maxOpcode = 0; + bool firstOpcode = true; for (const auto& inst : insts) { - const unsigned int opcode = inst["opcode"].asUInt(); + const auto printingClass = inst["class"].asString(); + if (printingClass.size() == 0) { + std::cerr << "Error: " << inst["opname"].asString() + << " requires a non-empty printing \"class\" tag" << std::endl; + } + if (!buildingHeaders && printingClass == "@exclude") + continue; + if (tags.find(printingClass) == tags.end()) { + std::cerr << "Error: " << inst["opname"].asString() + << " requires a \"class\" declared as a \"tag\" in \"instruction_printing_class\"" + << std::endl; + } + const auto opcode = inst["opcode"].asUInt(); const std::string name = inst["opname"].asString(); + if (firstOpcode) { + maxOpcode = opcode; + firstOpcode = false; + } else { + if (maxOpcode > opcode) { + std::cerr << "Error: " << name + << " is out of order. It follows the instruction with opcode " << maxOpcode + << std::endl; + std::exit(1); + } else { + maxOpcode = opcode; + } + } EnumCaps caps = getCaps(inst); std::string version = inst["version"].asString(); + std::string lastVersion = inst["lastVersion"].asString(); Extensions exts = getExts(inst); OperandParameters operands; bool defResultId = false; @@ -306,9 +375,9 @@ void jsonToSpirv(const std::string& jsonPath) } InstructionDesc.emplace_back( std::move(EnumValue(opcode, name, - std::move(caps), std::move(version), std::move(exts), + std::move(caps), std::move(version), std::move(lastVersion), std::move(exts), std::move(operands))), - defTypeId, defResultId); + printingClass, defTypeId, defResultId); } // Specific additional context-dependent operands @@ -331,14 +400,30 @@ void jsonToSpirv(const std::string& jsonPath) return result; }; + unsigned maxValue = 0; + bool firstValue = true; for (const auto& enumerant : source["enumerants"]) { unsigned value; bool skip_zero_in_bitfield; std::tie(value, skip_zero_in_bitfield) = getValue(enumerant); if (skip_zero_in_bitfield) continue; + if (firstValue) { + maxValue = value; + firstValue = false; + } else { + if (maxValue > value) { + std::cerr << "Error: " << source["kind"] << " enumerant " << enumerant["enumerant"] + << " is out of order.
It has value " << value + << " but follows the enumerant with value " << maxValue << std::endl; + std::exit(1); + } else { + maxValue = value; + } + } EnumCaps caps(getCaps(enumerant)); std::string version = enumerant["version"].asString(); + std::string lastVersion = enumerant["lastVersion"].asString(); Extensions exts(getExts(enumerant)); OperandParameters params; const Json::Value& paramsJson = enumerant["parameters"]; @@ -353,7 +438,7 @@ void jsonToSpirv(const std::string& jsonPath) } dest->emplace_back( value, enumerant["enumerant"].asString(), - std::move(caps), std::move(version), std::move(exts), std::move(params)); + std::move(caps), std::move(version), std::move(lastVersion), std::move(exts), std::move(params)); } }; @@ -404,6 +489,10 @@ void jsonToSpirv(const std::string& jsonPath) establishOperandClass(enumName, OperandFPFastMath, &FPFastMathParams, operandEnum, category); } else if (enumName == "FPRoundingMode") { establishOperandClass(enumName, OperandFPRoundingMode, &FPRoundingModeParams, operandEnum, category); + } else if (enumName == "FPDenormMode") { + establishOperandClass(enumName, OperandFPDenormMode, &FPDenormModeParams, operandEnum, category); + } else if (enumName == "FPOperationMode") { + establishOperandClass(enumName, OperandFPOperationMode, &FPOperationModeParams, operandEnum, category); } else if (enumName == "LinkageType") { establishOperandClass(enumName, OperandLinkageType, &LinkageTypeParams, operandEnum, category); } else if (enumName == "FunctionParameterAttribute") { @@ -421,7 +510,7 @@ void jsonToSpirv(const std::string& jsonPath) } else if (enumName == "Dim") { establishOperandClass(enumName, OperandDimensionality, &DimensionalityParams, operandEnum, category); } else if (enumName == "MemoryAccess") { - establishOperandClass(enumName, OperandMemoryAccess, &MemoryAccessParams, operandEnum, category); + establishOperandClass(enumName, OperandMemoryOperands, &MemoryAccessParams, operandEnum, category); } else if (enumName == "Scope") { establishOperandClass(enumName, OperandScope, &ScopeParams, operandEnum, category); } else if (enumName == "GroupOperation") { @@ -430,6 +519,16 @@ void jsonToSpirv(const std::string& jsonPath) establishOperandClass(enumName, OperandKernelEnqueueFlags, &KernelEnqueueFlagsParams, operandEnum, category); } else if (enumName == "KernelProfilingInfo") { establishOperandClass(enumName, OperandKernelProfilingInfo, &KernelProfilingInfoParams, operandEnum, category); + } else if (enumName == "RayFlags") { + establishOperandClass(enumName, OperandRayFlags, &RayFlagsParams, operandEnum, category); + } else if (enumName == "RayQueryIntersection") { + establishOperandClass(enumName, OperandRayQueryIntersection, &RayQueryIntersectionParams, operandEnum, category); + } else if (enumName == "RayQueryCommittedIntersectionType") { + establishOperandClass(enumName, OperandRayQueryCommittedIntersectionType, &RayQueryCommittedIntersectionTypeParams, operandEnum, category); + } else if (enumName == "RayQueryCandidateIntersectionType") { + establishOperandClass(enumName, OperandRayQueryCandidateIntersectionType, &RayQueryCandidateIntersectionTypeParams, operandEnum, category); + } else if (enumName == "FragmentShadingRate") { + establishOperandClass(enumName, OperandFragmentShadingRate, &FragmentShadingRateParams, operandEnum, category); } } } diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.h b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.h index 00a2f70d9..51aa763c0 100755 --- 
a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.h +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/jsonToSpirv.h @@ -1,4 +1,4 @@ -// Copyright (c) 2014-2018 The Khronos Group Inc. +// Copyright (c) 2014-2020 The Khronos Group Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and/or associated documentation files (the "Materials"), @@ -38,18 +38,22 @@ namespace spv { std::pair ReadFile(const std::string& path); // Fill in all the parameters -void jsonToSpirv(const std::string& jsonPath); +void jsonToSpirv(const std::string& jsonPath, bool buildingHeaders); // For parameterizing operands. +// The ordering here affects the printing order in the SPIR-V specification. +// Please add new operand classes at the end. enum OperandClass { OperandNone, OperandId, OperandVariableIds, OperandOptionalLiteral, OperandOptionalLiteralString, + OperandOptionalLiteralStrings, OperandVariableLiterals, OperandVariableIdLiteral, OperandVariableLiteralId, + OperandAnySizeLiteralNumber, OperandLiteralNumber, OperandLiteralString, OperandSource, @@ -76,18 +80,32 @@ enum OperandClass { OperandLoop, OperandFunction, OperandMemorySemantics, - OperandMemoryAccess, + OperandMemoryOperands, OperandScope, - OperandGroupOperation, + OperandGroupOperation, OperandKernelEnqueueFlags, OperandKernelProfilingInfo, OperandCapability, + OperandRayFlags, + OperandRayQueryIntersection, + OperandRayQueryCommittedIntersectionType, + OperandRayQueryCandidateIntersectionType, + OperandFragmentShadingRate, + OperandFPDenormMode, + OperandFPOperationMode, OperandOpcode, OperandCount }; +// For direct representation of the JSON grammar "instruction_printing_class". +struct PrintingClass { + std::string tag; + std::string heading; +}; +using PrintingClasses = std::vector<PrintingClass>; + // Any specific enum can have a set of capabilities that allow it: typedef std::vector<std::string> EnumCaps; @@ -145,6 +163,12 @@ public: assert((where != end()) && "Could not find enum in the enum list"); return *where; } + // gets *all* entries for the value, including the first one + void gatherAliases(unsigned value, std::vector<EValue*>& aliases) { + std::for_each(begin(), end(), [&](EValue& e) { + if (value == e.value) + aliases.push_back(&e);}); + } // Returns the EValue with the given name. We assume uniqueness // by name. EValue& at(std::string name) { @@ -167,9 +191,11 @@ private: class EnumValue { public: EnumValue() : value(0), desc(nullptr) {} - EnumValue(unsigned int the_value, const std::string& the_name, EnumCaps&& the_caps, const std::string& the_version, - Extensions&& the_extensions, OperandParameters&& the_operands) : - value(the_value), name(the_name), capabilities(std::move(the_caps)), version(std::move(the_version)), + EnumValue(unsigned int the_value, const std::string& the_name, EnumCaps&& the_caps, + const std::string& the_firstVersion, const std::string& the_lastVersion, + Extensions&& the_extensions, OperandParameters&& the_operands) : + value(the_value), name(the_name), capabilities(std::move(the_caps)), + firstVersion(std::move(the_firstVersion)), lastVersion(std::move(the_lastVersion)), + extensions(std::move(the_extensions)), operands(std::move(the_operands)), desc(nullptr) { } // For ValueEnum, the value from the JSON file. @@ -178,7 +204,8 @@ public: unsigned value; std::string name; EnumCaps capabilities; - std::string version; + std::string firstVersion; + std::string lastVersion; // A feature can only be enabled by certain extensions.
// An empty list means the feature does not require an extension. // Normally, only Capability enums are enabled by extension. In turn, @@ -228,28 +255,39 @@ public: // per OperandParameters above. class InstructionValue : public EnumValue { public: - InstructionValue(EnumValue&& e, bool has_type, bool has_result) + InstructionValue(EnumValue&& e, const std::string& printClass, bool has_type, bool has_result) : EnumValue(std::move(e)), + printingClass(printClass), opDesc("TBD"), - opClass(0), typePresent(has_type), - resultPresent(has_result) {} + resultPresent(has_result), + alias(this) { } + InstructionValue(const InstructionValue& v) + { + *this = v; + alias = this; + } bool hasResult() const { return resultPresent != 0; } bool hasType() const { return typePresent != 0; } + void setAlias(const InstructionValue& a) { alias = &a; } + const InstructionValue& getAlias() const { return *alias; } + bool isAlias() const { return alias != this; } + std::string printingClass; const char* opDesc; - int opClass; protected: int typePresent : 1; int resultPresent : 1; + const InstructionValue* alias; // correct only after discovering the aliases; otherwise points to this }; using InstructionValues = EnumValuesContainer<InstructionValue>; // Parameterization info for all instructions. extern InstructionValues InstructionDesc; +extern PrintingClasses InstructionPrintingClasses; // These hold definitions of the enumerants used for operands. // This is indexed by OperandClass, but not including OperandOpcode. diff --git a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/main.cpp b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/main.cpp index e146b39b4..7e5f7f808 100755 --- a/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/main.cpp +++ b/externals/sirit/externals/SPIRV-Headers/tools/buildHeaders/main.cpp @@ -1,19 +1,19 @@ -// Copyright (c) 2014-2018 The Khronos Group Inc. -// +// Copyright (c) 2014-2019 The Khronos Group Inc. +// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and/or associated documentation files (the "Materials"), // to deal in the Materials without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Materials, and to permit persons to whom the // Materials are furnished to do so, subject to the following conditions: -// +// // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Materials. -// +// // MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS // STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -// +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// // THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL @@ -50,6 +50,8 @@ void Usage() " JSON - JSON format data\n" " Lua - Lua module\n" " Python - Python module (also accepts Py)\n" + " C# - C# module (also accepts CSharp)\n" + " D - D module\n" " -H print header in all supported languages to files in current directory\n" ); } @@ -90,6 +92,10 @@ bool ProcessArguments(int argc, char* argv[]) Language = spv::ELangLua; } else if (language == "python" || language == "py") { Language = spv::ELangPython; + } else if (language == "c#" || language == "csharp") { + Language = spv::ELangCSharp; + } else if (language == "d") { + Language = spv::ELangD; } else return false; @@ -113,7 +119,7 @@ int main(int argc, char* argv[]) return 1; } - spv::jsonToSpirv(jsonPath); + spv::jsonToSpirv(jsonPath, (Options & EOptionPrintHeader) != 0); if (Options & EOptionPrintHeader) spv::PrintHeader(Language, std::cout); diff --git a/externals/sirit/include/sirit/sirit.h b/externals/sirit/include/sirit/sirit.h index 5d90b334b..3baa56f30 100755 --- a/externals/sirit/include/sirit/sirit.h +++ b/externals/sirit/include/sirit/sirit.h @@ -6,25 +6,39 @@ #pragma once +#include #include +#include #include #include +#include #include +#include +#include #include #include #include + #include namespace Sirit { constexpr std::uint32_t GENERATOR_MAGIC_NUMBER = 0; -class Op; +class Declarations; class Operand; +class Stream; using Literal = std::variant; -using Id = const Op*; + +struct Id { + std::uint32_t value; +}; + +[[nodiscard]] inline bool ValidId(Id id) noexcept { + return id.value != 0; +} class Module { public: @@ -39,6 +53,9 @@ public: */ std::vector Assemble() const; + /// Patches deferred phi nodes calling the passed function on each phi argument + void PatchDeferredPhi(const std::function& func); + /// Adds a SPIR-V extension. void AddExtension(std::string extension_name); @@ -49,24 +66,29 @@ public: void SetMemoryModel(spv::AddressingModel addressing_model_, spv::MemoryModel memory_model_); /// Adds an entry point. - void AddEntryPoint(spv::ExecutionModel execution_model, Id entry_point, std::string name, - const std::vector& interfaces = {}); + void AddEntryPoint(spv::ExecutionModel execution_model, Id entry_point, std::string_view name, + std::span interfaces = {}); /// Adds an entry point. + // TODO: Change std::is_convertible_v to std::convertible_to when compilers + // support it; same elsewhere. template - void AddEntryPoint(spv::ExecutionModel execution_model, Id entry_point, std::string name, - Ts&&... interfaces) { - AddEntryPoint(execution_model, std::move(entry_point), name, {interfaces...}); + requires(...&& std::is_convertible_v) void AddEntryPoint( + spv::ExecutionModel execution_model, Id entry_point, std::string_view name, + Ts&&... interfaces) { + AddEntryPoint(execution_model, std::move(entry_point), name, + std::span({interfaces...})); } /// Declare an execution mode for an entry point. void AddExecutionMode(Id entry_point, spv::ExecutionMode mode, - const std::vector& literals = {}); + std::span literals = {}); /// Declare an execution mode for an entry point. template - void AddExecutionMode(Id entry_point, spv::ExecutionMode mode, Ts&&... literals) { - AddExecutionMode(entry_point, mode, {literals...}); + requires(...&& std::is_convertible_v) void AddExecutionMode( + Id entry_point, spv::ExecutionMode mode, Ts&&... 
literals) { + AddExecutionMode(entry_point, mode, std::span({literals...})); } /** @@ -84,19 +106,13 @@ public: return AddLabel(OpLabel()); } - /** - * Adds a local variable to the code - * @param variable Variable to insert into code. - * @return Returns variable. - */ - Id AddLocalVariable(Id label); + /// Adds a local variable to the code + Id AddLocalVariable(Id result_type, spv::StorageClass storage_class, + std::optional initializer = std::nullopt); - /** - * Adds a global variable - * @param variable Global variable to add. - * @return Returns variable. - */ - Id AddGlobalVariable(Id variable); + /// Adds a global variable + Id AddGlobalVariable(Id result_type, spv::StorageClass storage_class, + std::optional initializer = std::nullopt); // Types @@ -121,7 +137,7 @@ public: /// Returns type image. Id TypeImage(Id sampled_type, spv::Dim dim, int depth, bool arrayed, bool ms, int sampled, spv::ImageFormat image_format, - std::optional access_qualifier = {}); + std::optional access_qualifier = std::nullopt); /// Returns type sampler. Id TypeSampler(); @@ -136,27 +152,28 @@ public: Id TypeRuntimeArray(Id element_type); /// Returns type struct. - Id TypeStruct(const std::vector& members = {}); + Id TypeStruct(std::span members = {}); /// Returns type struct. template - Id TypeStruct(Ts&&... members) { - return TypeStruct({members...}); + requires(...&& std::is_convertible_v) Id TypeStruct(Ts&&... members) { + return TypeStruct(std::span({members...})); } /// Returns type opaque. - Id TypeOpaque(std::string name); + Id TypeOpaque(std::string_view name); /// Returns type pointer. Id TypePointer(spv::StorageClass storage_class, Id type); /// Returns type function. - Id TypeFunction(Id return_type, const std::vector& arguments = {}); + Id TypeFunction(Id return_type, std::span arguments = {}); /// Returns type function. template - Id TypeFunction(Id return_type, Ts&&... arguments) { - return OpTypeFunction(return_type, {arguments...}); + requires(...&& std::is_convertible_v) Id + TypeFunction(Id return_type, Ts&&... arguments) { + return TypeFunction(return_type, std::span({arguments...})); } /// Returns type event. @@ -186,12 +203,13 @@ public: Id Constant(Id result_type, const Literal& literal); /// Returns a numeric scalar constant. - Id ConstantComposite(Id result_type, const std::vector& constituents); + Id ConstantComposite(Id result_type, std::span constituents); /// Returns a numeric scalar constant. template - Id ConstantComposite(Id result_type, Ts&&... constituents) { - return ConstantComposite(result_type, {constituents...}); + requires(...&& std::is_convertible_v) Id + ConstantComposite(Id result_type, Ts&&... constituents) { + return ConstantComposite(result_type, std::span({constituents...})); } /// Returns a sampler constant. @@ -207,28 +225,50 @@ public: Id OpFunction(Id result_type, spv::FunctionControlMask function_control, Id function_type); /// Ends a function. - Id OpFunctionEnd(); + void OpFunctionEnd(); /// Call a function. - Id OpFunctionCall(Id result_type, Id function, const std::vector& arguments = {}); + Id OpFunctionCall(Id result_type, Id function, std::span arguments = {}); /// Call a function. template - Id OpFunctionCall(Id result_type, Id function, Ts&&... arguments) { - return OpFunctionCall(result_type, function, {arguments...}); + requires(...&& std::is_convertible_v) Id + OpFunctionCall(Id result_type, Id function, Ts&&... 
arguments) { + return OpFunctionCall(result_type, function, std::span({arguments...})); } + /// Declare a formal parameter of the current function. + Id OpFunctionParameter(Id result_type); + // Flow + /** + * The SSA phi function. + * + * @param result_type The result type. + * @param operands An immutable span of variable, parent block pairs + */ + Id OpPhi(Id result_type, std::span operands); + + /** + * The SSA phi function. This instruction will be revisited when patching phi nodes. + * + * @param result_type The result type. + * @param blocks An immutable span of block pairs. + */ + Id DeferredOpPhi(Id result_type, std::span blocks); + /// Declare a structured loop. Id OpLoopMerge(Id merge_block, Id continue_target, spv::LoopControlMask loop_control, - const std::vector& literals = {}); + std::span literals = {}); /// Declare a structured loop. template - Id OpLoopMerge(Id merge_block, Id continue_target, spv::LoopControlMask loop_control, - Ts&&... literals) { - return OpLoopMerge(merge_block, continue_target, loop_control, {literals...}); + requires(...&& std::is_convertible_v) Id + OpLoopMerge(Id merge_block, Id continue_target, spv::LoopControlMask loop_control, + Ts&&... literals) { + return OpLoopMerge(merge_block, continue_target, loop_control, + std::span({literals...})); } /// Declare a structured selection. @@ -238,8 +278,8 @@ public: Id OpLabel(); /// The block label instruction: Any reference to a block is through this ref. - Id OpLabel(std::string label_name) { - return Name(OpLabel(), std::move(label_name)); + Id OpLabel(std::string_view label_name) { + return Name(OpLabel(), label_name); } /// Unconditional jump to label. @@ -251,55 +291,62 @@ public: std::uint32_t true_weight = 0, std::uint32_t false_weight = 0); /// Multi-way branch to one of the operand label. - Id OpSwitch(Id selector, Id default_label, const std::vector& literals, - const std::vector& labels); + Id OpSwitch(Id selector, Id default_label, std::span literals, + std::span labels); /// Returns with no value from a function with void return type. - Id OpReturn(); + void OpReturn(); + + /// Behavior is undefined if this instruction is executed. + void OpUnreachable(); /// Return a value from a function. Id OpReturnValue(Id value); /// Fragment-shader discard. - Id OpKill(); + void OpKill(); + + /// Demote fragment shader invocation to a helper invocation + void OpDemoteToHelperInvocationEXT(); // Debug /// Assign a name string to a reference. /// @return target - Id Name(Id target, std::string name); + Id Name(Id target, std::string_view name); /// Assign a name string to a member of a structure type. /// @return type - Id MemberName(Id type, std::uint32_t member, std::string name); + Id MemberName(Id type, std::uint32_t member, std::string_view name); /// Assign a Result to a string for use by other debug instructions. - Id String(std::string string); + Id String(std::string_view string); /// Add source-level location information Id OpLine(Id file, Literal line, Literal column); // Memory - /// Allocate an object in memory, resulting in a copy to it. - Id OpVariable(Id result_type, spv::StorageClass storage_class, Id initializer = nullptr); - - /// Form a pointer to a texel of an image. Use of such a pointer is limited to atomic operations. + /// Form a pointer to a texel of an image. Use of such a pointer is limited to atomic + /// operations. Id OpImageTexelPointer(Id result_type, Id image, Id coordinate, Id sample); /// Load through a pointer. 
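+ /// (The optional memory_access operand below is a spv::MemoryAccessMask;
+ /// e.g. a volatile access can be requested by passing
+ /// spv::MemoryAccessMask::Volatile to OpLoad or OpStore.)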
- Id OpLoad(Id result_type, Id pointer, std::optional memory_access = {}); + Id OpLoad(Id result_type, Id pointer, + std::optional memory_access = std::nullopt); /// Store through a pointer. - Id OpStore(Id pointer, Id object, std::optional memory_access = {}); + Id OpStore(Id pointer, Id object, + std::optional memory_access = std::nullopt); /// Create a pointer into a composite object that can be used with OpLoad and OpStore. - Id OpAccessChain(Id result_type, Id base, const std::vector& indexes = {}); + Id OpAccessChain(Id result_type, Id base, std::span indexes = {}); /// Create a pointer into a composite object that can be used with OpLoad and OpStore. template - Id OpAccessChain(Id result_type, Id base, Ts&&... indexes) { - return OpAccessChain(result_type, base, {indexes...}); + requires(...&& std::is_convertible_v) Id + OpAccessChain(Id result_type, Id base, Ts&&... indexes) { + return OpAccessChain(result_type, base, std::span({indexes...})); } /// Extract a single, dynamically selected, component of a vector. @@ -310,50 +357,67 @@ public: /// Make a copy of a composite object, while modifying one part of it. Id OpCompositeInsert(Id result_type, Id object, Id composite, - const std::vector& indexes = {}); + std::span indexes = {}); /// Make a copy of a composite object, while modifying one part of it. template - Id OpCompositeInsert(Id result_type, Id object, Id composite, Ts&&... indexes) { - return OpCompositeInsert(result_type, object, composite, {indexes...}); + requires(...&& std::is_convertible_v) Id + OpCompositeInsert(Id result_type, Id object, Id composite, Ts&&... indexes) { + const Literal stack_indexes[] = {std::forward(indexes)...}; + return OpCompositeInsert(result_type, object, composite, + std::span{stack_indexes}); } /// Extract a part of a composite object. - Id OpCompositeExtract(Id result_type, Id composite, const std::vector& indexes = {}); + Id OpCompositeExtract(Id result_type, Id composite, std::span indexes = {}); /// Extract a part of a composite object. template - Id OpCompositeExtract(Id result_type, Id composite, Ts&&... indexes) { - return OpCompositeExtract(result_type, composite, {indexes...}); + requires(...&& std::is_convertible_v) Id + OpCompositeExtract(Id result_type, Id composite, Ts&&... indexes) { + const Literal stack_indexes[] = {std::forward(indexes)...}; + return OpCompositeExtract(result_type, composite, std::span{stack_indexes}); } /// Construct a new composite object from a set of constituent objects that will fully form it. - Id OpCompositeConstruct(Id result_type, const std::vector& ids); + Id OpCompositeConstruct(Id result_type, std::span ids); /// Construct a new composite object from a set of constituent objects that will fully form it. template - Id OpCompositeConstruct(Id result_type, Ts&&... ids) { - return OpCompositeConstruct(result_type, {ids...}); + requires(...&& std::is_convertible_v) Id + OpCompositeConstruct(Id result_type, Ts&&... ids) { + return OpCompositeConstruct(result_type, std::span({ids...})); } // Annotation /// Add a decoration to target. - Id Decorate(Id target, spv::Decoration decoration, const std::vector& literals = {}); + Id Decorate(Id target, spv::Decoration decoration, std::span literals = {}); /// Add a decoration to target. template - Id Decorate(Id target, spv::Decoration decoration, Ts&&... literals) { - return Decorate(target, decoration, {literals...}); + requires(...&& std::is_convertible_v) Id + Decorate(Id target, spv::Decoration decoration, Ts&&... 
literals) { + const Literal stack_literals[] = {std::forward(literals)...}; + return Decorate(target, decoration, std::span{stack_literals}); + } + + /// Add a decoration to target. + template + requires std::is_enum_v Id Decorate(Id target, spv::Decoration decoration, T literal) { + return Decorate(target, decoration, static_cast(literal)); } Id MemberDecorate(Id structure_type, Literal member, spv::Decoration decoration, - const std::vector& literals = {}); + std::span literals = {}); template - Id MemberDecorate(Id structure_type, Literal member, spv::Decoration decoration, - Ts&&... literals) { - return MemberDecorate(structure_type, member, decoration, {literals...}); + requires(...&& std::is_convertible_v) Id + MemberDecorate(Id structure_type, Literal member, spv::Decoration decoration, + Ts&&... literals) { + const Literal stack_literals[] = {std::forward(literals)...}; + return MemberDecorate(structure_type, member, decoration, + std::span{stack_literals}); } // Misc @@ -362,10 +426,17 @@ public: Id OpUndef(Id result_type); /// Emits the current values of all output variables to the current output primitive. - Id OpEmitVertex(); + void OpEmitVertex(); /// Finish the current primitive and start a new one. No vertex is emitted. - Id OpEndPrimitive(); + void OpEndPrimitive(); + + /// Emits the current values of all output variables to the current output primitive. After + /// execution, the values of all output variables are undefined. + void OpEmitStreamVertex(Id stream); + + /// Finish the current primitive and start a new one. No vertex is emitted. + void OpEndStreamPrimitive(Id stream); // Barrier @@ -569,7 +640,7 @@ public: /// Integer substraction of Operand 1 and Operand 2. Id OpISub(Id result_type, Id operand_1, Id operand_2); - /// Floating-point substraction of Operand 1 and Operand 2. + /// Floating-point subtraction of Operand 1 and Operand 2. Id OpFSub(Id result_type, Id operand_1, Id operand_2); /// Integer multiplication of Operand 1 and Operand 2. @@ -608,13 +679,13 @@ public: // Extensions /// Execute an instruction in an imported set of extended instructions. - Id OpExtInst(Id result_type, Id set, std::uint32_t instruction, - const std::vector& operands); + Id OpExtInst(Id result_type, Id set, std::uint32_t instruction, std::span operands); /// Execute an instruction in an imported set of extended instructions. template - Id OpExtInst(Id result_type, Id set, std::uint32_t instruction, Ts&&... operands) { - return OpExtInst(result_type, set, instruction, {operands...}); + requires(...&& std::is_convertible_v) Id + OpExtInst(Id result_type, Id set, std::uint32_t instruction, Ts&&... operands) { + return OpExtInst(result_type, set, instruction, std::span({operands...})); } /// Result is x if x >= 0; otherwise result is -x. @@ -756,6 +827,52 @@ public: /// of the pixel specified by offset. Id OpInterpolateAtOffset(Id result_type, Id interpolant, Id offset); + // Derivatives + + /// Same result as either OpDPdxFine or OpDPdxCoarse on the input. + /// Selection of which one is based on external factors. + Id OpDPdx(Id result_type, Id operand); + + /// Same result as either OpDPdyFine or OpDPdyCoarse on the input. + /// Selection of which one is based on external factors. + Id OpDPdy(Id result_type, Id operand); + + /// Result is the same as computing the sum of the absolute values of OpDPdx and OpDPdy + /// on the input. + Id OpFwidth(Id result_type, Id operand); + + /// Result is the partial derivative of the input with respect to the window x coordinate. 
+ /// Uses local differencing based on the value of the input for the current fragment and + /// its immediate neighbor(s). + Id OpDPdxFine(Id result_type, Id operand); + + /// Result is the partial derivative of the input with respect to the window y coordinate. + /// Uses local differencing based on the value of the input for the current fragment and + /// its immediate neighbor(s). + Id OpDPdyFine(Id result_type, Id operand); + + /// Result is the same as computing the sum of the absolute values of OpDPdxFine and OpDPdyFine + /// on the input. + Id OpFwidthFine(Id result_type, Id operand); + + /// Result is the partial derivative of the input with respect to the window x coordinate. + /// Uses local differencing based on the value of the input for the current fragment's + /// neighbors, and possibly, but not necessarily, includes the value of the input for the + /// current fragment. That is, over a given area, the implementation can compute x derivatives + /// in fewer unique locations than would be allowed for OpDPdxFine. + Id OpDPdxCoarse(Id result_type, Id operand); + + /// Result is the partial derivative of the input with respect to the window y coordinate. + /// Uses local differencing based on the value of the input for the current fragment's + /// neighbors, and possibly, but not necessarily, includes the value of the input for the + /// current fragment. That is, over a given area, the implementation can compute y derivatives + /// in fewer unique locations than would be allowed for OpDPdyFine. + Id OpDPdyCoarse(Id result_type, Id operand); + + /// Result is the same as computing the sum of the absolute values of OpDPdxCoarse and + /// OpDPdyCoarse on the input. + Id OpFwidthCoarse(Id result_type, Id operand); + // Image /// Create a sampled image, containing both a sampler and an image. @@ -763,172 +880,191 @@ public: /// Sample an image with an implicit level of detail. Id OpImageSampleImplicitLod(Id result_type, Id sampled_image, Id coordinate, - std::optional image_operands = {}, - const std::vector& operands = {}); + std::optional image_operands = std::nullopt, + std::span operands = {}); /// Sample an image with an implicit level of detail. template - Id OpImageSampleImplicitLod(Id result_type, Id sampled_image, Id coordinate, - spv::ImageOperandsMask image_operands, Ts&&... operands) { + requires(...&& std::is_convertible_v) Id + OpImageSampleImplicitLod(Id result_type, Id sampled_image, Id coordinate, + spv::ImageOperandsMask image_operands, Ts&&... operands) { return OpImageSampleImplicitLod(result_type, sampled_image, coordinate, image_operands, - {operands...}); + std::span({operands...})); } /// Sample an image using an explicit level of detail. Id OpImageSampleExplicitLod(Id result_type, Id sampled_image, Id coordinate, spv::ImageOperandsMask image_operands, - const std::vector& operands = {}); + std::span operands = {}); /// Sample an image using an explicit level of detail. template - Id OpImageSampleExplicitLod(Id result_type, Id sampled_image, Id coordinate, - spv::ImageOperandsMask image_operands, Ts&&... operands) { + requires(...&& std::is_convertible_v) Id + OpImageSampleExplicitLod(Id result_type, Id sampled_image, Id coordinate, + spv::ImageOperandsMask image_operands, Ts&&... operands) { return OpImageSampleExplicitLod(result_type, sampled_image, coordinate, image_operands, - {operands...}); + std::span({operands...})); } /// Sample an image doing depth-comparison with an implicit level of detail. 
 
     /// Sample an image doing depth-comparison with an implicit level of detail.
-    Id OpImageSampleDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                                    std::optional<spv::ImageOperandsMask> image_operands = {},
-                                    const std::vector<Id>& operands = {});
+    Id OpImageSampleDrefImplicitLod(
+        Id result_type, Id sampled_image, Id coordinate, Id dref,
+        std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+        std::span<const Id> operands = {});
 
     /// Sample an image doing depth-comparison with an implicit level of detail.
     template <typename... Ts>
-    Id OpImageSampleDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                                    spv::ImageOperandsMask image_operands, Ts&&... operands) {
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageSampleDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                                 spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageSampleDrefImplicitLod(result_type, sampled_image, coordinate, dref,
-                                            image_operands, {operands...});
+                                            image_operands, std::span<const Id>({operands...}));
     }
 
     /// Sample an image doing depth-comparison using an explicit level of detail.
     Id OpImageSampleDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
                                     spv::ImageOperandsMask image_operands,
-                                    const std::vector<Id>& operands = {});
+                                    std::span<const Id> operands = {});
 
     /// Sample an image doing depth-comparison using an explicit level of detail.
     template <typename... Ts>
-    Id OpImageSampleDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                                    spv::ImageOperandsMask image_operands, Ts&&... operands) {
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageSampleDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                                 spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageSampleDrefExplicitLod(result_type, sampled_image, coordinate, dref,
-                                            image_operands, {operands...});
+                                            image_operands, std::span<const Id>({operands...}));
     }
 
     /// Sample an image with a project coordinate and an implicit level of detail.
-    Id OpImageSampleProjImplicitLod(Id result_type, Id sampled_image, Id coordinate,
-                                    std::optional<spv::ImageOperandsMask> image_operands = {},
-                                    const std::vector<Id>& operands = {});
+    Id OpImageSampleProjImplicitLod(
+        Id result_type, Id sampled_image, Id coordinate,
+        std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+        std::span<const Id> operands = {});
 
     /// Sample an image with a project coordinate and an implicit level of detail.
     template <typename... Ts>
-    Id OpImageSampleProjImplicitLod(Id result_type, Id sampled_image, Id coordinate,
-                                    spv::ImageOperandsMask image_operands, Ts&&... operands) {
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageSampleProjImplicitLod(Id result_type, Id sampled_image, Id coordinate,
+                                 spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageSampleProjImplicitLod(result_type, sampled_image, coordinate, image_operands,
-                                            {operands...});
+                                            std::span<const Id>({operands...}));
     }
 
     /// Sample an image with a project coordinate using an explicit level of detail.
     Id OpImageSampleProjExplicitLod(Id result_type, Id sampled_image, Id coordinate,
                                     spv::ImageOperandsMask image_operands,
-                                    const std::vector<Id>& operands = {});
+                                    std::span<const Id> operands = {});
 
     /// Sample an image with a project coordinate using an explicit level of detail.
     template <typename... Ts>
-    Id OpImageSampleProjExplicitLod(Id result_type, Id sampled_image, Id coordinate,
-                                    spv::ImageOperandsMask image_operands, Ts&&... operands) {
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageSampleProjExplicitLod(Id result_type, Id sampled_image, Id coordinate,
+                                 spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageSampleProjExplicitLod(result_type, sampled_image, coordinate, image_operands,
-                                            {operands...});
+                                            std::span<const Id>({operands...}));
     }
 
     /// Sample an image with a project coordinate, doing depth-comparison, with an implicit level
     /// of detail.
-    Id OpImageSampleProjDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                                        std::optional<spv::ImageOperandsMask> image_operands = {},
-                                        const std::vector<Id>& operands = {});
+    Id OpImageSampleProjDrefImplicitLod(
+        Id result_type, Id sampled_image, Id coordinate, Id dref,
+        std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+        std::span<const Id> operands = {});
 
     /// Sample an image with a project coordinate, doing depth-comparison, with an implicit level
     /// of detail.
     template <typename... Ts>
-    Id OpImageSampleProjDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                                        spv::ImageOperandsMask image_operands, Ts&&... operands) {
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageSampleProjDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                                     spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageSampleProjDrefImplicitLod(result_type, sampled_image, coordinate, dref,
-                                                image_operands, {operands...});
+                                                image_operands, std::span<const Id>({operands...}));
     }
 
     /// Sample an image with a project coordinate, doing depth-comparison, using an explicit level
     /// of detail.
     Id OpImageSampleProjDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
                                         spv::ImageOperandsMask image_operands,
-                                        const std::vector<Id>& operands = {});
+                                        std::span<const Id> operands = {});
 
     /// Sample an image with a project coordinate, doing depth-comparison, using an explicit level
     /// of detail.
     template <typename... Ts>
-    Id OpImageSampleProjDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                                        spv::ImageOperandsMask image_operands, Ts&&... operands) {
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageSampleProjDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                                     spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageSampleProjDrefExplicitLod(result_type, sampled_image, coordinate, dref,
-                                                image_operands, {operands...});
+                                                image_operands, std::span<const Id>({operands...}));
    }
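A sketch of depth-comparison (shadow map) sampling with these overloads; `shadow_image`, `coords`, `proj_coords`, `depth_ref`, and `f32_type` are assumed Ids, not names from the patch:

    // Implicit-LOD depth comparison yields a scalar comparison result.
    const Sirit::Id lit =
        mod.OpImageSampleDrefImplicitLod(f32_type, shadow_image, coords, depth_ref);
    // Projective form: the coordinate is divided by its last component first.
    const Sirit::Id lit_proj =
        mod.OpImageSampleProjDrefImplicitLod(f32_type, shadow_image, proj_coords, depth_ref);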
 
     /// Fetch a single texel from an image whose Sampled operand is 1.
     Id OpImageFetch(Id result_type, Id sampled_image, Id coordinate,
-                    std::optional<spv::ImageOperandsMask> image_operands = {},
-                    const std::vector<Id>& operands = {});
+                    std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+                    std::span<const Id> operands = {});
 
     /// Fetch a single texel from an image whose Sampled operand is 1.
     template <typename... Ts>
-    Id OpImageFetch(Id result_type, Id sampled_image, Id coordinate,
-                    spv::ImageOperandsMask image_operands, Ts&&... operands) {
-        return OpImageFetch(result_type, sampled_image, coordinate, image_operands, {operands...});
-    }
-
-    /// Gathers the requested component from four texels.
-    Id OpImageGather(Id result_type, Id sampled_image, Id coordinate, Id component,
-                     std::optional<spv::ImageOperandsMask> image_operands = {},
-                     const std::vector<Id>& operands = {});
-
-    /// Gathers the requested component from four texels.
-    template <typename... Ts>
-    Id OpImageGather(Id result_type, Id sampled_image, Id coordinate, Id component,
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageFetch(Id result_type, Id sampled_image, Id coordinate,
                  spv::ImageOperandsMask image_operands, Ts&&... operands) {
+        return OpImageFetch(result_type, sampled_image, coordinate, image_operands,
+                            std::span<const Id>({operands...}));
+    }
+
+    /// Gathers the requested component from four texels.
+    Id OpImageGather(Id result_type, Id sampled_image, Id coordinate, Id component,
+                     std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+                     std::span<const Id> operands = {});
+
+    /// Gathers the requested component from four texels.
+    template <typename... Ts>
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageGather(Id result_type, Id sampled_image, Id coordinate, Id component,
+                  spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageGather(result_type, sampled_image, coordinate, component, image_operands,
-                             {operands...});
+                             std::span<const Id>({operands...}));
     }
 
     /// Gathers the requested depth-comparison from four texels.
     Id OpImageDrefGather(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                         std::optional<spv::ImageOperandsMask> image_operands = {},
-                         const std::vector<Id>& operands = {});
+                         std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+                         std::span<const Id> operands = {});
 
     /// Gathers the requested depth-comparison from four texels.
     template <typename... Ts>
-    Id OpImageDrefGather(Id result_type, Id sampled_image, Id coordinate, Id dref,
-                         spv::ImageOperandsMask image_operands, Ts&&... operands) {
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageDrefGather(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                      spv::ImageOperandsMask image_operands, Ts&&... operands) {
         return OpImageDrefGather(result_type, sampled_image, coordinate, dref, image_operands,
-                                 {operands...});
+                                 std::span<const Id>({operands...}));
     }
 
     /// Read a texel from an image without a sampler.
     Id OpImageRead(Id result_type, Id sampled_image, Id coordinate,
-                   std::optional<spv::ImageOperandsMask> image_operands = {},
-                   const std::vector<Id>& operands = {});
+                   std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+                   std::span<const Id> operands = {});
 
     /// Read a texel from an image without a sampler.
     template <typename... Ts>
-    Id OpImageRead(Id result_type, Id sampled_image, Id coordinate,
-                   spv::ImageOperandsMask image_operands, Ts&&... operands) {
-        return OpImageRead(result_type, sampled_image, coordinate, image_operands, {operands...});
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageRead(Id result_type, Id sampled_image, Id coordinate,
+                spv::ImageOperandsMask image_operands, Ts&&... operands) {
+        return OpImageRead(result_type, sampled_image, coordinate, image_operands,
+                           std::span<const Id>({operands...}));
     }
 
     /// Write a texel to an image without a sampler.
     Id OpImageWrite(Id image, Id coordinate, Id texel,
-                    std::optional<spv::ImageOperandsMask> image_operands = {},
-                    const std::vector<Id>& operands = {});
+                    std::optional<spv::ImageOperandsMask> image_operands = std::nullopt,
+                    std::span<const Id> operands = {});
 
     /// Write a texel to an image without a sampler.
     template <typename... Ts>
-    Id OpImageWrite(Id image, Id coordinate, Id texel, spv::ImageOperandsMask image_operands,
-                    Ts&&... operands) {
-        return OpImageWrite(image, coordinate, texel, image_operands, {operands...});
+    requires(... && std::is_convertible_v<Ts, Id>) Id
+    OpImageWrite(Id image, Id coordinate, Id texel, spv::ImageOperandsMask image_operands,
+                 Ts&&... operands) {
+        return OpImageWrite(image, coordinate, texel, image_operands,
+                            std::span<const Id>({operands...}));
     }
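Sampler-less access in one sketch; `v4f32_type`, `sampled`, `storage_image`, `int_coords`, and `lod` are assumed Ids for illustration:

    // OpImageFetch requires an image whose Sampled operand is 1.
    const Sirit::Id texel = mod.OpImageFetch(v4f32_type, sampled, int_coords,
                                             spv::ImageOperandsMask::Lod, lod);
    // OpImageRead and OpImageWrite operate on storage images instead.
    const Sirit::Id loaded = mod.OpImageRead(v4f32_type, storage_image, int_coords);
    mod.OpImageWrite(storage_image, int_coords, loaded);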
 
     /// Extract the image from a sampled image.
@@ -950,6 +1086,50 @@ public:
     /// Query the number of samples available per texel fetch in a multisample image.
     Id OpImageQuerySamples(Id result_type, Id image);
 
+    /// Sample a sparse image with an implicit level of detail.
+    Id OpImageSparseSampleImplicitLod(Id result_type, Id sampled_image, Id coordinate,
+                                      std::optional<spv::ImageOperandsMask> image_operands,
+                                      std::span<const Id> operands);
+
+    /// Sample a sparse image using an explicit level of detail.
+    Id OpImageSparseSampleExplicitLod(Id result_type, Id sampled_image, Id coordinate,
+                                      spv::ImageOperandsMask image_operands,
+                                      std::span<const Id> operands);
+
+    /// Sample a sparse image doing depth-comparison with an implicit level of detail.
+    Id OpImageSparseSampleDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                                          std::optional<spv::ImageOperandsMask> image_operands,
+                                          std::span<const Id> operands);
+
+    /// Sample a sparse image doing depth-comparison using an explicit level of detail.
+    Id OpImageSparseSampleDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                                          spv::ImageOperandsMask image_operands,
+                                          std::span<const Id> operands);
+
+    /// Fetch a single texel from a sampled sparse image.
+    Id OpImageSparseFetch(Id result_type, Id image, Id coordinate,
+                          std::optional<spv::ImageOperandsMask> image_operands,
+                          std::span<const Id> operands);
+
+    /// Gathers the requested component from four texels of a sparse image.
+    Id OpImageSparseGather(Id result_type, Id sampled_image, Id coordinate, Id component,
+                           std::optional<spv::ImageOperandsMask> image_operands,
+                           std::span<const Id> operands);
+
+    /// Gathers the requested depth-comparison from four texels of a sparse image.
+    Id OpImageSparseDrefGather(Id result_type, Id sampled_image, Id coordinate, Id dref,
+                               std::optional<spv::ImageOperandsMask> image_operands,
+                               std::span<const Id> operands);
+
+    /// Translates a Resident Code into a Boolean. Result is false if any of the texels were in
+    /// uncommitted texture memory, and true otherwise.
+    Id OpImageSparseTexelsResident(Id result_type, Id resident_code);
+
+    /// Read a texel from a sparse image without a sampler.
+    Id OpImageSparseRead(Id result_type, Id image, Id coordinate,
+                         std::optional<spv::ImageOperandsMask> image_operands,
+                         std::span<const Id> operands);
+
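The sparse variants return a structure whose first member is a residency code, which OpImageSparseTexelsResident then converts into a boolean. A sketch under assumed names (`sparse_struct_type` holding a u32 plus the texel, `u32_type`, `bool_type`, `image`, `int_coords`), using the variadic OpCompositeExtract overload:

    const Sirit::Id fetched = mod.OpImageSparseFetch(sparse_struct_type, image, int_coords,
                                                     std::nullopt, {});
    // Member 0 of the result struct is the residency code.
    const Sirit::Id res_code = mod.OpCompositeExtract(u32_type, fetched, 0U);
    const Sirit::Id resident = mod.OpImageSparseTexelsResident(bool_type, res_code);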
     // Group
 
     /// Computes a bitfield value combining the Predicate value from all invocations in the current
@@ -972,7 +1152,29 @@ public:
     /// Return the value of the invocation identified by the current invocation's id within the
     /// group xor'ed with mask.
-    Id OpGroupNonUniformShuffleXor(Id result_type, spv::Scope scope, Id value, Id mask);
+    Id OpGroupNonUniformShuffleXor(Id result_type, Id scope, Id value, Id mask);
+
+    /// Evaluates a predicate for all active invocations in the group, resulting in
+    /// true if the predicate evaluates to true for all active invocations in the
+    /// group; otherwise the result is false.
+    Id OpGroupNonUniformAll(Id result_type, Id scope, Id predicate);
+
+    /// Evaluates a predicate for all active invocations in the group,
+    /// resulting in true if the predicate evaluates to true for any active
+    /// invocation in the group; otherwise the result is false.
+    Id OpGroupNonUniformAny(Id result_type, Id scope, Id predicate);
+
+    /// Evaluates a value for all active invocations in the group. The result
+    /// is true if Value is equal for all active invocations in the group.
+    /// Otherwise, the result is false.
+    Id OpGroupNonUniformAllEqual(Id result_type, Id scope, Id value);
+
+    /// Result is a bitfield value combining the Predicate value from all
+    /// invocations in the group that execute the same dynamic instance of this
+    /// instruction. The bit is set to one if the corresponding invocation is
+    /// active and the Predicate for that invocation evaluated to true;
+    /// otherwise, it is set to zero.
+    Id OpGroupNonUniformBallot(Id result_type, Id scope, Id predicate);
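Note that the scope parameter is now an Id (a constant) rather than a raw spv::Scope. A sketch with `u32_type`, `bool_type`, `v4u32_type`, and `pred` assumed:

    // Wrap Scope::Subgroup in a constant Id, as the new signatures expect.
    const Sirit::Id subgroup =
        mod.Constant(u32_type, static_cast<std::uint32_t>(spv::Scope::Subgroup));
    const Sirit::Id every = mod.OpGroupNonUniformAll(bool_type, subgroup, pred);
    const Sirit::Id mask = mod.OpGroupNonUniformBallot(v4u32_type, subgroup, pred);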
 
     // Atomic
 
@@ -1077,38 +1279,27 @@ public:
     Id OpAtomicXor(Id result_type, Id pointer, Id memory, Id semantics, Id value);
 
 private:
-    Id AddCode(std::unique_ptr<Op> op);
-
-    Id AddCode(spv::Op opcode, std::optional<u32> id = {});
-
-    Id AddDeclaration(std::unique_ptr<Op> op);
-
-    void AddAnnotation(std::unique_ptr<Op> op);
-
     Id GetGLSLstd450();
 
     std::uint32_t version{};
-    std::uint32_t bound{1};
+    std::uint32_t bound{};
 
     std::unordered_set<std::string> extensions;
     std::unordered_set<spv::Capability> capabilities;
-    std::unordered_set<std::unique_ptr<Op>> ext_inst_import;
-    std::unique_ptr<Op> glsl_std_450;
+    std::optional<Id> glsl_std_450;
 
     spv::AddressingModel addressing_model{spv::AddressingModel::Logical};
     spv::MemoryModel memory_model{spv::MemoryModel::GLSL450};
 
-    std::vector<std::unique_ptr<Op>> entry_points;
-    std::vector<std::unique_ptr<Op>> execution_modes;
-    std::vector<std::unique_ptr<Op>> debug;
-    std::vector<std::unique_ptr<Op>> annotations;
-    std::vector<std::unique_ptr<Op>> declarations;
-
-    std::vector<Op*> global_variables;
-
-    std::vector<Op*> code;
-
-    std::vector<std::unique_ptr<Op>> code_store;
+    std::unique_ptr<Stream> ext_inst_imports;
+    std::unique_ptr<Stream> entry_points;
+    std::unique_ptr<Stream> execution_modes;
+    std::unique_ptr<Stream> debug;
+    std::unique_ptr<Stream> annotations;
+    std::unique_ptr<Stream> declarations;
+    std::unique_ptr<Stream> global_variables;
+    std::unique_ptr<Stream> code;
+    std::vector<u32> deferred_phi_nodes;
 };
 
 } // namespace Sirit
diff --git a/externals/sirit/src/CMakeLists.txt b/externals/sirit/src/CMakeLists.txt
index 5d62c7c90..c9bf0934a 100755
--- a/externals/sirit/src/CMakeLists.txt
+++ b/externals/sirit/src/CMakeLists.txt
@@ -1,22 +1,14 @@
 add_library(sirit
     ../include/sirit/sirit.h
     sirit.cpp
-    op.cpp
-    op.h
-    stream.cpp
     stream.h
-    operand.cpp
-    operand.h
-    literal_number.cpp
-    literal_number.h
-    literal_string.cpp
-    literal_string.h
     common_types.h
     instructions/type.cpp
     instructions/constant.cpp
     instructions/function.cpp
     instructions/flow.cpp
     instructions/debug.cpp
+    instructions/derivatives.cpp
     instructions/memory.cpp
     instructions/annotation.cpp
     instructions/misc.cpp
diff --git a/externals/sirit/src/instructions/annotation.cpp b/externals/sirit/src/instructions/annotation.cpp
index da796846b..e3524bb83 100755
--- a/externals/sirit/src/instructions/annotation.cpp
+++ b/externals/sirit/src/instructions/annotation.cpp
@@ -4,32 +4,24 @@
  * 3-Clause BSD License
  */
 
-#include <memory>
-#include <vector>
-#include "common_types.h"
-#include "op.h"
+#include <span>
+
 #include "sirit/sirit.h"
+#include "stream.h"
+
 namespace Sirit {
 
-Id Module::Decorate(Id target, spv::Decoration decoration, const std::vector<Literal>& literals) {
-    auto op{std::make_unique<Op>(spv::Op::OpDecorate)};
-    op->Add(target);
-    op->Add(static_cast<u32>(decoration));
-    op->Add(literals);
-    AddAnnotation(std::move(op));
-    return target;
+Id Module::Decorate(Id target, spv::Decoration decoration, std::span<const Literal> literals) {
+    annotations->Reserve(3 + literals.size());
+    return *annotations << spv::Op::OpDecorate << target << decoration << literals << EndOp{};
 }
 
 Id Module::MemberDecorate(Id structure_type, Literal member, spv::Decoration decoration,
-                          const std::vector<Literal>& literals) {
-    auto op{std::make_unique<Op>(spv::Op::OpMemberDecorate)};
-    op->Add(structure_type);
-    op->Add(member);
-    op->Add(static_cast<u32>(decoration));
-    op->Add(literals);
-    AddAnnotation(std::move(op));
-    return structure_type;
+                          std::span<const Literal> literals) {
+    annotations->Reserve(4 + literals.size());
+    return *annotations << spv::Op::OpMemberDecorate << structure_type << member << decoration
+                        << literals << EndOp{};
 }
 
 } // namespace Sirit
diff --git a/externals/sirit/src/instructions/arithmetic.cpp b/externals/sirit/src/instructions/arithmetic.cpp
index
4cc1cf0fb..4fa8057e3 100755 --- a/externals/sirit/src/instructions/arithmetic.cpp +++ b/externals/sirit/src/instructions/arithmetic.cpp @@ -4,35 +4,22 @@ * 3-Clause BSD License */ -#include -#include "common_types.h" -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { #define DEFINE_UNARY(funcname, opcode) \ Id Module::funcname(Id result_type, Id operand) { \ - auto op{std::make_unique(opcode, bound++, result_type)}; \ - op->Add(operand); \ - return AddCode(std::move(op)); \ + code->Reserve(4); \ + return *code << OpId{opcode, result_type} << operand << EndOp{}; \ } #define DEFINE_BINARY(funcname, opcode) \ Id Module::funcname(Id result_type, Id operand_1, Id operand_2) { \ - auto op{std::make_unique(opcode, bound++, result_type)}; \ - op->Add(operand_1); \ - op->Add(operand_2); \ - return AddCode(std::move(op)); \ - } - -#define DEFINE_TRINARY(funcname, opcode) \ - Id Module::funcname(Id result_type, Id operand_1, Id operand_2, Id operand_3) { \ - auto op{std::make_unique(opcode, bound++, result_type)}; \ - op->Add(operand_1); \ - op->Add(operand_2); \ - op->Add(operand_3); \ - return AddCode(std::move(op)); \ + code->Reserve(5); \ + return *code << OpId{opcode, result_type} << operand_1 << operand_2 << EndOp{}; \ } DEFINE_UNARY(OpSNegate, spv::Op::OpSNegate) diff --git a/externals/sirit/src/instructions/atomic.cpp b/externals/sirit/src/instructions/atomic.cpp index 146a2a6ba..cee8849f5 100755 --- a/externals/sirit/src/instructions/atomic.cpp +++ b/externals/sirit/src/instructions/atomic.cpp @@ -4,145 +4,101 @@ * 3-Clause BSD License */ -#include "common_types.h" -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::OpAtomicLoad(Id result_type, Id pointer, Id memory, Id semantics) { - auto op{std::make_unique(spv::Op::OpAtomicLoad, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - return AddCode(std::move(op)); + code->Reserve(6); + return *code << OpId{spv::Op::OpAtomicLoad, result_type} << pointer << memory << semantics + << EndOp{}; } Id Module::OpAtomicStore(Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicStore)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpAtomicStore} << pointer << memory << semantics << value + << EndOp{}; } Id Module::OpAtomicExchange(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicExchange, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicExchange, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicCompareExchange(Id result_type, Id pointer, Id memory, Id equal, Id unequal, Id value, Id comparator) { - auto op{std::make_unique(spv::Op::OpAtomicCompareExchange, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(equal); - op->Add(unequal); - op->Add(value); - op->Add(comparator); - return AddCode(std::move(op)); + code->Reserve(9); + return *code << OpId{spv::Op::OpAtomicCompareExchange, result_type} << pointer << memory + << equal << unequal << value << comparator << EndOp{}; } Id Module::OpAtomicIIncrement(Id result_type, Id pointer, Id memory, Id semantics) { - auto op{std::make_unique(spv::Op::OpAtomicIIncrement, bound++, 
result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - return AddCode(std::move(op)); + code->Reserve(6); + return *code << OpId{spv::Op::OpAtomicIIncrement, result_type} << pointer << memory << semantics + << EndOp{}; } Id Module::OpAtomicIDecrement(Id result_type, Id pointer, Id memory, Id semantics) { - auto op{std::make_unique(spv::Op::OpAtomicIDecrement, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - return AddCode(std::move(op)); + code->Reserve(6); + return *code << OpId{spv::Op::OpAtomicIDecrement, result_type} << pointer << memory << semantics + << EndOp{}; } Id Module::OpAtomicIAdd(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicIAdd, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicIAdd, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicISub(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicISub, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicISub, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicSMin(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicSMin, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicSMin, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicUMin(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicUMin, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicUMin, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicSMax(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicSMax, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicSMax, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicUMax(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicUMax, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicUMax, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicAnd(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicAnd, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicAnd, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicOr(Id 
result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicOr, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicOr, result_type} << pointer << memory << semantics + << value << EndOp{}; } Id Module::OpAtomicXor(Id result_type, Id pointer, Id memory, Id semantics, Id value) { - auto op{std::make_unique(spv::Op::OpAtomicXor, bound++, result_type)}; - op->Add(pointer); - op->Add(memory); - op->Add(semantics); - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpAtomicXor, result_type} << pointer << memory << semantics + << value << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/barrier.cpp b/externals/sirit/src/instructions/barrier.cpp index da74bf566..646b5cfb4 100755 --- a/externals/sirit/src/instructions/barrier.cpp +++ b/externals/sirit/src/instructions/barrier.cpp @@ -4,25 +4,20 @@ * 3-Clause BSD License */ -#include -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::OpControlBarrier(Id execution, Id memory, Id semantics) { - auto op = std::make_unique(spv::Op::OpControlBarrier); - op->Add(execution); - op->Add(memory); - op->Add(semantics); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << spv::Op::OpControlBarrier << execution << memory << semantics << EndOp{}; } Id Module::OpMemoryBarrier(Id scope, Id semantics) { - auto op = std::make_unique(spv::Op::OpMemoryBarrier); - op->Add(scope); - op->Add(semantics); - return AddCode(std::move(op)); + code->Reserve(3); + return *code << spv::Op::OpMemoryBarrier << scope << semantics << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/bit.cpp b/externals/sirit/src/instructions/bit.cpp index 25f4eff71..c860613fc 100755 --- a/externals/sirit/src/instructions/bit.cpp +++ b/externals/sirit/src/instructions/bit.cpp @@ -4,96 +4,73 @@ * 3-Clause BSD License */ -#include -#include "common_types.h" -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::OpShiftRightLogical(Id result_type, Id base, Id shift) { - auto op{std::make_unique(spv::Op::OpShiftRightLogical, bound++, result_type)}; - op->Add(base); - op->Add(shift); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpShiftRightLogical, result_type} << base << shift << EndOp{}; } Id Module::OpShiftRightArithmetic(Id result_type, Id base, Id shift) { - auto op{std::make_unique(spv::Op::OpShiftRightArithmetic, bound++, result_type)}; - op->Add(base); - op->Add(shift); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpShiftRightArithmetic, result_type} << base << shift << EndOp{}; } Id Module::OpShiftLeftLogical(Id result_type, Id base, Id shift) { - auto op{std::make_unique(spv::Op::OpShiftLeftLogical, bound++, result_type)}; - op->Add(base); - op->Add(shift); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpShiftLeftLogical, result_type} << base << shift << EndOp{}; } Id Module::OpBitwiseOr(Id result_type, Id operand_1, Id operand_2) { - auto op{std::make_unique(spv::Op::OpBitwiseOr, bound++, result_type)}; - op->Add(operand_1); - op->Add(operand_2); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpBitwiseOr, result_type} << operand_1 << operand_2 << 
EndOp{}; } Id Module::OpBitwiseXor(Id result_type, Id operand_1, Id operand_2) { - auto op{std::make_unique(spv::Op::OpBitwiseXor, bound++, result_type)}; - op->Add(operand_1); - op->Add(operand_2); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpBitwiseXor, result_type} << operand_1 << operand_2 << EndOp{}; } Id Module::OpBitwiseAnd(Id result_type, Id operand_1, Id operand_2) { - auto op{std::make_unique(spv::Op::OpBitwiseAnd, bound++, result_type)}; - op->Add(operand_1); - op->Add(operand_2); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpBitwiseAnd, result_type} << operand_1 << operand_2 << EndOp{}; } Id Module::OpNot(Id result_type, Id operand) { - auto op{std::make_unique(spv::Op::OpNot, bound++, result_type)}; - op->Add(operand); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpNot, result_type} << operand << EndOp{}; } Id Module::OpBitFieldInsert(Id result_type, Id base, Id insert, Id offset, Id count) { - auto op{std::make_unique(spv::Op::OpBitFieldInsert, bound++, result_type)}; - op->Add(base); - op->Add(insert); - op->Add(offset); - op->Add(count); - return AddCode(std::move(op)); + code->Reserve(7); + return *code << OpId{spv::Op::OpBitFieldInsert, result_type} << base << insert << offset + << count << EndOp{}; } Id Module::OpBitFieldSExtract(Id result_type, Id base, Id offset, Id count) { - auto op{std::make_unique(spv::Op::OpBitFieldSExtract, bound++, result_type)}; - op->Add(base); - op->Add(offset); - op->Add(count); - return AddCode(std::move(op)); + code->Reserve(6); + return *code << OpId{spv::Op::OpBitFieldSExtract, result_type} << base << offset << count + << EndOp{}; } Id Module::OpBitFieldUExtract(Id result_type, Id base, Id offset, Id count) { - auto op{std::make_unique(spv::Op::OpBitFieldUExtract, bound++, result_type)}; - op->Add(base); - op->Add(offset); - op->Add(count); - return AddCode(std::move(op)); + code->Reserve(6); + return *code << OpId{spv::Op::OpBitFieldUExtract, result_type} << base << offset << count + << EndOp{}; } Id Module::OpBitReverse(Id result_type, Id base) { - auto op{std::make_unique(spv::Op::OpBitReverse, bound++, result_type)}; - op->Add(base); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpBitReverse, result_type} << base << EndOp{}; } Id Module::OpBitCount(Id result_type, Id base) { - auto op{std::make_unique(spv::Op::OpBitCount, bound++, result_type)}; - op->Add(base); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpBitCount, result_type} << base << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/constant.cpp b/externals/sirit/src/instructions/constant.cpp index 1d503279e..612049c4f 100755 --- a/externals/sirit/src/instructions/constant.cpp +++ b/externals/sirit/src/instructions/constant.cpp @@ -5,42 +5,44 @@ */ #include -#include "op.h" + #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::ConstantTrue(Id result_type) { - return AddDeclaration(std::make_unique(spv::Op::OpConstantTrue, bound, result_type)); + declarations->Reserve(3); + return *declarations << OpId{spv::Op::OpConstantTrue, result_type} << EndOp{}; } Id Module::ConstantFalse(Id result_type) { - return AddDeclaration(std::make_unique(spv::Op::OpConstantFalse, bound, result_type)); + declarations->Reserve(3); + return *declarations << OpId{spv::Op::OpConstantFalse, result_type} << EndOp{}; } Id Module::Constant(Id 
result_type, const Literal& literal) { - auto op{std::make_unique(spv::Op::OpConstant, bound, result_type)}; - op->Add(literal); - return AddDeclaration(std::move(op)); + declarations->Reserve(3 + 2); + return *declarations << OpId{spv::Op::OpConstant, result_type} << literal << EndOp{}; } -Id Module::ConstantComposite(Id result_type, const std::vector& constituents) { - auto op{std::make_unique(spv::Op::OpConstantComposite, bound, result_type)}; - op->Add(constituents); - return AddDeclaration(std::move(op)); +Id Module::ConstantComposite(Id result_type, std::span constituents) { + declarations->Reserve(3 + constituents.size()); + return *declarations << OpId{spv::Op::OpConstantComposite, result_type} << constituents + << EndOp{}; } Id Module::ConstantSampler(Id result_type, spv::SamplerAddressingMode addressing_mode, bool normalized, spv::SamplerFilterMode filter_mode) { - auto op{std::make_unique(spv::Op::OpConstantSampler, bound, result_type)}; - op->Add(static_cast(addressing_mode)); - op->Add(normalized ? 1 : 0); - op->Add(static_cast(filter_mode)); - return AddDeclaration(std::move(op)); + declarations->Reserve(6); + return *declarations << OpId{spv::Op::OpConstantSampler, result_type} << addressing_mode + << normalized << filter_mode << EndOp{}; } Id Module::ConstantNull(Id result_type) { - return AddDeclaration(std::make_unique(spv::Op::OpConstantNull, bound, result_type)); + declarations->Reserve(3); + return *declarations << OpId{spv::Op::OpConstantNull, result_type} << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/conversion.cpp b/externals/sirit/src/instructions/conversion.cpp index 57d57c99b..0b4e2c8a3 100755 --- a/externals/sirit/src/instructions/conversion.cpp +++ b/externals/sirit/src/instructions/conversion.cpp @@ -4,18 +4,16 @@ * 3-Clause BSD License */ -#include -#include "common_types.h" -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { #define DEFINE_UNARY(opcode) \ Id Module::opcode(Id result_type, Id operand) { \ - auto op{std::make_unique(spv::Op::opcode, bound++, result_type)}; \ - op->Add(operand); \ - return AddCode(std::move(op)); \ + code->Reserve(4); \ + return *code << OpId{spv::Op::opcode, result_type} << operand << EndOp{}; \ } DEFINE_UNARY(OpConvertFToU) diff --git a/externals/sirit/src/instructions/debug.cpp b/externals/sirit/src/instructions/debug.cpp index abbf02ae5..1ca346265 100755 --- a/externals/sirit/src/instructions/debug.cpp +++ b/externals/sirit/src/instructions/debug.cpp @@ -4,44 +4,33 @@ * 3-Clause BSD License */ -#include -#include -#include "op.h" #include "sirit/sirit.h" +#include "common_types.h" +#include "stream.h" + namespace Sirit { -Id Module::Name(Id target, std::string name) { - auto op{std::make_unique(spv::Op::OpName)}; - op->Add(target); - op->Add(std::move(name)); - debug.push_back(std::move(op)); +Id Module::Name(Id target, std::string_view name) { + debug->Reserve(3 + WordsInString(name)); + *debug << spv::Op::OpName << target << name << EndOp{}; return target; } -Id Module::MemberName(Id type, u32 member, std::string name) { - auto op{std::make_unique(spv::Op::OpMemberName)}; - op->Add(type); - op->Add(member); - op->Add(std::move(name)); - debug.push_back(std::move(op)); +Id Module::MemberName(Id type, u32 member, std::string_view name) { + debug->Reserve(4 + WordsInString(name)); + *debug << spv::Op::OpMemberName << type << member << name << EndOp{}; return type; } -Id Module::String(std::string string) { - auto op{std::make_unique(spv::Op::OpString, 
bound++)}; - op->Add(std::move(string)); - const auto id = op.get(); - debug.push_back(std::move(op)); - return id; +Id Module::String(std::string_view string) { + debug->Reserve(3 + WordsInString(string)); + return *debug << OpId{spv::Op::OpString} << string << EndOp{}; } Id Module::OpLine(Id file, Literal line, Literal column) { - auto op{std::make_unique(spv::Op::OpLine)}; - op->Add(file); - op->Add(line); - op->Add(column); - return AddCode(std::move(op)); + debug->Reserve(4); + return *debug << spv::Op::OpLine << file << line << column << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/derivatives.cpp b/externals/sirit/src/instructions/derivatives.cpp new file mode 100755 index 000000000..c944e2a52 --- /dev/null +++ b/externals/sirit/src/instructions/derivatives.cpp @@ -0,0 +1,29 @@ +/* This file is part of the sirit project. + * Copyright (c) 2021 sirit + * This software may be used and distributed according to the terms of the + * 3-Clause BSD License + */ + +#include "sirit/sirit.h" + +#include "stream.h" + +namespace Sirit { + +#define DEFINE_UNARY(funcname, opcode) \ + Id Module::funcname(Id result_type, Id operand) { \ + code->Reserve(4); \ + return *code << OpId{opcode, result_type} << operand << EndOp{}; \ + } + +DEFINE_UNARY(OpDPdx, spv::Op::OpDPdx) +DEFINE_UNARY(OpDPdy, spv::Op::OpDPdy) +DEFINE_UNARY(OpFwidth, spv::Op::OpFwidth) +DEFINE_UNARY(OpDPdxFine, spv::Op::OpDPdxFine) +DEFINE_UNARY(OpDPdyFine, spv::Op::OpDPdyFine) +DEFINE_UNARY(OpFwidthFine, spv::Op::OpFwidthFine) +DEFINE_UNARY(OpDPdxCoarse, spv::Op::OpDPdxCoarse) +DEFINE_UNARY(OpDPdyCoarse, spv::Op::OpDPdyCoarse) +DEFINE_UNARY(OpFwidthCoarse, spv::Op::OpFwidthCoarse) + +} // namespace Sirit diff --git a/externals/sirit/src/instructions/extension.cpp b/externals/sirit/src/instructions/extension.cpp index a806347f3..9f7aa4305 100755 --- a/externals/sirit/src/instructions/extension.cpp +++ b/externals/sirit/src/instructions/extension.cpp @@ -4,20 +4,18 @@ * 3-Clause BSD License */ -#include #include -#include "common_types.h" -#include "op.h" + #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { -Id Module::OpExtInst(Id result_type, Id set, u32 instruction, const std::vector& operands) { - auto op{std::make_unique(spv::Op::OpExtInst, bound++, result_type)}; - op->Add(set); - op->Add(instruction); - op->Add(operands); - return AddCode(std::move(op)); +Id Module::OpExtInst(Id result_type, Id set, u32 instruction, std::span operands) { + code->Reserve(5 + operands.size()); + return *code << OpId{spv::Op::OpExtInst, result_type} << set << instruction << operands + << EndOp{}; } #define DEFINE_UNARY(funcname, opcode) \ @@ -74,4 +72,5 @@ DEFINE_UNARY(OpFindUMsb, GLSLstd450FindUMsb) DEFINE_UNARY(OpInterpolateAtCentroid, GLSLstd450InterpolateAtCentroid) DEFINE_BINARY(OpInterpolateAtSample, GLSLstd450InterpolateAtSample) DEFINE_BINARY(OpInterpolateAtOffset, GLSLstd450InterpolateAtOffset) + } // namespace Sirit diff --git a/externals/sirit/src/instructions/flow.cpp b/externals/sirit/src/instructions/flow.cpp index ccec4bf9d..e462dcb74 100755 --- a/externals/sirit/src/instructions/flow.cpp +++ b/externals/sirit/src/instructions/flow.cpp @@ -5,79 +5,96 @@ */ #include -#include -#include "common_types.h" -#include "op.h" + #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { +Id Module::OpPhi(Id result_type, std::span operands) { + assert(operands.size() % 2 == 0); + code->Reserve(3 + operands.size()); + return *code << OpId{spv::Op::OpPhi, result_type} << operands << EndOp{}; 
+} + +Id Module::DeferredOpPhi(Id result_type, std::span blocks) { + deferred_phi_nodes.push_back(code->LocalAddress()); + code->Reserve(3 + blocks.size() * 2); + *code << OpId{spv::Op::OpPhi, result_type}; + for (const Id block : blocks) { + *code << u32{0} << block; + } + return *code << EndOp{}; +} + Id Module::OpLoopMerge(Id merge_block, Id continue_target, spv::LoopControlMask loop_control, - const std::vector& literals) { - auto op{std::make_unique(spv::Op::OpLoopMerge)}; - op->Add(merge_block); - op->Add(continue_target); - op->Add(static_cast(loop_control)); - op->Add(literals); - return AddCode(std::move(op)); + std::span literals) { + code->Reserve(4 + literals.size()); + return *code << spv::Op::OpLoopMerge << merge_block << continue_target << loop_control + << literals << EndOp{}; } Id Module::OpSelectionMerge(Id merge_block, spv::SelectionControlMask selection_control) { - auto op{std::make_unique(spv::Op::OpSelectionMerge)}; - op->Add(merge_block); - op->Add(static_cast(selection_control)); - return AddCode(std::move(op)); + code->Reserve(3); + return *code << spv::Op::OpSelectionMerge << merge_block << selection_control << EndOp{}; } Id Module::OpLabel() { - return code_store.emplace_back(std::make_unique(spv::Op::OpLabel, bound++)).get(); + return Id{++bound}; } Id Module::OpBranch(Id target_label) { - auto op{std::make_unique(spv::Op::OpBranch)}; - op->Add(target_label); - return AddCode(std::move(op)); + code->Reserve(2); + return *code << spv::Op::OpBranch << target_label << EndOp{}; } Id Module::OpBranchConditional(Id condition, Id true_label, Id false_label, u32 true_weight, u32 false_weight) { - auto op{std::make_unique(spv::Op::OpBranchConditional)}; - op->Add(condition); - op->Add(true_label); - op->Add(false_label); + code->Reserve(6); + *code << spv::Op::OpBranchConditional << condition << true_label << false_label; if (true_weight != 0 || false_weight != 0) { - op->Add(true_weight); - op->Add(false_weight); + *code << true_weight << false_weight; } - return AddCode(std::move(op)); + return *code << EndOp{}; } -Id Module::OpSwitch(Id selector, Id default_label, const std::vector& literals, - const std::vector& labels) { - const std::size_t size = literals.size(); +Id Module::OpSwitch(Id selector, Id default_label, std::span literals, + std::span labels) { assert(literals.size() == labels.size()); - auto op{std::make_unique(spv::Op::OpSwitch)}; - op->Add(selector); - op->Add(default_label); + const size_t size = literals.size(); + code->Reserve(3 + size * 2); + + *code << spv::Op::OpSwitch << selector << default_label; for (std::size_t i = 0; i < size; ++i) { - op->Add(literals[i]); - op->Add(labels[i]); + *code << literals[i] << labels[i]; } - return AddCode(std::move(op)); + return *code << EndOp{}; } -Id Module::OpReturn() { - return AddCode(spv::Op::OpReturn); +void Module::OpReturn() { + code->Reserve(1); + *code << spv::Op::OpReturn << EndOp{}; +} + +void Module::OpUnreachable() { + code->Reserve(1); + *code << spv::Op::OpUnreachable << EndOp{}; } Id Module::OpReturnValue(Id value) { - auto op{std::make_unique(spv::Op::OpReturnValue)}; - op->Add(value); - return AddCode(std::move(op)); + code->Reserve(2); + return *code << spv::Op::OpReturnValue << value << EndOp{}; } -Id Module::OpKill() { - return AddCode(std::make_unique(spv::Op::OpKill)); +void Module::OpKill() { + code->Reserve(1); + *code << spv::Op::OpKill << EndOp{}; +} + +void Module::OpDemoteToHelperInvocationEXT() { + code->Reserve(1); + *code << spv::Op::OpDemoteToHelperInvocationEXT << EndOp{}; } 
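
For context, a sketch of how these control-flow helpers compose. Per the diff above, OpLabel now only allocates an Id from the module bound; attaching the label to the instruction stream happens separately, outside this hunk. `cond` is an assumed boolean Id:

    const Sirit::Id then_label = mod.OpLabel();
    const Sirit::Id merge_label = mod.OpLabel();
    mod.OpSelectionMerge(merge_label, spv::SelectionControlMask::MaskNone);
    // Branch weights (here 90/10) are emitted only when at least one is nonzero.
    mod.OpBranchConditional(cond, then_label, merge_label, 90, 10);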
} // namespace Sirit diff --git a/externals/sirit/src/instructions/function.cpp b/externals/sirit/src/instructions/function.cpp index 3fb112651..f84cd7ff5 100755 --- a/externals/sirit/src/instructions/function.cpp +++ b/externals/sirit/src/instructions/function.cpp @@ -4,28 +4,31 @@ * 3-Clause BSD License */ -#include "common_types.h" -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::OpFunction(Id result_type, spv::FunctionControlMask function_control, Id function_type) { - auto op{std::make_unique(spv::Op::OpFunction, bound++, result_type)}; - op->Add(static_cast(function_control)); - op->Add(function_type); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpFunction, result_type} << function_control << function_type + << EndOp{}; } -Id Module::OpFunctionEnd() { - return AddCode(spv::Op::OpFunctionEnd); +void Module::OpFunctionEnd() { + code->Reserve(1); + *code << spv::Op::OpFunctionEnd << EndOp{}; } -Id Module::OpFunctionCall(Id result_type, Id function, const std::vector& arguments) { - auto op{std::make_unique(spv::Op::OpFunctionCall, bound++, result_type)}; - op->Add(function); - op->Add(arguments); - return AddCode(std::move(op)); +Id Module::OpFunctionCall(Id result_type, Id function, std::span arguments) { + code->Reserve(4 + arguments.size()); + return *code << OpId{spv::Op::OpFunctionCall, result_type} << function << arguments << EndOp{}; +} + +Id Module::OpFunctionParameter(Id result_type) { + code->Reserve(3); + return *code << OpId{spv::Op::OpFunctionParameter, result_type} << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/group.cpp b/externals/sirit/src/instructions/group.cpp index aba6422ad..274b5b63d 100755 --- a/externals/sirit/src/instructions/group.cpp +++ b/externals/sirit/src/instructions/group.cpp @@ -4,48 +4,62 @@ * 3-Clause BSD License */ -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::OpSubgroupBallotKHR(Id result_type, Id predicate) { - auto op = std::make_unique(spv::Op::OpSubgroupBallotKHR, bound++, result_type); - op->Add(predicate); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpSubgroupBallotKHR, result_type} << predicate << EndOp{}; } Id Module::OpSubgroupReadInvocationKHR(Id result_type, Id value, Id index) { - auto op = std::make_unique(spv::Op::OpSubgroupReadInvocationKHR, bound++, result_type); - op->Add(value); - op->Add(index); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpSubgroupReadInvocationKHR, result_type} << value << index + << EndOp{}; } Id Module::OpSubgroupAllKHR(Id result_type, Id predicate) { - auto op = std::make_unique(spv::Op::OpSubgroupAllKHR, bound++, result_type); - op->Add(predicate); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpSubgroupAllKHR, result_type} << predicate << EndOp{}; } Id Module::OpSubgroupAnyKHR(Id result_type, Id predicate) { - auto op = std::make_unique(spv::Op::OpSubgroupAnyKHR, bound++, result_type); - op->Add(predicate); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpSubgroupAnyKHR, result_type} << predicate << EndOp{}; } Id Module::OpSubgroupAllEqualKHR(Id result_type, Id predicate) { - auto op = std::make_unique(spv::Op::OpSubgroupAllEqualKHR, bound++, result_type); - op->Add(predicate); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpSubgroupAllEqualKHR, 
result_type} << predicate << EndOp{}; } -Id Module::OpGroupNonUniformShuffleXor(Id result_type, spv::Scope scope, Id value, Id mask) { - auto op = std::make_unique(spv::Op::OpGroupNonUniformShuffleXor, bound++, result_type); - op->Add(static_cast(scope)); - op->Add(value); - op->Add(mask); - return AddCode(std::move(op)); +Id Module::OpGroupNonUniformShuffleXor(Id result_type, Id scope, Id value, Id mask) { + code->Reserve(6); + return *code << OpId{spv::Op::OpGroupNonUniformShuffleXor, result_type} << scope << value + << mask << EndOp{}; +} + +Id Module::OpGroupNonUniformAll(Id result_type, Id scope, Id predicate) { + code->Reserve(5); + return *code << OpId{spv::Op::OpGroupNonUniformAll, result_type} << scope << predicate << EndOp{}; +} + +Id Module::OpGroupNonUniformAny(Id result_type, Id scope, Id predicate) { + code->Reserve(5); + return *code << OpId{spv::Op::OpGroupNonUniformAny, result_type} << scope << predicate << EndOp{}; +} + +Id Module::OpGroupNonUniformAllEqual(Id result_type, Id scope, Id value) { + code->Reserve(5); + return *code << OpId{spv::Op::OpGroupNonUniformAllEqual, result_type} << scope << value << EndOp{}; +} + +Id Module::OpGroupNonUniformBallot(Id result_type, Id scope, Id predicate) { + code->Reserve(5); + return *code << OpId{spv::Op::OpGroupNonUniformBallot, result_type} << scope << predicate << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/image.cpp b/externals/sirit/src/instructions/image.cpp index d12dc2c88..e68447f1e 100755 --- a/externals/sirit/src/instructions/image.cpp +++ b/externals/sirit/src/instructions/image.cpp @@ -4,79 +4,58 @@ * 3-Clause BSD License */ -#include "common_types.h" -#include "op.h" +#include + #include "sirit/sirit.h" -namespace Sirit { +#include "stream.h" -static void AddImageOperands(Op* op, std::optional image_operands, - const std::vector& operands) { - if (!image_operands) - return; - op->Add(static_cast(*image_operands)); - op->Add(operands); -} +namespace Sirit { #define DEFINE_IMAGE_OP(opcode) \ Id Module::opcode(Id result_type, Id sampled_image, Id coordinate, \ std::optional image_operands, \ - const std::vector& operands) { \ - auto op{std::make_unique(spv::Op::opcode, bound++, result_type)}; \ - op->Add(sampled_image); \ - op->Add(coordinate); \ - AddImageOperands(op.get(), image_operands, operands); \ - return AddCode(std::move(op)); \ + std::span operands) { \ + code->Reserve(6 + operands.size()); \ + return *code << OpId{spv::Op::opcode, result_type} << sampled_image << coordinate \ + << image_operands << operands << EndOp{}; \ } #define DEFINE_IMAGE_EXP_OP(opcode) \ Id Module::opcode(Id result_type, Id sampled_image, Id coordinate, \ - spv::ImageOperandsMask image_operands, const std::vector& operands) { \ - auto op{std::make_unique(spv::Op::opcode, bound++, result_type)}; \ - op->Add(sampled_image); \ - op->Add(coordinate); \ - op->Add(static_cast(image_operands)); \ - op->Add(operands); \ - return AddCode(std::move(op)); \ + spv::ImageOperandsMask image_operands, std::span operands) { \ + code->Reserve(6 + operands.size()); \ + return *code << OpId{spv::Op::opcode, result_type} << sampled_image << coordinate \ + << image_operands << operands << EndOp{}; \ } #define DEFINE_IMAGE_EXTRA_OP(opcode) \ Id Module::opcode(Id result_type, Id sampled_image, Id coordinate, Id extra, \ std::optional image_operands, \ - const std::vector& operands) { \ - auto op{std::make_unique(spv::Op::opcode, bound++, result_type)}; \ - op->Add(sampled_image); \ - op->Add(coordinate); \ - op->Add(extra); \ 
- AddImageOperands(op.get(), image_operands, operands); \ - return AddCode(std::move(op)); \ + std::span operands) { \ + code->Reserve(7 + operands.size()); \ + return *code << OpId{spv::Op::opcode, result_type} << sampled_image << coordinate << extra \ + << image_operands << operands << EndOp{}; \ } #define DEFINE_IMAGE_EXTRA_EXP_OP(opcode) \ Id Module::opcode(Id result_type, Id sampled_image, Id coordinate, Id extra, \ - spv::ImageOperandsMask image_operands, const std::vector& operands) { \ - auto op{std::make_unique(spv::Op::opcode, bound++, result_type)}; \ - op->Add(sampled_image); \ - op->Add(coordinate); \ - op->Add(extra); \ - op->Add(static_cast(image_operands)); \ - op->Add(operands); \ - return AddCode(std::move(op)); \ + spv::ImageOperandsMask image_operands, std::span operands) { \ + code->Reserve(8 + operands.size()); \ + return *code << OpId{spv::Op::opcode, result_type} << sampled_image << coordinate << extra \ + << image_operands << operands << EndOp{}; \ } #define DEFINE_IMAGE_QUERY_OP(opcode) \ Id Module::opcode(Id result_type, Id image) { \ - auto op{std::make_unique(spv::Op::opcode, bound++, result_type)}; \ - op->Add(image); \ - return AddCode(std::move(op)); \ + code->Reserve(5); \ + return *code << OpId{spv::Op::opcode, result_type} << image << EndOp{}; \ } #define DEFINE_IMAGE_QUERY_BIN_OP(opcode) \ Id Module::opcode(Id result_type, Id image, Id extra) { \ - auto op{std::make_unique(spv::Op::opcode, bound++, result_type)}; \ - op->Add(image); \ - op->Add(extra); \ - return AddCode(std::move(op)); \ + code->Reserve(5); \ + return *code << OpId{spv::Op::opcode, result_type} << image << extra << EndOp{}; \ } DEFINE_IMAGE_OP(OpImageSampleImplicitLod) @@ -98,27 +77,93 @@ DEFINE_IMAGE_QUERY_OP(OpImageQueryLevels) DEFINE_IMAGE_QUERY_OP(OpImageQuerySamples) Id Module::OpSampledImage(Id result_type, Id image, Id sampler) { - auto op{std::make_unique(spv::Op::OpSampledImage, bound++, result_type)}; - op->Add(image); - op->Add(sampler); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpSampledImage, result_type} << image << sampler << EndOp{}; } Id Module::OpImageWrite(Id image, Id coordinate, Id texel, std::optional image_operands, - const std::vector& operands) { - auto op{std::make_unique(spv::Op::OpImageWrite)}; - op->Add(image); - op->Add(coordinate); - op->Add(texel); - AddImageOperands(op.get(), image_operands, operands); - return AddCode(std::move(op)); + std::span operands) { + assert(image_operands.has_value() != operands.empty()); + code->Reserve(5 + operands.size()); + return *code << spv::Op::OpImageWrite << image << coordinate << texel << image_operands + << operands << EndOp{}; } Id Module::OpImage(Id result_type, Id sampled_image) { - auto op{std::make_unique(spv::Op::OpImage, bound++, result_type)}; - op->Add(sampled_image); - return AddCode(std::move(op)); + code->Reserve(4); + return *code << OpId{spv::Op::OpImage, result_type} << sampled_image << EndOp{}; +} + +Id Module::OpImageSparseSampleImplicitLod(Id result_type, Id sampled_image, Id coordinate, + std::optional image_operands, + std::span operands) { + code->Reserve(5 + (image_operands.has_value() ? 
1 : 0) + operands.size()); + return *code << OpId{spv::Op::OpImageSparseSampleImplicitLod, result_type} << sampled_image + << coordinate << image_operands << operands << EndOp{}; +} + +Id Module::OpImageSparseSampleExplicitLod(Id result_type, Id sampled_image, Id coordinate, + spv::ImageOperandsMask image_operands, + std::span operands) { + code->Reserve(6 + operands.size()); + return *code << OpId{spv::Op::OpImageSparseSampleExplicitLod, result_type} << sampled_image + << coordinate << image_operands << operands << EndOp{}; +} + +Id Module::OpImageSparseSampleDrefImplicitLod(Id result_type, Id sampled_image, Id coordinate, + Id dref, + std::optional image_operands, + std::span operands) { + code->Reserve(6 + (image_operands.has_value() ? 1 : 0) + operands.size()); + return *code << OpId{spv::Op::OpImageSparseSampleDrefImplicitLod, result_type} << sampled_image + << coordinate << dref << image_operands << operands << EndOp{}; +} + +Id Module::OpImageSparseSampleDrefExplicitLod(Id result_type, Id sampled_image, Id coordinate, + Id dref, spv::ImageOperandsMask image_operands, + std::span operands) { + code->Reserve(7 + operands.size()); + return *code << OpId{spv::Op::OpImageSparseSampleDrefExplicitLod, result_type} << sampled_image + << coordinate << dref << image_operands << operands << EndOp{}; +} + +Id Module::OpImageSparseFetch(Id result_type, Id image, Id coordinate, + std::optional image_operands, + std::span operands) { + code->Reserve(5 + (image_operands.has_value() ? 1 : 0) + operands.size()); + return *code << OpId{spv::Op::OpImageSparseFetch, result_type} << image << coordinate + << image_operands << operands << EndOp{}; +} + +Id Module::OpImageSparseGather(Id result_type, Id sampled_image, Id coordinate, Id component, + std::optional image_operands, + std::span operands) { + code->Reserve(6 + operands.size()); + return *code << OpId{spv::Op::OpImageSparseGather, result_type} << sampled_image << coordinate + << component << image_operands << operands << EndOp{}; +} + +Id Module::OpImageSparseDrefGather(Id result_type, Id sampled_image, Id coordinate, Id dref, + std::optional image_operands, + std::span operands) { + code->Reserve(6 + operands.size()); + return *code << OpId{spv::Op::OpImageSparseDrefGather, result_type} << sampled_image + << coordinate << dref << image_operands << operands << EndOp{}; +} + +Id Module::OpImageSparseTexelsResident(Id result_type, Id resident_code) { + code->Reserve(4); + return *code << OpId{spv::Op::OpImageSparseTexelsResident, result_type} << resident_code + << EndOp{}; +} + +Id Module::OpImageSparseRead(Id result_type, Id image, Id coordinate, + std::optional image_operands, + std::span operands) { + code->Reserve(5 + (image_operands.has_value() ? 
1 : 0) + operands.size());
+    return *code << OpId{spv::Op::OpImageSparseRead, result_type} << image << coordinate
+                 << image_operands << operands << EndOp{};
+}
 
 } // namespace Sirit
diff --git a/externals/sirit/src/instructions/logical.cpp b/externals/sirit/src/instructions/logical.cpp
index d88d6647c..4d7292bf0 100755
--- a/externals/sirit/src/instructions/logical.cpp
+++ b/externals/sirit/src/instructions/logical.cpp
@@ -4,68 +4,62 @@
  * 3-Clause BSD License
  */
 
-#include <memory>
-#include "common_types.h"
-#include "op.h"
 #include "sirit/sirit.h"
 
+#include "stream.h"
+
 namespace Sirit {
 
-#define DEFINE_UNARY(funcname, opcode) \
-    Id Module::funcname(Id result_type, Id operand) { \
-        auto op{std::make_unique<Op>(opcode, bound++, result_type)}; \
-        op->Add(operand); \
-        return AddCode(std::move(op)); \
+#define DEFINE_UNARY(opcode) \
+    Id Module::opcode(Id result_type, Id operand) { \
+        code->Reserve(4); \
+        return *code << OpId{spv::Op::opcode, result_type} << operand << EndOp{}; \
     }
 
-#define DEFINE_BINARY(funcname, opcode) \
-    Id Module::funcname(Id result_type, Id operand_1, Id operand_2) { \
-        auto op{std::make_unique<Op>(opcode, bound++, result_type)}; \
-        op->Add(operand_1); \
-        op->Add(operand_2); \
-        return AddCode(std::move(op)); \
+#define DEFINE_BINARY(opcode) \
+    Id Module::opcode(Id result_type, Id operand_1, Id operand_2) { \
+        code->Reserve(5); \
+        return *code << OpId{spv::Op::opcode, result_type} << operand_1 << operand_2 << EndOp{}; \
    }
 
-#define DEFINE_TRINARY(funcname, opcode) \
-    Id Module::funcname(Id result_type, Id operand_1, Id operand_2, Id operand_3) { \
-        auto op{std::make_unique<Op>(opcode, bound++, result_type)}; \
-        op->Add(operand_1); \
-        op->Add(operand_2); \
-        op->Add(operand_3); \
-        return AddCode(std::move(op)); \
+#define DEFINE_TRINARY(opcode) \
+    Id Module::opcode(Id result_type, Id operand_1, Id operand_2, Id operand_3) { \
+        code->Reserve(6); \
+        return *code << OpId{spv::Op::opcode, result_type} << operand_1 << operand_2 << operand_3 \
+                     << EndOp{}; \
     }
 
-DEFINE_UNARY(OpAny, spv::Op::OpAny)
-DEFINE_UNARY(OpAll, spv::Op::OpAll)
-DEFINE_UNARY(OpIsNan, spv::Op::OpIsNan)
-DEFINE_UNARY(OpIsInf, spv::Op::OpIsInf)
-DEFINE_BINARY(OpLogicalEqual, spv::Op::OpLogicalEqual)
-DEFINE_BINARY(OpLogicalNotEqual, spv::Op::OpLogicalNotEqual)
-DEFINE_BINARY(OpLogicalOr, spv::Op::OpLogicalOr)
-DEFINE_BINARY(OpLogicalAnd, spv::Op::OpLogicalAnd)
-DEFINE_UNARY(OpLogicalNot, spv::Op::OpLogicalNot)
-DEFINE_TRINARY(OpSelect, spv::Op::OpSelect)
-DEFINE_BINARY(OpIEqual, spv::Op::OpIEqual)
-DEFINE_BINARY(OpINotEqual, spv::Op::OpINotEqual)
-DEFINE_BINARY(OpUGreaterThan, spv::Op::OpUGreaterThan)
-DEFINE_BINARY(OpSGreaterThan, spv::Op::OpSGreaterThan)
-DEFINE_BINARY(OpUGreaterThanEqual, spv::Op::OpUGreaterThanEqual)
-DEFINE_BINARY(OpSGreaterThanEqual, spv::Op::OpSGreaterThanEqual)
-DEFINE_BINARY(OpULessThan, spv::Op::OpULessThan)
-DEFINE_BINARY(OpSLessThan, spv::Op::OpSLessThan)
-DEFINE_BINARY(OpULessThanEqual, spv::Op::OpULessThanEqual)
-DEFINE_BINARY(OpSLessThanEqual, spv::Op::OpSLessThanEqual)
-DEFINE_BINARY(OpFOrdEqual, spv::Op::OpFOrdEqual)
-DEFINE_BINARY(OpFUnordEqual, spv::Op::OpFUnordEqual)
-DEFINE_BINARY(OpFOrdNotEqual, spv::Op::OpFOrdNotEqual)
-DEFINE_BINARY(OpFUnordNotEqual, spv::Op::OpFUnordNotEqual)
-DEFINE_BINARY(OpFOrdLessThan, spv::Op::OpFOrdLessThan)
-DEFINE_BINARY(OpFUnordLessThan, spv::Op::OpFUnordLessThan)
-DEFINE_BINARY(OpFOrdGreaterThan, spv::Op::OpFOrdGreaterThan)
-DEFINE_BINARY(OpFUnordGreaterThan, spv::Op::OpFUnordGreaterThan)
-DEFINE_BINARY(OpFOrdLessThanEqual,
spv::Op::OpFOrdLessThanEqual) -DEFINE_BINARY(OpFUnordLessThanEqual, spv::Op::OpFUnordLessThanEqual) -DEFINE_BINARY(OpFOrdGreaterThanEqual, spv::Op::OpFOrdGreaterThanEqual) -DEFINE_BINARY(OpFUnordGreaterThanEqual, spv::Op::OpFUnordGreaterThanEqual) +DEFINE_UNARY(OpAny) +DEFINE_UNARY(OpAll) +DEFINE_UNARY(OpIsNan) +DEFINE_UNARY(OpIsInf) +DEFINE_BINARY(OpLogicalEqual) +DEFINE_BINARY(OpLogicalNotEqual) +DEFINE_BINARY(OpLogicalOr) +DEFINE_BINARY(OpLogicalAnd) +DEFINE_UNARY(OpLogicalNot) +DEFINE_TRINARY(OpSelect) +DEFINE_BINARY(OpIEqual) +DEFINE_BINARY(OpINotEqual) +DEFINE_BINARY(OpUGreaterThan) +DEFINE_BINARY(OpSGreaterThan) +DEFINE_BINARY(OpUGreaterThanEqual) +DEFINE_BINARY(OpSGreaterThanEqual) +DEFINE_BINARY(OpULessThan) +DEFINE_BINARY(OpSLessThan) +DEFINE_BINARY(OpULessThanEqual) +DEFINE_BINARY(OpSLessThanEqual) +DEFINE_BINARY(OpFOrdEqual) +DEFINE_BINARY(OpFUnordEqual) +DEFINE_BINARY(OpFOrdNotEqual) +DEFINE_BINARY(OpFUnordNotEqual) +DEFINE_BINARY(OpFOrdLessThan) +DEFINE_BINARY(OpFUnordLessThan) +DEFINE_BINARY(OpFOrdGreaterThan) +DEFINE_BINARY(OpFUnordGreaterThan) +DEFINE_BINARY(OpFOrdLessThanEqual) +DEFINE_BINARY(OpFUnordLessThanEqual) +DEFINE_BINARY(OpFOrdGreaterThanEqual) +DEFINE_BINARY(OpFUnordGreaterThanEqual) } // namespace Sirit
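One hand-expanded instance of DEFINE_BINARY from the table above (illustrative, not part of the patch):

    Id Module::OpIEqual(Id result_type, Id operand_1, Id operand_2) {
        code->Reserve(5); // word0 + result type + result id + two operand ids
        return *code << OpId{spv::Op::OpIEqual, result_type} << operand_1 << operand_2 << EndOp{};
    }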
diff --git a/externals/sirit/src/instructions/memory.cpp b/externals/sirit/src/instructions/memory.cpp index 33c8b077e..a542e9fcc 100755 --- a/externals/sirit/src/instructions/memory.cpp +++ b/externals/sirit/src/instructions/memory.cpp @@ -5,91 +5,64 @@ */ #include <cassert> -#include "op.h" + #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { -Id Module::OpVariable(Id result_type, spv::StorageClass storage_class, Id initializer) { - auto op{std::make_unique<Op>(spv::Op::OpVariable, bound++, result_type)}; - op->Add(static_cast<u32>(storage_class)); - if (initializer) { - op->Add(initializer); - } - return code_store.emplace_back(std::move(op)).get(); -} - Id Module::OpImageTexelPointer(Id result_type, Id image, Id coordinate, Id sample) { - auto op{std::make_unique<Op>(spv::Op::OpImageTexelPointer, bound++, result_type)}; - op->Add(image); - op->Add(coordinate); - op->Add(sample); - return AddCode(std::move(op)); + code->Reserve(6); + return *code << OpId{spv::Op::OpImageTexelPointer, result_type} << image << coordinate << sample + << EndOp{}; } Id Module::OpLoad(Id result_type, Id pointer, std::optional<spv::MemoryAccessMask> memory_access) { - auto op{std::make_unique<Op>(spv::Op::OpLoad, bound++, result_type)}; - op->Add(pointer); - if (memory_access) { - op->Add(static_cast<u32>(*memory_access)); - } - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpLoad, result_type} << pointer << memory_access << EndOp{}; } Id Module::OpStore(Id pointer, Id object, std::optional<spv::MemoryAccessMask> memory_access) { - auto op{std::make_unique<Op>(spv::Op::OpStore)}; - op->Add(pointer); - op->Add(object); - if (memory_access) { - op->Add(static_cast<u32>(*memory_access)); - } - return AddCode(std::move(op)); + code->Reserve(4); + return *code << spv::Op::OpStore << pointer << object << memory_access << EndOp{}; } -Id Module::OpAccessChain(Id result_type, Id base, const std::vector<Id>& indexes) { - assert(indexes.size() > 0); - auto op{std::make_unique<Op>(spv::Op::OpAccessChain, bound++, result_type)}; - op->Add(base); - op->Add(indexes); - return AddCode(std::move(op)); +Id Module::OpAccessChain(Id result_type, Id base, std::span<const Id> indexes) { + assert(!indexes.empty()); + code->Reserve(4 + indexes.size()); + return *code << OpId{spv::Op::OpAccessChain, result_type} << base << indexes << EndOp{}; } Id Module::OpVectorExtractDynamic(Id result_type, Id vector, Id index) { - auto op{std::make_unique<Op>(spv::Op::OpVectorExtractDynamic, bound++, result_type)}; - op->Add(vector); - op->Add(index); - return AddCode(std::move(op)); + code->Reserve(5); + return *code << OpId{spv::Op::OpVectorExtractDynamic, result_type} << vector << index + << EndOp{}; } Id Module::OpVectorInsertDynamic(Id result_type, Id vector, Id component, Id index) { - auto op{std::make_unique<Op>(spv::Op::OpVectorInsertDynamic, bound++, result_type)}; - op->Add(vector); - op->Add(component); - op->Add(index); - return AddCode(std::move(op)); + code->Reserve(6); + return *code << OpId{spv::Op::OpVectorInsertDynamic, result_type} << vector << component + << index << EndOp{}; } Id Module::OpCompositeInsert(Id result_type, Id object, Id composite, - const std::vector<Literal>& indexes) { - auto op{std::make_unique<Op>(spv::Op::OpCompositeInsert, bound++, result_type)}; - op->Add(object); - op->Add(composite); - op->Add(indexes); - return AddCode(std::move(op)); + std::span<const Literal> indexes) { + code->Reserve(5 + indexes.size()); + return *code << OpId{spv::Op::OpCompositeInsert, result_type} << object << composite << indexes + << EndOp{}; } -Id Module::OpCompositeExtract(Id result_type, Id composite, const std::vector<Literal>& indexes) { - auto op{std::make_unique<Op>(spv::Op::OpCompositeExtract, bound++, result_type)}; - op->Add(composite); - op->Add(indexes); - return AddCode(std::move(op)); +Id Module::OpCompositeExtract(Id result_type, Id composite, std::span<const Literal> indexes) { + code->Reserve(4 + indexes.size()); + return *code << OpId{spv::Op::OpCompositeExtract, result_type} << composite << indexes + << EndOp{}; } -Id Module::OpCompositeConstruct(Id result_type, const std::vector<Id>& ids) { +Id Module::OpCompositeConstruct(Id result_type, std::span<const Id> ids) { assert(ids.size() >= 1); - auto op{std::make_unique<Op>(spv::Op::OpCompositeConstruct, bound++, result_type)}; - op->Add(ids); - return AddCode(std::move(op)); + code->Reserve(3 + ids.size()); + return *code << OpId{spv::Op::OpCompositeConstruct, result_type} << ids << EndOp{}; } } // namespace Sirit
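A short usage sketch for the span-based accessors above (hypothetical ids; memory_access passed explicitly in case the header gives it no default):

    // Chain to element 1 behind base_ptr, then load it.
    const std::array<Id, 2> indexes{zero_u32, one_u32};
    const Id ptr{module.OpAccessChain(ptr_f32, base_ptr, std::span<const Id>{indexes})};
    const Id value{module.OpLoad(f32_type, ptr, std::nullopt)};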
diff --git a/externals/sirit/src/instructions/misc.cpp b/externals/sirit/src/instructions/misc.cpp index b589090f1..6785346eb 100755 --- a/externals/sirit/src/instructions/misc.cpp +++ b/externals/sirit/src/instructions/misc.cpp @@ -4,21 +4,35 @@ * 3-Clause BSD License */ -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::OpUndef(Id result_type) { - return AddCode(std::make_unique<Op>(spv::Op::OpUndef, bound++, result_type)); + code->Reserve(3); + return *code << OpId{spv::Op::OpUndef, result_type} << EndOp{}; } -Id Module::OpEmitVertex() { - return AddCode(std::make_unique<Op>(spv::Op::OpEmitVertex)); +void Module::OpEmitVertex() { + code->Reserve(1); + *code << spv::Op::OpEmitVertex << EndOp{}; } -Id Module::OpEndPrimitive() { - return AddCode(std::make_unique<Op>(spv::Op::OpEndPrimitive)); +void Module::OpEndPrimitive() { + code->Reserve(1); + *code << spv::Op::OpEndPrimitive << EndOp{}; +} + +void Module::OpEmitStreamVertex(Id stream) { + code->Reserve(2); + *code << spv::Op::OpEmitStreamVertex << stream << EndOp{}; +} + +void Module::OpEndStreamPrimitive(Id stream) { + code->Reserve(2); + *code << spv::Op::OpEndStreamPrimitive << stream << EndOp{}; } } // namespace Sirit diff --git a/externals/sirit/src/instructions/type.cpp b/externals/sirit/src/instructions/type.cpp index 2f2b15a0d..c3d04a0cf 100755 --- a/externals/sirit/src/instructions/type.cpp +++ b/externals/sirit/src/instructions/type.cpp @@ -5,137 +5,118 @@ */ #include <cassert> -#include <memory> #include <optional> -#include "op.h" #include "sirit/sirit.h" +#include "stream.h" + namespace Sirit { Id Module::TypeVoid() { - return AddDeclaration(std::make_unique<Op>(spv::Op::OpTypeVoid, bound)); + declarations->Reserve(2); + return *declarations << OpId{spv::Op::OpTypeVoid} << EndOp{}; } Id Module::TypeBool() { - return AddDeclaration(std::make_unique<Op>(spv::Op::OpTypeBool, bound)); + declarations->Reserve(2); + return *declarations << OpId{spv::Op::OpTypeBool} << EndOp{}; } Id Module::TypeInt(int width, bool is_signed) { - auto op{std::make_unique<Op>(spv::Op::OpTypeInt, bound)}; - op->Add(width); - op->Add(is_signed ? 1 : 0); - return AddDeclaration(std::move(op)); + declarations->Reserve(4); + return *declarations << OpId{spv::Op::OpTypeInt} << width << is_signed << EndOp{}; } Id Module::TypeFloat(int width) { - auto op{std::make_unique<Op>(spv::Op::OpTypeFloat, bound)}; - op->Add(width); - return AddDeclaration(std::move(op)); + declarations->Reserve(3); + return *declarations << OpId{spv::Op::OpTypeFloat} << width << EndOp{}; } Id Module::TypeVector(Id component_type, int component_count) { assert(component_count >= 2); - auto op{std::make_unique<Op>(spv::Op::OpTypeVector, bound)}; - op->Add(component_type); - op->Add(component_count); - return AddDeclaration(std::move(op)); + declarations->Reserve(4); + return *declarations << OpId{spv::Op::OpTypeVector} << component_type << component_count + << EndOp{}; } Id Module::TypeMatrix(Id column_type, int column_count) { assert(column_count >= 2); - auto op{std::make_unique<Op>(spv::Op::OpTypeMatrix, bound)}; - op->Add(column_type); - op->Add(column_count); - return AddDeclaration(std::move(op)); + declarations->Reserve(4); + return *declarations << OpId{spv::Op::OpTypeMatrix} << column_type << column_count << EndOp{}; } Id Module::TypeImage(Id sampled_type, spv::Dim dim, int depth, bool arrayed, bool ms, int sampled, spv::ImageFormat image_format, std::optional<spv::AccessQualifier> access_qualifier) { - auto op{std::make_unique<Op>(spv::Op::OpTypeImage, bound)}; - op->Add(sampled_type); - op->Add(static_cast<u32>(dim)); - op->Add(depth); - op->Add(arrayed ? 1 : 0); - op->Add(ms ? 
1 : 0); - op->Add(sampled); - op->Add(static_cast<u32>(image_format)); - if (access_qualifier.has_value()) { - op->Add(static_cast<u32>(access_qualifier.value())); - } - return AddDeclaration(std::move(op)); + declarations->Reserve(10); + return *declarations << OpId{spv::Op::OpTypeImage} << sampled_type << dim << depth << arrayed + << ms << sampled << image_format << access_qualifier << EndOp{}; } Id Module::TypeSampler() { - return AddDeclaration(std::make_unique<Op>(spv::Op::OpTypeSampler, bound)); + declarations->Reserve(2); + return *declarations << OpId{spv::Op::OpTypeSampler} << EndOp{}; } Id Module::TypeSampledImage(Id image_type) { - auto op{std::make_unique<Op>(spv::Op::OpTypeSampledImage, bound)}; - op->Add(image_type); - return AddDeclaration(std::move(op)); + declarations->Reserve(3); + return *declarations << OpId{spv::Op::OpTypeSampledImage} << image_type << EndOp{}; } Id Module::TypeArray(Id element_type, Id length) { - auto op{std::make_unique<Op>(spv::Op::OpTypeArray, bound)}; - op->Add(element_type); - op->Add(length); - return AddDeclaration(std::move(op)); + declarations->Reserve(4); + return *declarations << OpId{spv::Op::OpTypeArray} << element_type << length << EndOp{}; } Id Module::TypeRuntimeArray(Id element_type) { - auto op{std::make_unique<Op>(spv::Op::OpTypeRuntimeArray, bound)}; - op->Add(element_type); - return AddDeclaration(std::move(op)); + declarations->Reserve(3); + return *declarations << OpId{spv::Op::OpTypeRuntimeArray} << element_type << EndOp{}; } -Id Module::TypeStruct(const std::vector<Id>& members) { - auto op{std::make_unique<Op>(spv::Op::OpTypeStruct, bound)}; - op->Add(members); - return AddDeclaration(std::move(op)); +Id Module::TypeStruct(std::span<const Id> members) { + declarations->Reserve(2 + members.size()); + return *declarations << OpId{spv::Op::OpTypeStruct} << members << EndOp{}; } -Id Module::TypeOpaque(std::string name) { - auto op{std::make_unique<Op>(spv::Op::OpTypeOpaque, bound)}; - op->Add(std::move(name)); - return AddDeclaration(std::move(op)); +Id Module::TypeOpaque(std::string_view name) { + declarations->Reserve(3 + WordsInString(name)); + return *declarations << OpId{spv::Op::OpTypeOpaque} << name << EndOp{}; } Id Module::TypePointer(spv::StorageClass storage_class, Id type) { - auto op{std::make_unique<Op>(spv::Op::OpTypePointer, bound)}; - op->Add(static_cast<u32>(storage_class)); - op->Add(type); - return AddDeclaration(std::move(op)); + declarations->Reserve(4); + return *declarations << OpId{spv::Op::OpTypePointer} << storage_class << type << EndOp{}; } -Id Module::TypeFunction(Id return_type, const std::vector<Id>& arguments) { - auto op{std::make_unique<Op>(spv::Op::OpTypeFunction, bound)}; - op->Add(return_type); - op->Add(arguments); - return AddDeclaration(std::move(op)); +Id Module::TypeFunction(Id return_type, std::span<const Id> arguments) { + declarations->Reserve(3 + arguments.size()); + return *declarations << OpId{spv::Op::OpTypeFunction} << return_type << arguments << EndOp{}; } Id Module::TypeEvent() { - return AddDeclaration(std::make_unique<Op>(spv::Op::OpTypeEvent, bound)); + declarations->Reserve(2); + return *declarations << OpId{spv::Op::OpTypeEvent} << EndOp{}; } Id Module::TypeDeviceEvent() { - return AddDeclaration(std::make_unique<Op>(spv::Op::OpTypeDeviceEvent, bound)); + declarations->Reserve(2); + return *declarations << OpId{spv::Op::OpTypeDeviceEvent} << EndOp{}; } Id Module::TypeReserveId() { - return AddDeclaration(std::make_unique<Op>(spv::Op::OpTypeReserveId, bound)); + declarations->Reserve(2); + return *declarations << OpId{spv::Op::OpTypeReserveId} << EndOp{}; } Id Module::TypeQueue() { - return AddDeclaration(std::make_unique<Op>(spv::Op::OpTypeQueue, bound)); + declarations->Reserve(2); + return *declarations << OpId{spv::Op::OpTypeQueue} << EndOp{}; } Id Module::TypePipe(spv::AccessQualifier access_qualifier) { - auto op{std::make_unique<Op>(spv::Op::OpTypePipe, bound)}; - op->Add(static_cast<u32>(access_qualifier)); - return AddDeclaration(std::move(op)); + declarations->Reserve(3); + return *declarations << OpId{spv::Op::OpTypePipe} << access_qualifier << EndOp{}; } } // namespace Sirit
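Since every Type* helper above goes through the declarations stream, repeating a call cannot grow the module; the Declarations stream in stream.h further below deduplicates and returns the existing id. A hedged sketch (ids hypothetical):

    const Id f32{module.TypeFloat(32)};
    const Id vec4_f32{module.TypeVector(f32, 4)};
    // An identical request resolves to the same declaration and the same id:
    assert(module.TypeVector(f32, 4).value == vec4_f32.value);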
diff --git a/externals/sirit/src/sirit.cpp b/externals/sirit/src/sirit.cpp index 473830c0e..7075f23e1 100755 --- a/externals/sirit/src/sirit.cpp +++ b/externals/sirit/src/sirit.cpp @@ -4,66 +4,82 @@ * 3-Clause BSD License */ -#include <algorithm> #include <cassert> -#include "common_types.h" -#include "op.h" + #include "sirit/sirit.h" + +#include "common_types.h" #include "stream.h" namespace Sirit { -template <typename T> -static void WriteSet(Stream& stream, const T& set) { - for (const auto& item : set) { - item->Write(stream); - } +constexpr u32 MakeWord0(spv::Op op, size_t word_count) { + return static_cast<u32>(op) | static_cast<u32>(word_count) << 16; } -Module::Module(u32 version_) : version{version_} {} +Module::Module(u32 version_) + : version{version_}, ext_inst_imports{std::make_unique<Stream>(&bound)}, + entry_points{std::make_unique<Stream>(&bound)}, + execution_modes{std::make_unique<Stream>(&bound)}, debug{std::make_unique<Stream>(&bound)}, + annotations{std::make_unique<Stream>(&bound)}, declarations{std::make_unique<Declarations>( + &bound)}, + global_variables{std::make_unique<Stream>(&bound)}, code{std::make_unique<Stream>(&bound)} {} Module::~Module() = default; std::vector<u32> Module::Assemble() const { - std::vector<u32> bytes; - Stream stream{bytes}; + // SPIR-V module header: magic, version, generator, id bound, schema (always 0) + std::vector<u32> words = {spv::MagicNumber, version, GENERATOR_MAGIC_NUMBER, bound + 1, 0}; + const auto insert = [&words](std::span<const u32> input) { + words.insert(words.end(), input.begin(), input.end()); + }; - stream.Write(spv::MagicNumber); - stream.Write(version); - stream.Write(GENERATOR_MAGIC_NUMBER); - stream.Write(bound); - stream.Write(static_cast<u32>(0)); - - for (const auto capability : capabilities) { - Op op(spv::Op::OpCapability); - op.Add(static_cast<u32>(capability)); - op.Write(stream); + words.reserve(words.size() + capabilities.size() * 2); + for (const spv::Capability capability : capabilities) { + insert(std::array{ + MakeWord0(spv::Op::OpCapability, 2), + static_cast<u32>(capability), + }); } - for (const auto& extension_name : extensions) { - Op op(spv::Op::OpExtension); - op.Add(extension_name); - op.Write(stream); + for (const std::string_view extension_name : extensions) { + size_t string_words = WordsInString(extension_name); + words.push_back(MakeWord0(spv::Op::OpExtension, string_words + 1)); + size_t insert_index = words.size(); + words.resize(words.size() + string_words); + InsertStringView(words, insert_index, extension_name); } - if (glsl_std_450) { - glsl_std_450->Write(stream); + insert(ext_inst_imports->Words()); + + insert(std::array{ + MakeWord0(spv::Op::OpMemoryModel, 3), + static_cast<u32>(addressing_model), + static_cast<u32>(memory_model), + }); + + insert(entry_points->Words()); + insert(execution_modes->Words()); + insert(debug->Words()); + insert(annotations->Words()); + insert(declarations->Words()); + insert(global_variables->Words()); + insert(code->Words()); + + return words; +} + +void Module::PatchDeferredPhi(const std::function<Id(u32)>& func) { + for (const u32 phi_index : deferred_phi_nodes) { + const u32 first_word = code->Value(phi_index); + [[maybe_unused]] const spv::Op op = static_cast<spv::Op>(first_word & 0xffff); + 
assert(op == spv::Op::OpPhi); + // OpPhi layout: word0 {word count, opcode}, result type id, result id, + // then (variable, parent) operand pairs, hence (num_words - 3) / 2 pairs. + const u32 num_words = first_word >> 16; + const u32 num_args = (num_words - 3) / 2; + u32 cursor = phi_index + 3; + for (u32 arg = 0; arg < num_args; ++arg, cursor += 2) { + code->SetValue(cursor, func(arg).value); + } } - - Op memory_model_ref{spv::Op::OpMemoryModel}; - memory_model_ref.Add(static_cast<u32>(addressing_model)); - memory_model_ref.Add(static_cast<u32>(memory_model)); - memory_model_ref.Write(stream); - - WriteSet(stream, entry_points); - WriteSet(stream, execution_modes); - WriteSet(stream, debug); - WriteSet(stream, annotations); - WriteSet(stream, declarations); - WriteSet(stream, global_variables); - WriteSet(stream, code); - - return bytes; } void Module::AddExtension(std::string extension_name) { @@ -74,76 +90,53 @@ void Module::AddCapability(spv::Capability capability) { capabilities.insert(capability); } -void Module::SetMemoryModel(spv::AddressingModel addressing_model_, spv::MemoryModel memory_model_) { - this->addressing_model = addressing_model_; - this->memory_model = memory_model_; +void Module::SetMemoryModel(spv::AddressingModel addressing_model_, + spv::MemoryModel memory_model_) { + addressing_model = addressing_model_; + memory_model = memory_model_; } void Module::AddEntryPoint(spv::ExecutionModel execution_model, Id entry_point, - std::string name, const std::vector<Id>& interfaces) { - auto op{std::make_unique<Op>(spv::Op::OpEntryPoint)}; - op->Add(static_cast<u32>(execution_model)); - op->Add(entry_point); - op->Add(std::move(name)); - op->Add(interfaces); - entry_points.push_back(std::move(op)); + std::string_view name, std::span<const Id> interfaces) { + entry_points->Reserve(4 + WordsInString(name) + interfaces.size()); + *entry_points << spv::Op::OpEntryPoint << execution_model << entry_point << name << interfaces + << EndOp{}; } void Module::AddExecutionMode(Id entry_point, spv::ExecutionMode mode, - const std::vector<Literal>& literals) { - auto op{std::make_unique<Op>(spv::Op::OpExecutionMode)}; - op->Add(entry_point); - op->Add(static_cast<u32>(mode)); - op->Add(literals); - execution_modes.push_back(std::move(op)); + std::span<const Literal> literals) { + execution_modes->Reserve(3 + literals.size()); + *execution_modes << spv::Op::OpExecutionMode << entry_point << mode << literals << EndOp{}; } Id Module::AddLabel(Id label) { - assert(label != nullptr); - return code.emplace_back(label); + assert(label.value != 0); + code->Reserve(2); + *code << MakeWord0(spv::Op::OpLabel, 2) << label.value; + return label; } -Id Module::AddLocalVariable(Id variable) { - assert(variable != nullptr); - return code.emplace_back(variable); +Id Module::AddLocalVariable(Id result_type, spv::StorageClass storage_class, + std::optional<Id> initializer) { + code->Reserve(5); + return *code << OpId{spv::Op::OpVariable, result_type} << storage_class << initializer + << EndOp{}; } -Id Module::AddGlobalVariable(Id variable) { - assert(variable); - return global_variables.emplace_back(variable); -} - -Id Module::AddCode(std::unique_ptr<Op> op) { - const Id id = code_store.emplace_back(std::move(op)).get(); - return code.emplace_back(id); -} - -Id Module::AddCode(spv::Op opcode, std::optional<u32> id) { - return AddCode(std::make_unique<Op>(opcode, id)); -} - -Id Module::AddDeclaration(std::unique_ptr<Op> op) { - const auto& found{std::find_if(declarations.begin(), declarations.end(), - [&op](const auto& other) { return *other == *op; })}; - if (found != declarations.end()) { - return found->get(); - } - const auto id = op.get(); - declarations.push_back(std::move(op)); - bound++; - return id; -} - -void 
Module::AddAnnotation(std::unique_ptr<Op> op) { - annotations.push_back(std::move(op)); +Id Module::AddGlobalVariable(Id result_type, spv::StorageClass storage_class, + std::optional<Id> initializer) { + global_variables->Reserve(5); + return *global_variables << OpId{spv::Op::OpVariable, result_type} << storage_class + << initializer << EndOp{}; } Id Module::GetGLSLstd450() { if (!glsl_std_450) { - glsl_std_450 = std::make_unique<Op>(spv::Op::OpExtInstImport, bound++); - glsl_std_450->Add("GLSL.std.450"); + ext_inst_imports->Reserve(3 + 4); + glsl_std_450 = *ext_inst_imports << OpId{spv::Op::OpExtInstImport} << "GLSL.std.450" + << EndOp{}; } - return glsl_std_450.get(); + return *glsl_std_450; } } // namespace Sirit diff --git a/externals/sirit/src/stream.cpp b/externals/sirit/src/stream.cpp index 32bafbd38..e69de29bb 100755 --- a/externals/sirit/src/stream.cpp +++ b/externals/sirit/src/stream.cpp @@ -1,51 +0,0 @@ -/* This file is part of the sirit project. - * Copyright (c) 2019 sirit - * This software may be used and distributed according to the terms of the - * 3-Clause BSD License - */ - -#include "stream.h" - -namespace Sirit { - -Stream::Stream(std::vector<u32>& words_) : words{words_} {} - -Stream::~Stream() = default; - -void Stream::Write(std::string_view string) { - constexpr std::size_t word_size = 4; - const auto size = string.size(); - const auto read = [string, size](std::size_t offset) { - return offset < size ? static_cast<u8>(string[offset]) : u8(0); - }; - - words.reserve(words.size() + size / word_size + 1); - for (std::size_t i = 0; i < size; i += word_size) { - Write(read(i), read(i + 1), read(i + 2), read(i + 3)); - } - if (size % word_size == 0) { - Write(u32(0)); - } -} - -void Stream::Write(u64 value) { - const u32 dword[] = {static_cast<u32>(value), static_cast<u32>(value >> 32)}; - words.insert(std::begin(words), std::cbegin(dword), std::cend(dword)); -} - -void Stream::Write(u32 value) { - words.push_back(value); -} - -void Stream::Write(u16 first, u16 second) { - const u32 word = static_cast<u32>(first) | static_cast<u32>(second) << 16; - Write(word); -} - -void Stream::Write(u8 first, u8 second, u8 third, u8 fourth) { - const u32 word = static_cast<u32>(first) | static_cast<u32>(second) << 8 | - static_cast<u32>(third) << 16 | static_cast<u32>(fourth) << 24; - Write(word); -} - -} // namespace Sirit diff --git a/externals/sirit/src/stream.h b/externals/sirit/src/stream.h index b7ec87229..ee3e7b8b6 100755 --- a/externals/sirit/src/stream.h +++ b/externals/sirit/src/stream.h @@ -6,29 +6,260 @@ #pragma once +#include <cassert> +#include <optional> +#include <span> #include <string_view> +#include <type_traits> +#include <unordered_map> +#include <utility> +#include <variant> #include <vector> + +#ifndef __cpp_lib_bit_cast +#include <cstring> +#endif + +#include <spirv/unified1/spirv.hpp> + #include "common_types.h" namespace Sirit { +class Declarations; + +struct OpId { + OpId(spv::Op opcode_) : opcode{opcode_} {} + OpId(spv::Op opcode_, Id result_type_) : opcode{opcode_}, result_type{result_type_} { + assert(result_type.value != 0); + } + + spv::Op opcode{}; + Id result_type{}; +}; + +struct EndOp {}; + +inline size_t WordsInString(std::string_view string) { + return string.size() / sizeof(u32) + 1; +} + +inline void InsertStringView(std::vector<u32>& words, size_t& insert_index, + std::string_view string) { + const size_t size = string.size(); + const auto read = [string, size](size_t offset) { + return offset < size ? 
static_cast<u32>(string[offset]) : 0u; + }; + + for (size_t i = 0; i < size; i += sizeof(u32)) { + words[insert_index++] = read(i) | read(i + 1) << 8 | read(i + 2) << 16 | read(i + 3) << 24; + } + if (size % sizeof(u32) == 0) { + words[insert_index++] = 0; + } +} + class Stream { + friend Declarations; + public: - explicit Stream(std::vector<u32>& words); - ~Stream(); + explicit Stream(u32* bound_) : bound{bound_} {} - void Write(std::string_view string); + void Reserve(size_t num_words) { + if (insert_index + num_words <= words.size()) { + return; + } + words.resize(insert_index + num_words); + } - void Write(u64 value); + std::span<const u32> Words() const noexcept { + return std::span(words.data(), insert_index); + } - void Write(u32 value); + u32 LocalAddress() const noexcept { + return static_cast<u32>(words.size()); + } - void Write(u16 first, u16 second); + u32 Value(u32 index) const noexcept { + return words[index]; + } - void Write(u8 first, u8 second, u8 third, u8 fourth); + void SetValue(u32 index, u32 value) noexcept { + words[index] = value; + } + + Stream& operator<<(spv::Op op) { + op_index = insert_index; + words[insert_index++] = static_cast<u32>(op); + return *this; + } + + Stream& operator<<(OpId op) { + op_index = insert_index; + words[insert_index++] = static_cast<u32>(op.opcode); + if (op.result_type.value != 0) { + words[insert_index++] = op.result_type.value; + } + words[insert_index++] = ++*bound; + return *this; + } + + Id operator<<(EndOp) { + const size_t num_words = insert_index - op_index; + words[op_index] |= static_cast<u32>(num_words) << 16; + return Id{*bound}; + } + + Stream& operator<<(u32 value) { + words[insert_index++] = value; + return *this; + } + + Stream& operator<<(s32 value) { + return *this << static_cast<u32>(value); + } + + Stream& operator<<(u64 value) { + return *this << static_cast<u32>(value) << static_cast<u32>(value >> 32); + } + + Stream& operator<<(s64 value) { + return *this << static_cast<u64>(value); + } + + Stream& operator<<(float value) { +#ifdef __cpp_lib_bit_cast + return *this << std::bit_cast<u32>(value); +#else + static_assert(sizeof(float) == sizeof(u32)); + u32 int_value; + std::memcpy(&int_value, &value, sizeof(int_value)); + return *this << int_value; +#endif + } + + Stream& operator<<(double value) { +#ifdef __cpp_lib_bit_cast + return *this << std::bit_cast<u64>(value); +#else + static_assert(sizeof(double) == sizeof(u64)); + u64 int_value; + std::memcpy(&int_value, &value, sizeof(int_value)); + return *this << int_value; +#endif + } + + Stream& operator<<(bool value) { + return *this << static_cast<u32>(value ? 1 : 0); + } + + Stream& operator<<(Id value) { + assert(value.value != 0); + return *this << value.value; + } + + Stream& operator<<(const Literal& literal) { + std::visit([this](auto value) { *this << value; }, literal); + return *this; + } + + Stream& operator<<(std::string_view string) { + InsertStringView(words, insert_index, string); + return *this; + } + + Stream& operator<<(const char* string) { + return *this << std::string_view{string}; + } + + template <typename T> + requires std::is_enum_v<T> Stream& operator<<(T value) { + static_assert(sizeof(T) == sizeof(u32)); + return *this << static_cast<u32>(value); + } + + template <typename T> + Stream& operator<<(std::optional<T> value) { + if (value) { + *this << *value; + } + return *this; + } + + template <typename T> + Stream& operator<<(std::span<T> values) { + for (const auto& value : values) { + *this << value; + } + return *this; + } private: - std::vector<u32>& words; + u32* bound = nullptr; + std::vector<u32> words; + size_t insert_index = 0; + size_t op_index = 0; +};
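Taken together: Reserve must run before streaming because the operator<< overloads write through words[insert_index++] with no bounds checks, operator<<(OpId) stamps the opcode word and allocates the result id from the shared bound counter, and EndOp back-patches the instruction's first word with the final length. A worked illustration (values hand-computed from the code above):

    u32 bound{};
    Stream stream{&bound};
    stream.Reserve(3);
    const Id f32{stream << OpId{spv::Op::OpTypeFloat} << 32 << EndOp{}};
    // f32.value == 1 and the stream now holds three words:
    //   (3u << 16) | 22u   // word count 3, opcode 22 = OpTypeFloat
    //   1u                 // result id
    //   32u                // bit width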
+ +class Declarations { +public: + explicit Declarations(u32* bound) : stream{bound} {} + + void Reserve(size_t num_words) { + return stream.Reserve(num_words); + } + + std::span<const u32> Words() const noexcept { + return stream.Words(); + } + + template <typename T> + Declarations& operator<<(const T& value) { + stream << value; + return *this; + } + + // Declarations without an id don't exist + Declarations& operator<<(spv::Op) = delete; + + Declarations& operator<<(OpId op) { + id_index = op.result_type.value != 0 ? 2 : 1; + stream << op; + return *this; + } + + Id operator<<(EndOp) { + const auto begin = stream.words.data(); + std::vector<u32> declarations(begin + stream.op_index, begin + stream.insert_index); + + // Normalize result id for lookups + const u32 id = std::exchange(declarations[id_index], 0); + + const auto [entry, inserted] = existing_declarations.emplace(declarations, id); + if (inserted) { + return stream << EndOp{}; + } + // If the declaration already exists, undo the operation + stream.insert_index = stream.op_index; + --*stream.bound; + + return Id{entry->second}; + } + +private: + struct HashVector { + size_t operator()(const std::vector<u32>& vector) const noexcept { + size_t hash = std::hash<size_t>{}(vector.size()); + for (const u32 value : vector) { + hash ^= std::hash<u32>{}(value); + } + return hash; + } + }; + + Stream stream; + std::unordered_map<std::vector<u32>, u32, HashVector> existing_declarations; + size_t id_index = 0; +}; } // namespace Sirit
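The dedup flow in operator<<(EndOp) above: copy the pending words, zero the result-id slot so lookups compare structure only, and on a repeat emission roll back insert_index and the id counter. Its observable behaviour, sketched (illustrative):

    u32 bound{};
    Declarations decls{&bound};
    decls.Reserve(4);
    const Id a{decls << OpId{spv::Op::OpTypeInt} << 32 << 1 << EndOp{}};
    decls.Reserve(4);
    const Id b{decls << OpId{spv::Op::OpTypeInt} << 32 << 1 << EndOp{}};
    // a.value == b.value == 1 and bound is still 1: the second emission was undone.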
diff --git a/externals/sirit/tests/main.cpp b/externals/sirit/tests/main.cpp index 407dcd5c5..d119c2533 100755 --- a/externals/sirit/tests/main.cpp +++ b/externals/sirit/tests/main.cpp @@ -7,6 +7,7 @@ #include #include #include + #include class MyModule : public Sirit::Module { @@ -31,9 +32,9 @@ public: const auto gl_per_vertex_ptr = Name(TypePointer(spv::StorageClass::Output, gl_per_vertex), "out_gl_PerVertex"); - const auto in_pos = Name(OpVariable(in_float4, spv::StorageClass::Input), "in_pos"); + const auto in_pos = Name(AddGlobalVariable(in_float4, spv::StorageClass::Input), "in_pos"); const auto per_vertex = - Name(OpVariable(gl_per_vertex_ptr, spv::StorageClass::Output), "per_vertex"); + Name(AddGlobalVariable(gl_per_vertex_ptr, spv::StorageClass::Output), "per_vertex"); Decorate(in_pos, spv::Decoration::Location, 0); Decorate(gl_per_vertex, spv::Decoration::Block); @@ -41,9 +42,6 @@ MemberDecorate(gl_per_vertex, 0, spv::Decoration::BuiltIn, static_cast<std::uint32_t>(spv::BuiltIn::Position)); - AddGlobalVariable(in_pos); - AddGlobalVariable(per_vertex); - const auto main_func = Name( OpFunction(t_void, spv::FunctionControlMask::MaskNone, TypeFunction(t_void)), "main"); AddLabel(); @@ -57,8 +55,8 @@ auto tmp_position = OpUndef(float4); tmp_position = OpCompositeInsert(float4, pos_x, tmp_position, 0); tmp_position = OpCompositeInsert(float4, pos_y, tmp_position, 1); - tmp_position = OpCompositeInsert(float4, Constant(t_float, 0.f), tmp_position, 2); - tmp_position = OpCompositeInsert(float4, Constant(t_float, 1.f), tmp_position, 3); + tmp_position = OpCompositeInsert(float4, Constant(t_float, 0.0f), tmp_position, 2); + tmp_position = OpCompositeInsert(float4, Constant(t_float, 1.0f), tmp_position, 3); const auto gl_position = OpAccessChain(out_float4, per_vertex, Constant(t_uint, 0u)); OpStore(gl_position, tmp_position); @@ -123,7 +121,8 @@ 0x1b, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x41, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x1b, 0x00, 0x00, 0x00, - 0xfd, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00}; + 0xfd, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00, +}; int main(int argc, char** argv) { MyModule module; diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index f8ec8fea8..6e66dc1df 100755 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -142,6 +142,7 @@ add_subdirectory(core) add_subdirectory(audio_core) add_subdirectory(video_core) add_subdirectory(input_common) +add_subdirectory(shader_recompiler) add_subdirectory(tests) if (ENABLE_SDL2) diff --git a/src/common/CMakeLists.txt b/src/common/CMakeLists.txt index e03fffd8d..c92266a17 100755 --- a/src/common/CMakeLists.txt +++ b/src/common/CMakeLists.txt @@ -32,61 +32,7 @@ add_custom_command(OUTPUT scm_rev.cpp DEPENDS # WARNING! 
It was too much work to try and make a common location for this list, # so if you need to change it, please update CMakeModules/GenerateSCMRev.cmake as well - "${VIDEO_CORE}/renderer_opengl/gl_arb_decompiler.cpp" - "${VIDEO_CORE}/renderer_opengl/gl_arb_decompiler.h" - "${VIDEO_CORE}/renderer_opengl/gl_shader_cache.cpp" - "${VIDEO_CORE}/renderer_opengl/gl_shader_cache.h" - "${VIDEO_CORE}/renderer_opengl/gl_shader_decompiler.cpp" - "${VIDEO_CORE}/renderer_opengl/gl_shader_decompiler.h" - "${VIDEO_CORE}/renderer_opengl/gl_shader_disk_cache.cpp" - "${VIDEO_CORE}/renderer_opengl/gl_shader_disk_cache.h" - "${VIDEO_CORE}/shader/decode/arithmetic.cpp" - "${VIDEO_CORE}/shader/decode/arithmetic_half.cpp" - "${VIDEO_CORE}/shader/decode/arithmetic_half_immediate.cpp" - "${VIDEO_CORE}/shader/decode/arithmetic_immediate.cpp" - "${VIDEO_CORE}/shader/decode/arithmetic_integer.cpp" - "${VIDEO_CORE}/shader/decode/arithmetic_integer_immediate.cpp" - "${VIDEO_CORE}/shader/decode/bfe.cpp" - "${VIDEO_CORE}/shader/decode/bfi.cpp" - "${VIDEO_CORE}/shader/decode/conversion.cpp" - "${VIDEO_CORE}/shader/decode/ffma.cpp" - "${VIDEO_CORE}/shader/decode/float_set.cpp" - "${VIDEO_CORE}/shader/decode/float_set_predicate.cpp" - "${VIDEO_CORE}/shader/decode/half_set.cpp" - "${VIDEO_CORE}/shader/decode/half_set_predicate.cpp" - "${VIDEO_CORE}/shader/decode/hfma2.cpp" - "${VIDEO_CORE}/shader/decode/image.cpp" - "${VIDEO_CORE}/shader/decode/integer_set.cpp" - "${VIDEO_CORE}/shader/decode/integer_set_predicate.cpp" - "${VIDEO_CORE}/shader/decode/memory.cpp" - "${VIDEO_CORE}/shader/decode/texture.cpp" - "${VIDEO_CORE}/shader/decode/other.cpp" - "${VIDEO_CORE}/shader/decode/predicate_set_predicate.cpp" - "${VIDEO_CORE}/shader/decode/predicate_set_register.cpp" - "${VIDEO_CORE}/shader/decode/register_set_predicate.cpp" - "${VIDEO_CORE}/shader/decode/shift.cpp" - "${VIDEO_CORE}/shader/decode/video.cpp" - "${VIDEO_CORE}/shader/decode/warp.cpp" - "${VIDEO_CORE}/shader/decode/xmad.cpp" - "${VIDEO_CORE}/shader/ast.cpp" - "${VIDEO_CORE}/shader/ast.h" - "${VIDEO_CORE}/shader/compiler_settings.cpp" - "${VIDEO_CORE}/shader/compiler_settings.h" - "${VIDEO_CORE}/shader/control_flow.cpp" - "${VIDEO_CORE}/shader/control_flow.h" - "${VIDEO_CORE}/shader/decode.cpp" - "${VIDEO_CORE}/shader/expr.cpp" - "${VIDEO_CORE}/shader/expr.h" - "${VIDEO_CORE}/shader/node.h" - "${VIDEO_CORE}/shader/node_helper.cpp" - "${VIDEO_CORE}/shader/node_helper.h" - "${VIDEO_CORE}/shader/registry.cpp" - "${VIDEO_CORE}/shader/registry.h" - "${VIDEO_CORE}/shader/shader_ir.cpp" - "${VIDEO_CORE}/shader/shader_ir.h" - "${VIDEO_CORE}/shader/track.cpp" - "${VIDEO_CORE}/shader/transform_feedback.cpp" - "${VIDEO_CORE}/shader/transform_feedback.h" + # ... 
# and also check that the scm_rev files haven't changed "${CMAKE_CURRENT_SOURCE_DIR}/scm_rev.cpp.in" "${CMAKE_CURRENT_SOURCE_DIR}/scm_rev.h" diff --git a/src/common/logging/filter.cpp b/src/common/logging/filter.cpp index 4f2cc29e1..f055f0e11 100755 --- a/src/common/logging/filter.cpp +++ b/src/common/logging/filter.cpp @@ -144,6 +144,10 @@ bool ParseFilterRule(Filter& instance, Iterator begin, Iterator end) { SUB(Render, Software) \ SUB(Render, OpenGL) \ SUB(Render, Vulkan) \ + CLS(Shader) \ + SUB(Shader, SPIRV) \ + SUB(Shader, GLASM) \ + SUB(Shader, GLSL) \ CLS(Audio) \ SUB(Audio, DSP) \ SUB(Audio, Sink) \ diff --git a/src/common/logging/types.h b/src/common/logging/types.h index 88b0e9c01..7ad0334fc 100755 --- a/src/common/logging/types.h +++ b/src/common/logging/types.h @@ -114,6 +114,10 @@ enum class Class : u8 { Render_Software, ///< Software renderer backend Render_OpenGL, ///< OpenGL backend Render_Vulkan, ///< Vulkan backend + Shader, ///< Shader recompiler + Shader_SPIRV, ///< Shader SPIR-V code generation + Shader_GLASM, ///< Shader GLASM code generation + Shader_GLSL, ///< Shader GLSL code generation Audio, ///< Audio emulation Audio_DSP, ///< The HLE implementation of the DSP Audio_Sink, ///< Emulator audio output backend diff --git a/src/common/settings.cpp b/src/common/settings.cpp index e1973af85..38667351a 100755 --- a/src/common/settings.cpp +++ b/src/common/settings.cpp @@ -57,7 +57,7 @@ void LogSettings() { log_setting("Renderer_UseNvdecEmulation", values.use_nvdec_emulation.GetValue()); log_setting("Renderer_AccelerateASTC", values.accelerate_astc.GetValue()); log_setting("Renderer_UseVsync", values.use_vsync.GetValue()); - log_setting("Renderer_UseAssemblyShaders", values.use_assembly_shaders.GetValue()); + log_setting("Renderer_ShaderBackend", values.shader_backend.GetValue()); log_setting("Renderer_UseAsynchronousShaders", values.use_asynchronous_shaders.GetValue()); log_setting("Renderer_UseGarbageCollection", values.use_caches_gc.GetValue()); log_setting("Renderer_AnisotropicFilteringLevel", values.max_anisotropy.GetValue()); @@ -140,7 +140,7 @@ void RestoreGlobalState(bool is_powered_on) { values.use_nvdec_emulation.SetGlobal(true); values.accelerate_astc.SetGlobal(true); values.use_vsync.SetGlobal(true); - values.use_assembly_shaders.SetGlobal(true); + values.shader_backend.SetGlobal(true); values.use_asynchronous_shaders.SetGlobal(true); values.use_fast_gpu_time.SetGlobal(true); values.use_caches_gc.SetGlobal(true); diff --git a/src/common/settings.h b/src/common/settings.h index d2e91a2c9..14c511033 100755 --- a/src/common/settings.h +++ b/src/common/settings.h @@ -24,6 +24,12 @@ enum class RendererBackend : u32 { Vulkan = 1, }; +enum class ShaderBackend : u32 { + GLSL = 0, + GLASM = 1, + SPIRV = 2, +}; + enum class GPUAccuracy : u32 { Normal = 0, High = 1, @@ -308,6 +314,9 @@ struct Values { // Renderer Setting<RendererBackend> renderer_backend{RendererBackend::OpenGL, "backend"}; BasicSetting<bool> renderer_debug{false, "debug"}; + BasicSetting<bool> enable_nsight_aftermath{false, "nsight_aftermath"}; + BasicSetting<bool> disable_shader_loop_safety_checks{false, + "disable_shader_loop_safety_checks"}; Setting<int> vulkan_device{0, "vulkan_device"}; Setting<u16> resolution_factor{1, "resolution_factor"}; @@ -331,7 +340,7 @@ struct Values { Setting<bool> accelerate_astc{true, "accelerate_astc"}; Setting<bool> use_vsync{true, "use_vsync"}; Setting<bool> disable_fps_limit{false, "disable_fps_limit"}; - Setting<bool> use_assembly_shaders{false, "use_assembly_shaders"}; + Setting<ShaderBackend> shader_backend{ShaderBackend::GLASM, 
"shader_backend"}; Setting use_asynchronous_shaders{false, "use_asynchronous_shaders"}; Setting use_fast_gpu_time{true, "use_fast_gpu_time"}; Setting use_caches_gc{false, "use_caches_gc"}; diff --git a/src/common/thread_worker.h b/src/common/thread_worker.h index 8272985ff..cd0017726 100755 --- a/src/common/thread_worker.h +++ b/src/common/thread_worker.h @@ -5,6 +5,7 @@ #pragma once #include +#include #include #include #include @@ -39,7 +40,7 @@ public: const auto lambda = [this, func](std::stop_token stop_token) { Common::SetCurrentThreadName(thread_name.c_str()); { - std::conditional_t state{func()}; + [[maybe_unused]] std::conditional_t state{func()}; while (!stop_token.stop_requested()) { Task task; { diff --git a/src/core/telemetry_session.cpp b/src/core/telemetry_session.cpp index 066cb23e4..422de3a7d 100755 --- a/src/core/telemetry_session.cpp +++ b/src/core/telemetry_session.cpp @@ -233,8 +233,8 @@ void TelemetrySession::AddInitialInfo(Loader::AppLoader& app_loader, Settings::values.use_nvdec_emulation.GetValue()); AddField(field_type, "Renderer_AccelerateASTC", Settings::values.accelerate_astc.GetValue()); AddField(field_type, "Renderer_UseVsync", Settings::values.use_vsync.GetValue()); - AddField(field_type, "Renderer_UseAssemblyShaders", - Settings::values.use_assembly_shaders.GetValue()); + AddField(field_type, "Renderer_ShaderBackend", + static_cast(Settings::values.shader_backend.GetValue())); AddField(field_type, "Renderer_UseAsynchronousShaders", Settings::values.use_asynchronous_shaders.GetValue()); AddField(field_type, "System_UseDockedMode", Settings::values.use_docked_mode.GetValue()); diff --git a/src/shader_recompiler/CMakeLists.txt b/src/shader_recompiler/CMakeLists.txt new file mode 100755 index 000000000..b5b7e5e83 --- /dev/null +++ b/src/shader_recompiler/CMakeLists.txt @@ -0,0 +1,268 @@ +add_library(shader_recompiler STATIC + backend/bindings.h + backend/glasm/emit_context.cpp + backend/glasm/emit_context.h + backend/glasm/emit_glasm.cpp + backend/glasm/emit_glasm.h + backend/glasm/emit_glasm_barriers.cpp + backend/glasm/emit_glasm_bitwise_conversion.cpp + backend/glasm/emit_glasm_composite.cpp + backend/glasm/emit_glasm_context_get_set.cpp + backend/glasm/emit_glasm_control_flow.cpp + backend/glasm/emit_glasm_convert.cpp + backend/glasm/emit_glasm_floating_point.cpp + backend/glasm/emit_glasm_image.cpp + backend/glasm/emit_glasm_instructions.h + backend/glasm/emit_glasm_integer.cpp + backend/glasm/emit_glasm_logical.cpp + backend/glasm/emit_glasm_memory.cpp + backend/glasm/emit_glasm_not_implemented.cpp + backend/glasm/emit_glasm_select.cpp + backend/glasm/emit_glasm_shared_memory.cpp + backend/glasm/emit_glasm_special.cpp + backend/glasm/emit_glasm_undefined.cpp + backend/glasm/emit_glasm_warp.cpp + backend/glasm/reg_alloc.cpp + backend/glasm/reg_alloc.h + backend/glsl/emit_context.cpp + backend/glsl/emit_context.h + backend/glsl/emit_glsl.cpp + backend/glsl/emit_glsl.h + backend/glsl/emit_glsl_atomic.cpp + backend/glsl/emit_glsl_barriers.cpp + backend/glsl/emit_glsl_bitwise_conversion.cpp + backend/glsl/emit_glsl_composite.cpp + backend/glsl/emit_glsl_context_get_set.cpp + backend/glsl/emit_glsl_control_flow.cpp + backend/glsl/emit_glsl_convert.cpp + backend/glsl/emit_glsl_floating_point.cpp + backend/glsl/emit_glsl_image.cpp + backend/glsl/emit_glsl_instructions.h + backend/glsl/emit_glsl_integer.cpp + backend/glsl/emit_glsl_logical.cpp + backend/glsl/emit_glsl_memory.cpp + backend/glsl/emit_glsl_not_implemented.cpp + backend/glsl/emit_glsl_select.cpp + 
backend/glsl/emit_glsl_shared_memory.cpp + backend/glsl/emit_glsl_special.cpp + backend/glsl/emit_glsl_undefined.cpp + backend/glsl/emit_glsl_warp.cpp + backend/glsl/var_alloc.cpp + backend/glsl/var_alloc.h + backend/spirv/emit_context.cpp + backend/spirv/emit_context.h + backend/spirv/emit_spirv.cpp + backend/spirv/emit_spirv.h + backend/spirv/emit_spirv_atomic.cpp + backend/spirv/emit_spirv_barriers.cpp + backend/spirv/emit_spirv_bitwise_conversion.cpp + backend/spirv/emit_spirv_composite.cpp + backend/spirv/emit_spirv_context_get_set.cpp + backend/spirv/emit_spirv_control_flow.cpp + backend/spirv/emit_spirv_convert.cpp + backend/spirv/emit_spirv_floating_point.cpp + backend/spirv/emit_spirv_image.cpp + backend/spirv/emit_spirv_image_atomic.cpp + backend/spirv/emit_spirv_instructions.h + backend/spirv/emit_spirv_integer.cpp + backend/spirv/emit_spirv_logical.cpp + backend/spirv/emit_spirv_memory.cpp + backend/spirv/emit_spirv_select.cpp + backend/spirv/emit_spirv_shared_memory.cpp + backend/spirv/emit_spirv_special.cpp + backend/spirv/emit_spirv_undefined.cpp + backend/spirv/emit_spirv_warp.cpp + environment.h + exception.h + frontend/ir/abstract_syntax_list.h + frontend/ir/attribute.cpp + frontend/ir/attribute.h + frontend/ir/basic_block.cpp + frontend/ir/basic_block.h + frontend/ir/breadth_first_search.h + frontend/ir/condition.cpp + frontend/ir/condition.h + frontend/ir/flow_test.cpp + frontend/ir/flow_test.h + frontend/ir/ir_emitter.cpp + frontend/ir/ir_emitter.h + frontend/ir/microinstruction.cpp + frontend/ir/modifiers.h + frontend/ir/opcodes.cpp + frontend/ir/opcodes.h + frontend/ir/opcodes.inc + frontend/ir/patch.cpp + frontend/ir/patch.h + frontend/ir/post_order.cpp + frontend/ir/post_order.h + frontend/ir/pred.h + frontend/ir/program.cpp + frontend/ir/program.h + frontend/ir/reg.h + frontend/ir/type.cpp + frontend/ir/type.h + frontend/ir/value.cpp + frontend/ir/value.h + frontend/maxwell/control_flow.cpp + frontend/maxwell/control_flow.h + frontend/maxwell/decode.cpp + frontend/maxwell/decode.h + frontend/maxwell/indirect_branch_table_track.cpp + frontend/maxwell/indirect_branch_table_track.h + frontend/maxwell/instruction.h + frontend/maxwell/location.h + frontend/maxwell/maxwell.inc + frontend/maxwell/opcodes.cpp + frontend/maxwell/opcodes.h + frontend/maxwell/structured_control_flow.cpp + frontend/maxwell/structured_control_flow.h + frontend/maxwell/translate/impl/atomic_operations_global_memory.cpp + frontend/maxwell/translate/impl/atomic_operations_shared_memory.cpp + frontend/maxwell/translate/impl/attribute_memory_to_physical.cpp + frontend/maxwell/translate/impl/barrier_operations.cpp + frontend/maxwell/translate/impl/bitfield_extract.cpp + frontend/maxwell/translate/impl/bitfield_insert.cpp + frontend/maxwell/translate/impl/branch_indirect.cpp + frontend/maxwell/translate/impl/common_encoding.h + frontend/maxwell/translate/impl/common_funcs.cpp + frontend/maxwell/translate/impl/common_funcs.h + frontend/maxwell/translate/impl/condition_code_set.cpp + frontend/maxwell/translate/impl/double_add.cpp + frontend/maxwell/translate/impl/double_compare_and_set.cpp + frontend/maxwell/translate/impl/double_fused_multiply_add.cpp + frontend/maxwell/translate/impl/double_min_max.cpp + frontend/maxwell/translate/impl/double_multiply.cpp + frontend/maxwell/translate/impl/double_set_predicate.cpp + frontend/maxwell/translate/impl/exit_program.cpp + frontend/maxwell/translate/impl/find_leading_one.cpp + frontend/maxwell/translate/impl/floating_point_add.cpp + 
frontend/maxwell/translate/impl/floating_point_compare.cpp + frontend/maxwell/translate/impl/floating_point_compare_and_set.cpp + frontend/maxwell/translate/impl/floating_point_conversion_floating_point.cpp + frontend/maxwell/translate/impl/floating_point_conversion_integer.cpp + frontend/maxwell/translate/impl/floating_point_fused_multiply_add.cpp + frontend/maxwell/translate/impl/floating_point_min_max.cpp + frontend/maxwell/translate/impl/floating_point_multi_function.cpp + frontend/maxwell/translate/impl/floating_point_multiply.cpp + frontend/maxwell/translate/impl/floating_point_range_reduction.cpp + frontend/maxwell/translate/impl/floating_point_set_predicate.cpp + frontend/maxwell/translate/impl/floating_point_swizzled_add.cpp + frontend/maxwell/translate/impl/half_floating_point_add.cpp + frontend/maxwell/translate/impl/half_floating_point_fused_multiply_add.cpp + frontend/maxwell/translate/impl/half_floating_point_helper.cpp + frontend/maxwell/translate/impl/half_floating_point_helper.h + frontend/maxwell/translate/impl/half_floating_point_multiply.cpp + frontend/maxwell/translate/impl/half_floating_point_set.cpp + frontend/maxwell/translate/impl/half_floating_point_set_predicate.cpp + frontend/maxwell/translate/impl/impl.cpp + frontend/maxwell/translate/impl/impl.h + frontend/maxwell/translate/impl/integer_add.cpp + frontend/maxwell/translate/impl/integer_add_three_input.cpp + frontend/maxwell/translate/impl/integer_compare.cpp + frontend/maxwell/translate/impl/integer_compare_and_set.cpp + frontend/maxwell/translate/impl/integer_floating_point_conversion.cpp + frontend/maxwell/translate/impl/integer_funnel_shift.cpp + frontend/maxwell/translate/impl/integer_minimum_maximum.cpp + frontend/maxwell/translate/impl/integer_popcount.cpp + frontend/maxwell/translate/impl/integer_scaled_add.cpp + frontend/maxwell/translate/impl/integer_set_predicate.cpp + frontend/maxwell/translate/impl/integer_shift_left.cpp + frontend/maxwell/translate/impl/integer_shift_right.cpp + frontend/maxwell/translate/impl/integer_short_multiply_add.cpp + frontend/maxwell/translate/impl/integer_to_integer_conversion.cpp + frontend/maxwell/translate/impl/internal_stage_buffer_entry_read.cpp + frontend/maxwell/translate/impl/load_constant.cpp + frontend/maxwell/translate/impl/load_constant.h + frontend/maxwell/translate/impl/load_effective_address.cpp + frontend/maxwell/translate/impl/load_store_attribute.cpp + frontend/maxwell/translate/impl/load_store_local_shared.cpp + frontend/maxwell/translate/impl/load_store_memory.cpp + frontend/maxwell/translate/impl/logic_operation.cpp + frontend/maxwell/translate/impl/logic_operation_three_input.cpp + frontend/maxwell/translate/impl/move_predicate_to_register.cpp + frontend/maxwell/translate/impl/move_register.cpp + frontend/maxwell/translate/impl/move_register_to_predicate.cpp + frontend/maxwell/translate/impl/move_special_register.cpp + frontend/maxwell/translate/impl/not_implemented.cpp + frontend/maxwell/translate/impl/output_geometry.cpp + frontend/maxwell/translate/impl/pixel_load.cpp + frontend/maxwell/translate/impl/predicate_set_predicate.cpp + frontend/maxwell/translate/impl/predicate_set_register.cpp + frontend/maxwell/translate/impl/select_source_with_predicate.cpp + frontend/maxwell/translate/impl/surface_atomic_operations.cpp + frontend/maxwell/translate/impl/surface_load_store.cpp + frontend/maxwell/translate/impl/texture_fetch.cpp + frontend/maxwell/translate/impl/texture_fetch_swizzled.cpp + frontend/maxwell/translate/impl/texture_gather.cpp + 
frontend/maxwell/translate/impl/texture_gather_swizzled.cpp + frontend/maxwell/translate/impl/texture_gradient.cpp + frontend/maxwell/translate/impl/texture_load.cpp + frontend/maxwell/translate/impl/texture_load_swizzled.cpp + frontend/maxwell/translate/impl/texture_mipmap_level.cpp + frontend/maxwell/translate/impl/texture_query.cpp + frontend/maxwell/translate/impl/video_helper.cpp + frontend/maxwell/translate/impl/video_helper.h + frontend/maxwell/translate/impl/video_minimum_maximum.cpp + frontend/maxwell/translate/impl/video_multiply_add.cpp + frontend/maxwell/translate/impl/video_set_predicate.cpp + frontend/maxwell/translate/impl/vote.cpp + frontend/maxwell/translate/impl/warp_shuffle.cpp + frontend/maxwell/translate/translate.cpp + frontend/maxwell/translate/translate.h + frontend/maxwell/translate_program.cpp + frontend/maxwell/translate_program.h + host_translate_info.h + ir_opt/collect_shader_info_pass.cpp + ir_opt/constant_propagation_pass.cpp + ir_opt/dead_code_elimination_pass.cpp + ir_opt/dual_vertex_pass.cpp + ir_opt/global_memory_to_storage_buffer_pass.cpp + ir_opt/identity_removal_pass.cpp + ir_opt/lower_fp16_to_fp32.cpp + ir_opt/lower_int64_to_int32.cpp + ir_opt/passes.h + ir_opt/ssa_rewrite_pass.cpp + ir_opt/texture_pass.cpp + ir_opt/verification_pass.cpp + object_pool.h + profile.h + program_header.h + runtime_info.h + shader_info.h + varying_state.h +) + +target_link_libraries(shader_recompiler PUBLIC common fmt::fmt sirit) + +if (MSVC) + target_compile_options(shader_recompiler PRIVATE + /W4 + /WX + /we4018 # 'expression' : signed/unsigned mismatch + /we4244 # 'argument' : conversion from 'type1' to 'type2', possible loss of data (floating-point) + /we4245 # 'conversion' : conversion from 'type1' to 'type2', signed/unsigned mismatch + /we4254 # 'operator': conversion from 'type1:field_bits' to 'type2:field_bits', possible loss of data + /we4267 # 'var' : conversion from 'size_t' to 'type', possible loss of data + /we4305 # 'context' : truncation from 'type1' to 'type2' + /we4800 # Implicit conversion from 'type' to bool. Possible information loss + /we4826 # Conversion from 'type1' to 'type2' is sign-extended. This may cause unexpected runtime behavior. + ) +else() + target_compile_options(shader_recompiler PRIVATE + -Werror + -Werror=conversion + -Werror=ignored-qualifiers + -Werror=implicit-fallthrough + -Werror=shadow + -Werror=sign-compare + $<$<CXX_COMPILER_ID:GNU>:-Werror=unused-but-set-parameter> + $<$<CXX_COMPILER_ID:GNU>:-Werror=unused-but-set-variable> + -Werror=unused-variable + + # Bracket depth determines maximum size of a fold expression in Clang since 9c9974c3ccb6. + # And this in turn limits the size of a std::array. + $<$<CXX_COMPILER_ID:AppleClang,Clang>:-fbracket-depth=1024> + ) +endif() + +create_target_directory_groups(shader_recompiler) diff --git a/src/shader_recompiler/backend/bindings.h b/src/shader_recompiler/backend/bindings.h new file mode 100755 index 000000000..35503000c --- /dev/null +++ b/src/shader_recompiler/backend/bindings.h @@ -0,0 +1,19 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#pragma once + +#include "common/common_types.h" + +namespace Shader::Backend { + +struct Bindings { + u32 unified{}; + u32 uniform_buffer{}; + u32 storage_buffer{}; + u32 texture{}; + u32 image{}; +}; + +} // namespace Shader::Backend diff --git a/src/shader_recompiler/backend/glasm/emit_context.cpp b/src/shader_recompiler/backend/glasm/emit_context.cpp new file mode 100755 index 000000000..069c019ad --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_context.cpp @@ -0,0 +1,154 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include + +#include "shader_recompiler/backend/bindings.h" +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/profile.h" +#include "shader_recompiler/runtime_info.h" + +namespace Shader::Backend::GLASM { +namespace { +std::string_view InterpDecorator(Interpolation interp) { + switch (interp) { + case Interpolation::Smooth: + return ""; + case Interpolation::Flat: + return "FLAT "; + case Interpolation::NoPerspective: + return "NOPERSPECTIVE "; + } + throw InvalidArgument("Invalid interpolation {}", interp); +} + +bool IsInputArray(Stage stage) { + return stage == Stage::Geometry || stage == Stage::TessellationControl || + stage == Stage::TessellationEval; +} +} // Anonymous namespace + +EmitContext::EmitContext(IR::Program& program, Bindings& bindings, const Profile& profile_, + const RuntimeInfo& runtime_info_) + : info{program.info}, profile{profile_}, runtime_info{runtime_info_} { + // FIXME: Temporary partial implementation + u32 cbuf_index{}; + for (const auto& desc : info.constant_buffer_descriptors) { + if (desc.count != 1) { + throw NotImplementedException("Constant buffer descriptor array"); + } + Add("CBUFFER c{}[]={{program.buffer[{}]}};", desc.index, cbuf_index); + ++cbuf_index; + } + u32 ssbo_index{}; + for (const auto& desc : info.storage_buffers_descriptors) { + if (desc.count != 1) { + throw NotImplementedException("Storage buffer descriptor array"); + } + if (runtime_info.glasm_use_storage_buffers) { + Add("STORAGE ssbo{}[]={{program.storage[{}]}};", ssbo_index, bindings.storage_buffer); + ++bindings.storage_buffer; + ++ssbo_index; + } + } + if (!runtime_info.glasm_use_storage_buffers) { + if (const size_t num = info.storage_buffers_descriptors.size(); num > 0) { + Add("PARAM c[{}]={{program.local[0..{}]}};", num, num - 1); + } + } + stage = program.stage; + switch (program.stage) { + case Stage::VertexA: + case Stage::VertexB: + stage_name = "vertex"; + attrib_name = "vertex"; + break; + case Stage::TessellationControl: + case Stage::TessellationEval: + stage_name = "primitive"; + attrib_name = "primitive"; + break; + case Stage::Geometry: + stage_name = "primitive"; + attrib_name = "vertex"; + break; + case Stage::Fragment: + stage_name = "fragment"; + attrib_name = "fragment"; + break; + case Stage::Compute: + stage_name = "invocation"; + break; + } + const std::string_view attr_stage{stage == Stage::Fragment ? 
"fragment" : "vertex"}; + const VaryingState loads{info.loads.mask | info.passthrough.mask}; + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + if (loads.Generic(index)) { + Add("{}ATTRIB in_attr{}[]={{{}.attrib[{}..{}]}};", + InterpDecorator(info.interpolation[index]), index, attr_stage, index, index); + } + } + if (IsInputArray(stage) && loads.AnyComponent(IR::Attribute::PositionX)) { + Add("ATTRIB vertex_position=vertex.position;"); + } + if (info.uses_invocation_id) { + Add("ATTRIB primitive_invocation=primitive.invocation;"); + } + if (info.stores_tess_level_outer) { + Add("OUTPUT result_patch_tessouter[]={{result.patch.tessouter[0..3]}};"); + } + if (info.stores_tess_level_inner) { + Add("OUTPUT result_patch_tessinner[]={{result.patch.tessinner[0..1]}};"); + } + if (info.stores.ClipDistances()) { + Add("OUTPUT result_clip[]={{result.clip[0..7]}};"); + } + for (size_t index = 0; index < info.uses_patches.size(); ++index) { + if (!info.uses_patches[index]) { + continue; + } + if (stage == Stage::TessellationControl) { + Add("OUTPUT result_patch_attrib{}[]={{result.patch.attrib[{}..{}]}};" + "ATTRIB primitive_out_patch_attrib{}[]={{primitive.out.patch.attrib[{}..{}]}};", + index, index, index, index, index, index); + } else { + Add("ATTRIB primitive_patch_attrib{}[]={{primitive.patch.attrib[{}..{}]}};", index, + index, index); + } + } + if (stage == Stage::Fragment) { + Add("OUTPUT frag_color0=result.color;"); + for (size_t index = 1; index < info.stores_frag_color.size(); ++index) { + Add("OUTPUT frag_color{}=result.color[{}];", index, index); + } + } + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + if (info.stores.Generic(index)) { + Add("OUTPUT out_attr{}[]={{result.attrib[{}..{}]}};", index, index, index); + } + } + image_buffer_bindings.reserve(info.image_buffer_descriptors.size()); + for (const auto& desc : info.image_buffer_descriptors) { + image_buffer_bindings.push_back(bindings.image); + bindings.image += desc.count; + } + image_bindings.reserve(info.image_descriptors.size()); + for (const auto& desc : info.image_descriptors) { + image_bindings.push_back(bindings.image); + bindings.image += desc.count; + } + texture_buffer_bindings.reserve(info.texture_buffer_descriptors.size()); + for (const auto& desc : info.texture_buffer_descriptors) { + texture_buffer_bindings.push_back(bindings.texture); + bindings.texture += desc.count; + } + texture_bindings.reserve(info.texture_descriptors.size()); + for (const auto& desc : info.texture_descriptors) { + texture_bindings.push_back(bindings.texture); + bindings.texture += desc.count; + } +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_context.h b/src/shader_recompiler/backend/glasm/emit_context.h new file mode 100755 index 000000000..1da51a996 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_context.h @@ -0,0 +1,80 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+
+#pragma once
+
+#include <string>
+#include <string_view>
+#include <vector>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/backend/glasm/reg_alloc.h"
+#include "shader_recompiler/stage.h"
+
+namespace Shader {
+struct Info;
+struct Profile;
+struct RuntimeInfo;
+} // namespace Shader
+
+namespace Shader::Backend {
+struct Bindings;
+}
+
+namespace Shader::IR {
+class Inst;
+struct Program;
+} // namespace Shader::IR
+
+namespace Shader::Backend::GLASM {
+
+class EmitContext {
+public:
+    explicit EmitContext(IR::Program& program, Bindings& bindings, const Profile& profile_,
+                         const RuntimeInfo& runtime_info_);
+
+    template <typename... Args>
+    void Add(const char* format_str, IR::Inst& inst, Args&&... args) {
+        code += fmt::format(fmt::runtime(format_str), reg_alloc.Define(inst),
+                            std::forward<Args>(args)...);
+        // TODO: Remove this
+        code += '\n';
+    }
+
+    template <typename... Args>
+    void LongAdd(const char* format_str, IR::Inst& inst, Args&&... args) {
+        code += fmt::format(fmt::runtime(format_str), reg_alloc.LongDefine(inst),
+                            std::forward<Args>(args)...);
+        // TODO: Remove this
+        code += '\n';
+    }
+
+    template <typename... Args>
+    void Add(const char* format_str, Args&&... args) {
+        code += fmt::format(fmt::runtime(format_str), std::forward<Args>(args)...);
+        // TODO: Remove this
+        code += '\n';
+    }
+
+    std::string code;
+    RegAlloc reg_alloc{*this};
+    const Info& info;
+    const Profile& profile;
+    const RuntimeInfo& runtime_info;
+
+    std::vector<u32> texture_buffer_bindings;
+    std::vector<u32> image_buffer_bindings;
+    std::vector<u32> texture_bindings;
+    std::vector<u32> image_bindings;
+
+    Stage stage{};
+    std::string_view stage_name = "invalid";
+    std::string_view attrib_name = "invalid";
+
+    u32 num_safety_loop_vars{};
+    bool uses_y_direction{};
+};
+
+} // namespace Shader::Backend::GLASM
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm.cpp b/src/shader_recompiler/backend/glasm/emit_glasm.cpp
new file mode 100755
index 000000000..64787b353
--- /dev/null
+++ b/src/shader_recompiler/backend/glasm/emit_glasm.cpp
@@ -0,0 +1,489 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <algorithm>
+#include <ranges>
+#include <tuple>
+
+#include "common/div_ceil.h"
+#include "common/settings.h"
+#include "shader_recompiler/backend/bindings.h"
+#include "shader_recompiler/backend/glasm/emit_context.h"
+#include "shader_recompiler/backend/glasm/emit_glasm.h"
+#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h"
+#include "shader_recompiler/frontend/ir/ir_emitter.h"
+#include "shader_recompiler/frontend/ir/program.h"
+#include "shader_recompiler/profile.h"
+#include "shader_recompiler/runtime_info.h"
+
+namespace Shader::Backend::GLASM {
+namespace {
+template <class Func>
+struct FuncTraits {};
+
+template <class ReturnType_, class... Args>
+struct FuncTraits<ReturnType_ (*)(Args...)> {
+    using ReturnType = ReturnType_;
+
+    static constexpr size_t NUM_ARGS = sizeof...(Args);
+
+    template <size_t I>
+    using ArgType = std::tuple_element_t<I, std::tuple<Args...>>;
+};
+
+template <typename T>
+struct Identity {
+    Identity(T data_) : data{data_} {}
+
+    T Extract() {
+        return data;
+    }
+
+    T data;
+};
+
+template <bool scalar>
+class RegWrapper {
+public:
+    RegWrapper(EmitContext& ctx, const IR::Value& ir_value) : reg_alloc{ctx.reg_alloc} {
+        const Value value{reg_alloc.Peek(ir_value)};
+        if (value.type == Type::Register) {
+            inst = ir_value.InstRecursive();
+            reg = Register{value};
+        } else {
+            reg = value.type == Type::U64 ? reg_alloc.AllocLongReg() : reg_alloc.AllocReg();
+        }
+        switch (value.type) {
+        case Type::Register:
+        case Type::Void:
+            break;
+        case Type::U32:
+            ctx.Add("MOV.U {}.x,{};", reg, value.imm_u32);
+            break;
+        case Type::U64:
+            ctx.Add("MOV.U64 {}.x,{};", reg, value.imm_u64);
+            break;
+        }
+    }
+
+    auto Extract() {
+        if (inst) {
+            reg_alloc.Unref(*inst);
+        } else {
+            reg_alloc.FreeReg(reg);
+        }
+        return std::conditional_t<scalar, ScalarRegister, Register>{Value{reg}};
+    }
+
+private:
+    RegAlloc& reg_alloc;
+    IR::Inst* inst{};
+    Register reg{};
+};
+
+template <typename ArgType>
+class ValueWrapper {
+public:
+    ValueWrapper(EmitContext& ctx, const IR::Value& ir_value_)
+        : reg_alloc{ctx.reg_alloc}, ir_value{ir_value_}, value{reg_alloc.Peek(ir_value)} {}
+
+    ArgType Extract() {
+        if (!ir_value.IsImmediate()) {
+            reg_alloc.Unref(*ir_value.InstRecursive());
+        }
+        return value;
+    }
+
+private:
+    RegAlloc& reg_alloc;
+    const IR::Value& ir_value;
+    ArgType value;
+};
+
+template <typename ArgType>
+auto Arg(EmitContext& ctx, const IR::Value& arg) {
+    if constexpr (std::is_same_v<ArgType, Register>) {
+        return RegWrapper<false>{ctx, arg};
+    } else if constexpr (std::is_same_v<ArgType, ScalarRegister>) {
+        return RegWrapper<true>{ctx, arg};
+    } else if constexpr (std::is_base_of_v<Value, ArgType>) {
+        return ValueWrapper<ArgType>{ctx, arg};
+    } else if constexpr (std::is_same_v<ArgType, const IR::Value&>) {
+        return Identity<const IR::Value&>{arg};
+    } else if constexpr (std::is_same_v<ArgType, u32>) {
+        return Identity{arg.U32()};
+    } else if constexpr (std::is_same_v<ArgType, IR::Attribute>) {
+        return Identity{arg.Attribute()};
+    } else if constexpr (std::is_same_v<ArgType, IR::Patch>) {
+        return Identity{arg.Patch()};
+    } else if constexpr (std::is_same_v<ArgType, IR::Reg>) {
+        return Identity{arg.Reg()};
+    }
+}
+
+template <auto func, bool is_first_arg_inst>
+struct InvokeCall {
+    template <typename... Args>
+    InvokeCall(EmitContext& ctx, IR::Inst* inst, Args&&... args) {
+        if constexpr (is_first_arg_inst) {
+            func(ctx, *inst, args.Extract()...);
+        } else {
+            func(ctx, args.Extract()...);
+        }
+    }
+};
+
+template <auto func, bool is_first_arg_inst, size_t... I>
+void Invoke(EmitContext& ctx, IR::Inst* inst, std::index_sequence<I...>) {
+    using Traits = FuncTraits<decltype(func)>;
+    if constexpr (is_first_arg_inst) {
+        InvokeCall<func, is_first_arg_inst>{
+            ctx, inst, Arg<typename Traits::template ArgType<I + 2>>(ctx, inst->Arg(I))...};
+    } else {
+        InvokeCall<func, is_first_arg_inst>{
+            ctx, inst, Arg<typename Traits::template ArgType<I + 1>>(ctx, inst->Arg(I))...};
+    }
+}
+
+template <auto func>
+void Invoke(EmitContext& ctx, IR::Inst* inst) {
+    using Traits = FuncTraits<decltype(func)>;
+    static_assert(Traits::NUM_ARGS >= 1, "Insufficient arguments");
+    if constexpr (Traits::NUM_ARGS == 1) {
+        Invoke<func, false>(ctx, inst, std::make_index_sequence<0>{});
+    } else {
+        using FirstArgType = typename Traits::template ArgType<1>;
+        static constexpr bool is_first_arg_inst = std::is_same_v<FirstArgType, IR::Inst&>;
+        using Indices = std::make_index_sequence<Traits::NUM_ARGS - (is_first_arg_inst ? 2 : 1)>;
+        Invoke<func, is_first_arg_inst>(ctx, inst, Indices{});
+    }
+}
+
+void EmitInst(EmitContext& ctx, IR::Inst* inst) {
+    switch (inst->GetOpcode()) {
+#define OPCODE(name, result_type, ...)                                                            \
+    case IR::Opcode::name:                                                                        \
+        return Invoke<&Emit##name>(ctx, inst);
+#include "shader_recompiler/frontend/ir/opcodes.inc"
+#undef OPCODE
+    }
+    throw LogicError("Invalid opcode {}", inst->GetOpcode());
+}
+
+bool IsReference(IR::Inst& inst) {
+    return inst.GetOpcode() == IR::Opcode::Reference;
+}
+
+void PrecolorInst(IR::Inst& phi) {
+    // Insert phi moves before references to avoid overwriting other phis
+    const size_t num_args{phi.NumArgs()};
+    for (size_t i = 0; i < num_args; ++i) {
+        IR::Block& phi_block{*phi.PhiBlock(i)};
+        auto it{std::find_if_not(phi_block.rbegin(), phi_block.rend(), IsReference).base()};
+        IR::IREmitter ir{phi_block, it};
+        const IR::Value arg{phi.Arg(i)};
+        if (arg.IsImmediate()) {
+            ir.PhiMove(phi, arg);
+        } else {
+            ir.PhiMove(phi, IR::Value{&RegAlloc::AliasInst(*arg.Inst())});
+        }
+    }
+    for (size_t i = 0; i < num_args; ++i) {
+        IR::IREmitter{*phi.PhiBlock(i)}.Reference(IR::Value{&phi});
+    }
+}
+
+void Precolor(const IR::Program& program) {
+    for (IR::Block* const block : program.blocks) {
+        for (IR::Inst& phi : block->Instructions() | std::views::take_while(IR::IsPhi)) {
+            PrecolorInst(phi);
+        }
+    }
+}
+
+void EmitCode(EmitContext& ctx, const IR::Program& program) {
+    const auto eval{
+        [&](const IR::U1& cond) { return ScalarS32{ctx.reg_alloc.Consume(IR::Value{cond})}; }};
+    for (const IR::AbstractSyntaxNode& node : program.syntax_list) {
+        switch (node.type) {
+        case IR::AbstractSyntaxNode::Type::Block:
+            for (IR::Inst& inst : node.data.block->Instructions()) {
+                EmitInst(ctx, &inst);
+            }
+            break;
+        case IR::AbstractSyntaxNode::Type::If:
+            ctx.Add("MOV.S.CC RC,{};"
+                    "IF NE.x;",
+                    eval(node.data.if_node.cond));
+            break;
+        case IR::AbstractSyntaxNode::Type::EndIf:
+            ctx.Add("ENDIF;");
+            break;
+        case IR::AbstractSyntaxNode::Type::Loop:
+            ctx.Add("REP;");
+            break;
+        case IR::AbstractSyntaxNode::Type::Repeat:
+            if (!Settings::values.disable_shader_loop_safety_checks) {
+                const u32 loop_index{ctx.num_safety_loop_vars++};
+                const u32 vector_index{loop_index / 4};
+                const char component{"xyzw"[loop_index % 4]};
+                ctx.Add("SUB.S.CC loop{}.{},loop{}.{},1;"
+                        "BRK(LT.{});",
+                        vector_index, component, vector_index, component, component);
+            }
+            if (node.data.repeat.cond.IsImmediate()) {
+                if (node.data.repeat.cond.U1()) {
+                    ctx.Add("ENDREP;");
+                } else {
+                    ctx.Add("BRK;"
+                            "ENDREP;");
+                }
+            } else {
+                ctx.Add("MOV.S.CC RC,{};"
+                        "BRK(EQ.x);"
+                        "ENDREP;",
+                        eval(node.data.repeat.cond));
+            }
+            break;
+        case IR::AbstractSyntaxNode::Type::Break:
+            if (node.data.break_node.cond.IsImmediate()) {
+                if (node.data.break_node.cond.U1()) {
+                    ctx.Add("BRK;");
+                }
+            } else {
+                ctx.Add("MOV.S.CC RC,{};"
+                        "BRK (NE.x);",
+                        eval(node.data.break_node.cond));
+            }
+            break;
+        case IR::AbstractSyntaxNode::Type::Return:
+        case IR::AbstractSyntaxNode::Type::Unreachable:
+            ctx.Add("RET;");
+            break;
+        }
+    }
+    if (!ctx.reg_alloc.IsEmpty()) {
+        LOG_WARNING(Shader_GLASM, "Register leak after generating code");
+    }
+}
+
+void SetupOptions(const IR::Program& program, const Profile& profile,
+                  const RuntimeInfo& runtime_info, std::string& header) {
+    const Info& info{program.info};
+    const Stage stage{program.stage};
+
+    // TODO: Track the shared atomic ops
+    header += "OPTION NV_internal;"
+              "OPTION NV_shader_storage_buffer;"
+              "OPTION NV_gpu_program_fp64;";
+    if (info.uses_int64_bit_atomics) {
+        header += "OPTION NV_shader_atomic_int64;";
+    }
+    if (info.uses_atomic_f32_add) {
+        header += "OPTION NV_shader_atomic_float;";
+    }
+    if (info.uses_atomic_f16x2_add || info.uses_atomic_f16x2_min || info.uses_atomic_f16x2_max) {
+        header += "OPTION NV_shader_atomic_fp16_vector;";
+    }
+    if (info.uses_subgroup_invocation_id || info.uses_subgroup_mask || info.uses_subgroup_vote ||
+        info.uses_fswzadd) {
+        header += "OPTION NV_shader_thread_group;";
+    }
+    if (info.uses_subgroup_shuffles) {
+        header += "OPTION NV_shader_thread_shuffle;";
+    }
+    if (info.uses_sparse_residency) {
+        header += "OPTION EXT_sparse_texture2;";
+    }
+    const bool stores_viewport_layer{info.stores[IR::Attribute::ViewportIndex] ||
+                                     info.stores[IR::Attribute::Layer]};
+    if ((stage != Stage::Geometry && stores_viewport_layer) ||
+        info.stores[IR::Attribute::ViewportMask]) {
+        if (profile.support_viewport_index_layer_non_geometry) {
+            header += "OPTION NV_viewport_array2;";
+        }
+    }
+    if (program.is_geometry_passthrough && profile.support_geometry_shader_passthrough) {
+        header += "OPTION NV_geometry_shader_passthrough;";
+    }
+    if (info.uses_typeless_image_reads && profile.support_typeless_image_loads) {
+        header += "OPTION EXT_shader_image_load_formatted;";
+    }
+    if (profile.support_derivative_control) {
+        header += "OPTION ARB_derivative_control;";
+    }
+    if (stage == Stage::Fragment && runtime_info.force_early_z != 0) {
+        header += "OPTION NV_early_fragment_tests;";
+    }
+    if (stage == Stage::Fragment) {
+        header += "OPTION ARB_draw_buffers;";
+    }
+}
+
+std::string_view StageHeader(Stage stage) {
+    switch (stage) {
+    case Stage::VertexA:
+    case Stage::VertexB:
+        return "!!NVvp5.0\n";
+    case Stage::TessellationControl:
+        return "!!NVtcp5.0\n";
+    case Stage::TessellationEval:
+        return "!!NVtep5.0\n";
+    case Stage::Geometry:
+        return "!!NVgp5.0\n";
+    case Stage::Fragment:
+        return "!!NVfp5.0\n";
+    case Stage::Compute:
+        return "!!NVcp5.0\n";
+    }
+    throw InvalidArgument("Invalid stage {}", stage);
+}
+
+std::string_view InputPrimitive(InputTopology topology) {
+    switch (topology) {
+    case InputTopology::Points:
+        return "POINTS";
+    case InputTopology::Lines:
+        return "LINES";
+    case InputTopology::LinesAdjacency:
+        return "LINES_ADJACENCY";
+    case InputTopology::Triangles:
+        return "TRIANGLES";
+    case InputTopology::TrianglesAdjacency:
+        return "TRIANGLES_ADJACENCY";
+    }
+    throw InvalidArgument("Invalid input topology {}", topology);
+}
+
+std::string_view OutputPrimitive(OutputTopology topology) {
+    switch (topology) {
+    case OutputTopology::PointList:
+        return "POINTS";
+    case OutputTopology::LineStrip:
+        return "LINE_STRIP";
+    case OutputTopology::TriangleStrip:
+        return "TRIANGLE_STRIP";
+    }
+    throw InvalidArgument("Invalid output topology {}", topology);
+}
+
+std::string_view GetTessMode(TessPrimitive primitive) {
+    switch (primitive) {
+    case TessPrimitive::Triangles:
+        return "TRIANGLES";
+    case TessPrimitive::Quads:
+        return "QUADS";
+    case TessPrimitive::Isolines:
+        return "ISOLINES";
+    }
+    throw InvalidArgument("Invalid tessellation primitive {}", primitive);
+}
+
+std::string_view GetTessSpacing(TessSpacing spacing) {
+    switch (spacing) {
+    case TessSpacing::Equal:
+        return "EQUAL";
+    case TessSpacing::FractionalOdd:
+        return "FRACTIONAL_ODD";
+    case TessSpacing::FractionalEven:
+        return "FRACTIONAL_EVEN";
+    }
+    throw InvalidArgument("Invalid tessellation spacing {}", spacing);
+}
+} // Anonymous namespace
+
+std::string EmitGLASM(const Profile& profile, const RuntimeInfo& runtime_info, IR::Program& program,
+                      Bindings& bindings) {
+    EmitContext ctx{program, bindings, profile, runtime_info};
+    Precolor(program);
+    EmitCode(ctx, program);
+    std::string header{StageHeader(program.stage)};
+    SetupOptions(program,
profile, runtime_info, header); + switch (program.stage) { + case Stage::TessellationControl: + header += fmt::format("VERTICES_OUT {};", program.invocations); + break; + case Stage::TessellationEval: + header += fmt::format("TESS_MODE {};" + "TESS_SPACING {};" + "TESS_VERTEX_ORDER {};", + GetTessMode(runtime_info.tess_primitive), + GetTessSpacing(runtime_info.tess_spacing), + runtime_info.tess_clockwise ? "CW" : "CCW"); + break; + case Stage::Geometry: + header += fmt::format("PRIMITIVE_IN {};", InputPrimitive(runtime_info.input_topology)); + if (program.is_geometry_passthrough) { + if (profile.support_geometry_shader_passthrough) { + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + if (program.info.passthrough.Generic(index)) { + header += fmt::format("PASSTHROUGH result.attrib[{}];", index); + } + } + if (program.info.passthrough.AnyComponent(IR::Attribute::PositionX)) { + header += "PASSTHROUGH result.position;"; + } + } else { + LOG_WARNING(Shader_GLASM, "Passthrough geometry program used but not supported"); + } + } else { + header += + fmt::format("VERTICES_OUT {};" + "PRIMITIVE_OUT {};", + program.output_vertices, OutputPrimitive(program.output_topology)); + } + break; + case Stage::Compute: + header += fmt::format("GROUP_SIZE {} {} {};", program.workgroup_size[0], + program.workgroup_size[1], program.workgroup_size[2]); + break; + default: + break; + } + if (program.shared_memory_size > 0) { + header += fmt::format("SHARED_MEMORY {};", program.shared_memory_size); + header += fmt::format("SHARED shared_mem[]={{program.sharedmem}};"); + } + header += "TEMP "; + for (size_t index = 0; index < ctx.reg_alloc.NumUsedRegisters(); ++index) { + header += fmt::format("R{},", index); + } + if (program.local_memory_size > 0) { + header += fmt::format("lmem[{}],", program.local_memory_size); + } + if (program.info.uses_fswzadd) { + header += "FSWZA[4],FSWZB[4],"; + } + const u32 num_safety_loop_vectors{Common::DivCeil(ctx.num_safety_loop_vars, 4u)}; + for (u32 index = 0; index < num_safety_loop_vectors; ++index) { + header += fmt::format("loop{},", index); + } + header += "RC;" + "LONG TEMP "; + for (size_t index = 0; index < ctx.reg_alloc.NumUsedLongRegisters(); ++index) { + header += fmt::format("D{},", index); + } + header += "DC;"; + if (program.info.uses_fswzadd) { + header += "MOV.F FSWZA[0],-1;" + "MOV.F FSWZA[1],1;" + "MOV.F FSWZA[2],-1;" + "MOV.F FSWZA[3],0;" + "MOV.F FSWZB[0],-1;" + "MOV.F FSWZB[1],-1;" + "MOV.F FSWZB[2],1;" + "MOV.F FSWZB[3],-1;"; + } + for (u32 index = 0; index < num_safety_loop_vectors; ++index) { + header += fmt::format("MOV.S loop{},{{0x2000,0x2000,0x2000,0x2000}};", index); + } + if (ctx.uses_y_direction) { + header += "PARAM y_direction[1]={state.material.front.ambient};"; + } + ctx.code.insert(0, header); + ctx.code += "END"; + return ctx.code; +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm.h b/src/shader_recompiler/backend/glasm/emit_glasm.h new file mode 100755 index 000000000..bcb55f062 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm.h @@ -0,0 +1,25 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
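+
+// Usage sketch (editor's note, not part of the upstream patch): EmitGLASM is
+// the entry point of this backend. A hypothetical caller translates an IR
+// program and feeds the returned NV_gpu_program5 text to the GL driver:
+//
+//   Shader::Backend::Bindings bindings;
+//   const std::string code{
+//       Shader::Backend::GLASM::EmitGLASM(profile, runtime_info, program, bindings)};
+//   glProgramStringARB(GL_COMPUTE_PROGRAM_NV, GL_PROGRAM_FORMAT_ASCII_ARB,
+//                      static_cast<GLsizei>(code.size()), code.data());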
+
+#pragma once
+
+#include <string>
+
+#include "shader_recompiler/backend/bindings.h"
+#include "shader_recompiler/frontend/ir/program.h"
+#include "shader_recompiler/profile.h"
+#include "shader_recompiler/runtime_info.h"
+
+namespace Shader::Backend::GLASM {
+
+[[nodiscard]] std::string EmitGLASM(const Profile& profile, const RuntimeInfo& runtime_info,
+                                    IR::Program& program, Bindings& bindings);
+
+[[nodiscard]] inline std::string EmitGLASM(const Profile& profile, const RuntimeInfo& runtime_info,
+                                           IR::Program& program) {
+    Bindings binding;
+    return EmitGLASM(profile, runtime_info, program, binding);
+}
+
+} // namespace Shader::Backend::GLASM
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_barriers.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_barriers.cpp
new file mode 100755
index 000000000..e69de29bb
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_bitwise_conversion.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_bitwise_conversion.cpp
new file mode 100755
index 000000000..9201ccd39
--- /dev/null
+++ b/src/shader_recompiler/backend/glasm/emit_glasm_bitwise_conversion.cpp
@@ -0,0 +1,91 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "shader_recompiler/backend/glasm/emit_context.h"
+#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLASM {
+
+static void Alias(IR::Inst& inst, const IR::Value& value) {
+    if (value.IsImmediate()) {
+        return;
+    }
+    IR::Inst& value_inst{RegAlloc::AliasInst(*value.Inst())};
+    value_inst.DestructiveAddUsage(inst.UseCount());
+    value_inst.DestructiveRemoveUsage();
+    inst.SetDefinition(value_inst.Definition<Id>());
+}
+
+void EmitIdentity(EmitContext&, IR::Inst& inst, const IR::Value& value) {
+    Alias(inst, value);
+}
+
+void EmitConditionRef(EmitContext& ctx, IR::Inst& inst, const IR::Value& value) {
+    // Fake one usage to get a real register out of the condition
+    inst.DestructiveAddUsage(1);
+    const Register ret{ctx.reg_alloc.Define(inst)};
+    const ScalarS32 input{ctx.reg_alloc.Consume(value)};
+    if (ret != input) {
+        ctx.Add("MOV.S {},{};", ret, input);
+    }
+}
+
+void EmitBitCastU16F16(EmitContext&, IR::Inst& inst, const IR::Value& value) {
+    Alias(inst, value);
+}
+
+void EmitBitCastU32F32(EmitContext&, IR::Inst& inst, const IR::Value& value) {
+    Alias(inst, value);
+}
+
+void EmitBitCastU64F64(EmitContext&, IR::Inst& inst, const IR::Value& value) {
+    Alias(inst, value);
+}
+
+void EmitBitCastF16U16(EmitContext&, IR::Inst& inst, const IR::Value& value) {
+    Alias(inst, value);
+}
+
+void EmitBitCastF32U32(EmitContext&, IR::Inst& inst, const IR::Value& value) {
+    Alias(inst, value);
+}
+
+void EmitBitCastF64U64(EmitContext&, IR::Inst& inst, const IR::Value& value) {
+    Alias(inst, value);
+}
+
+void EmitPackUint2x32(EmitContext& ctx, IR::Inst& inst, Register value) {
+    ctx.LongAdd("PK64.U {}.x,{};", inst, value);
+}
+
+void EmitUnpackUint2x32(EmitContext& ctx, IR::Inst& inst, Register value) {
+    ctx.Add("UP64.U {}.xy,{}.x;", inst, value);
+}
+
+void EmitPackFloat2x16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitUnpackFloat2x16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitPackHalf2x16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    ctx.Add("PK2H {}.x,{};", inst, value);
+}
+
+void EmitUnpackHalf2x16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    ctx.Add("UP2H {}.xy,{}.x;", inst, value);
+}
+
+void EmitPackDouble2x32(EmitContext& ctx, IR::Inst& inst, Register value) {
+    ctx.LongAdd("PK64 {}.x,{};", inst, value);
+}
+
+void EmitUnpackDouble2x32(EmitContext& ctx, IR::Inst& inst, Register value) {
+    ctx.Add("UP64 {}.xy,{}.x;", inst, value);
+}
+
+} // namespace Shader::Backend::GLASM
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_composite.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_composite.cpp
new file mode 100755
index 000000000..bff0b7c1c
--- /dev/null
+++ b/src/shader_recompiler/backend/glasm/emit_glasm_composite.cpp
@@ -0,0 +1,244 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "shader_recompiler/backend/glasm/emit_context.h"
+#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLASM {
+namespace {
+template <auto read_imm, char type, typename... Values>
+void CompositeConstruct(EmitContext& ctx, IR::Inst& inst, Values&&... elements) {
+    const Register ret{ctx.reg_alloc.Define(inst)};
+    if (std::ranges::any_of(std::array{elements...},
+                            [](const IR::Value& value) { return value.IsImmediate(); })) {
+        using Type = std::invoke_result_t<decltype(read_imm), IR::Value>;
+        const std::array<Type, 4> values{(elements.IsImmediate() ? (elements.*read_imm)() : 0)...};
+        ctx.Add("MOV.{} {},{{{},{},{},{}}};", type, ret, fmt::to_string(values[0]),
+                fmt::to_string(values[1]), fmt::to_string(values[2]), fmt::to_string(values[3]));
+    }
+    size_t index{};
+    for (const IR::Value& element : {elements...}) {
+        if (!element.IsImmediate()) {
+            const ScalarU32 value{ctx.reg_alloc.Consume(element)};
+            ctx.Add("MOV.{} {}.{},{};", type, ret, "xyzw"[index], value);
+        }
+        ++index;
+    }
+}
+
+void CompositeExtract(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index, char type) {
+    const Register ret{ctx.reg_alloc.Define(inst)};
+    if (ret == composite && index == 0) {
+        // No need to do anything here, the source and destination are the same register
+        return;
+    }
+    ctx.Add("MOV.{} {}.x,{}.{};", type, ret, composite, "xyzw"[index]);
+}
+
+template <typename ObjectType>
+void CompositeInsert(EmitContext& ctx, IR::Inst& inst, Register composite, ObjectType object,
+                     u32 index, char type) {
+    const Register ret{ctx.reg_alloc.Define(inst)};
+    const char swizzle{"xyzw"[index]};
+    if (ret != composite && ret == object) {
+        // The object is aliased with the return value, so we have to use a temporary to insert
+        ctx.Add("MOV.{} RC,{};"
+                "MOV.{} RC.{},{};"
+                "MOV.{} {},RC;",
+                type, composite, type, swizzle, object, type, ret);
+    } else if (ret != composite) {
+        // The input composite is not aliased with the return value so we have to copy it before
+        // hand. But the insert object is not aliased with the return value, so we don't have to
+        // worry about that
+        ctx.Add("MOV.{} {},{};"
+                "MOV.{} {}.{},{};",
+                type, ret, composite, type, ret, swizzle, object);
+    } else {
+        // The return value is aliased so we can just insert the object, it doesn't matter if it's
+        // aliased
+        ctx.Add("MOV.{} {}.{},{};", type, ret, swizzle, object);
+    }
+}
+} // Anonymous namespace
+
+void EmitCompositeConstructU32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1,
+                                 const IR::Value& e2) {
+    CompositeConstruct<&IR::Value::U32, 'U'>(ctx, inst, e1, e2);
+}
+
+void EmitCompositeConstructU32x3(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1,
+                                 const IR::Value& e2, const IR::Value& e3) {
+    CompositeConstruct<&IR::Value::U32, 'U'>(ctx, inst, e1, e2, e3);
+}
+
+void EmitCompositeConstructU32x4(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1,
+                                 const IR::Value& e2, const IR::Value& e3, const IR::Value& e4) {
+    CompositeConstruct<&IR::Value::U32, 'U'>(ctx, inst, e1, e2, e3, e4);
+}
+
+void EmitCompositeExtractU32x2(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index) {
+    CompositeExtract(ctx, inst, composite, index, 'U');
+}
+
+void EmitCompositeExtractU32x3(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index) {
+    CompositeExtract(ctx, inst, composite, index, 'U');
+}
+
+void EmitCompositeExtractU32x4(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index) {
+    CompositeExtract(ctx, inst, composite, index, 'U');
+}
+
+void EmitCompositeInsertU32x2([[maybe_unused]] EmitContext& ctx,
+                              [[maybe_unused]] Register composite,
+                              [[maybe_unused]] ScalarU32 object, [[maybe_unused]] u32 index) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeInsertU32x3([[maybe_unused]] EmitContext& ctx,
+                              [[maybe_unused]] Register composite,
+                              [[maybe_unused]] ScalarU32 object, [[maybe_unused]] u32 index) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeInsertU32x4([[maybe_unused]] EmitContext& ctx,
+                              [[maybe_unused]] Register composite,
+                              [[maybe_unused]] ScalarU32 object, [[maybe_unused]] u32 index) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeConstructF16x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register e1,
+                                 [[maybe_unused]] Register e2) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeConstructF16x3([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register e1,
+                                 [[maybe_unused]] Register e2, [[maybe_unused]] Register e3) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeConstructF16x4([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register e1,
+                                 [[maybe_unused]] Register e2, [[maybe_unused]] Register e3,
+                                 [[maybe_unused]] Register e4) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeExtractF16x2([[maybe_unused]] EmitContext& ctx,
+                               [[maybe_unused]] Register composite, [[maybe_unused]] u32 index) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeExtractF16x3([[maybe_unused]] EmitContext& ctx,
+                               [[maybe_unused]] Register composite, [[maybe_unused]] u32 index) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeExtractF16x4([[maybe_unused]] EmitContext& ctx,
+                               [[maybe_unused]] Register composite, [[maybe_unused]] u32 index) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitCompositeInsertF16x2([[maybe_unused]] EmitContext& ctx,
+                              [[maybe_unused]] Register
composite, [[maybe_unused]] Register object, + [[maybe_unused]] u32 index) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeInsertF16x3([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] Register composite, [[maybe_unused]] Register object, + [[maybe_unused]] u32 index) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeInsertF16x4([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] Register composite, [[maybe_unused]] Register object, + [[maybe_unused]] u32 index) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeConstructF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2) { + CompositeConstruct<&IR::Value::F32, 'F'>(ctx, inst, e1, e2); +} + +void EmitCompositeConstructF32x3(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2, const IR::Value& e3) { + CompositeConstruct<&IR::Value::F32, 'F'>(ctx, inst, e1, e2, e3); +} + +void EmitCompositeConstructF32x4(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2, const IR::Value& e3, const IR::Value& e4) { + CompositeConstruct<&IR::Value::F32, 'F'>(ctx, inst, e1, e2, e3, e4); +} + +void EmitCompositeExtractF32x2(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index) { + CompositeExtract(ctx, inst, composite, index, 'F'); +} + +void EmitCompositeExtractF32x3(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index) { + CompositeExtract(ctx, inst, composite, index, 'F'); +} + +void EmitCompositeExtractF32x4(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index) { + CompositeExtract(ctx, inst, composite, index, 'F'); +} + +void EmitCompositeInsertF32x2(EmitContext& ctx, IR::Inst& inst, Register composite, + ScalarF32 object, u32 index) { + CompositeInsert(ctx, inst, composite, object, index, 'F'); +} + +void EmitCompositeInsertF32x3(EmitContext& ctx, IR::Inst& inst, Register composite, + ScalarF32 object, u32 index) { + CompositeInsert(ctx, inst, composite, object, index, 'F'); +} + +void EmitCompositeInsertF32x4(EmitContext& ctx, IR::Inst& inst, Register composite, + ScalarF32 object, u32 index) { + CompositeInsert(ctx, inst, composite, object, index, 'F'); +} + +void EmitCompositeConstructF64x2([[maybe_unused]] EmitContext& ctx) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeConstructF64x3([[maybe_unused]] EmitContext& ctx) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeConstructF64x4([[maybe_unused]] EmitContext& ctx) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeExtractF64x2([[maybe_unused]] EmitContext& ctx) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeExtractF64x3([[maybe_unused]] EmitContext& ctx) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeExtractF64x4([[maybe_unused]] EmitContext& ctx) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeInsertF64x2([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] Register composite, [[maybe_unused]] Register object, + [[maybe_unused]] u32 index) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeInsertF64x3([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] Register composite, [[maybe_unused]] Register object, + [[maybe_unused]] u32 index) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitCompositeInsertF64x4([[maybe_unused]] EmitContext& ctx, + 
[[maybe_unused]] Register composite, [[maybe_unused]] Register object,
+                              [[maybe_unused]] u32 index) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+} // namespace Shader::Backend::GLASM
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_context_get_set.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_context_get_set.cpp
new file mode 100755
index 000000000..02c9dc6d7
--- /dev/null
+++ b/src/shader_recompiler/backend/glasm/emit_glasm_context_get_set.cpp
@@ -0,0 +1,346 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glasm/emit_context.h"
+#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/profile.h"
+#include "shader_recompiler/shader_info.h"
+
+namespace Shader::Backend::GLASM {
+namespace {
+void GetCbuf(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset,
+             std::string_view size) {
+    if (!binding.IsImmediate()) {
+        throw NotImplementedException("Indirect constant buffer loading");
+    }
+    const Register ret{ctx.reg_alloc.Define(inst)};
+    if (offset.type == Type::U32) {
+        // Avoid reading arrays out of bounds, matching hardware's behavior
+        if (offset.imm_u32 >= 0x10'000) {
+            ctx.Add("MOV.S {},0;", ret);
+            return;
+        }
+    }
+    ctx.Add("LDC.{} {},c{}[{}];", size, ret, binding.U32(), offset);
+}
+
+bool IsInputArray(Stage stage) {
+    return stage == Stage::Geometry || stage == Stage::TessellationControl ||
+           stage == Stage::TessellationEval;
+}
+
+std::string VertexIndex(EmitContext& ctx, ScalarU32 vertex) {
+    return IsInputArray(ctx.stage) ? fmt::format("[{}]", vertex) : "";
+}
+
+u32 TexCoordIndex(IR::Attribute attr) {
+    return (static_cast<u32>(attr) - static_cast<u32>(IR::Attribute::FixedFncTexture0S)) / 4;
+}
+} // Anonymous namespace
+
+void EmitGetCbufU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset) {
+    GetCbuf(ctx, inst, binding, offset, "U8");
+}
+
+void EmitGetCbufS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset) {
+    GetCbuf(ctx, inst, binding, offset, "S8");
+}
+
+void EmitGetCbufU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset) {
+    GetCbuf(ctx, inst, binding, offset, "U16");
+}
+
+void EmitGetCbufS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset) {
+    GetCbuf(ctx, inst, binding, offset, "S16");
+}
+
+void EmitGetCbufU32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset) {
+    GetCbuf(ctx, inst, binding, offset, "U32");
+}
+
+void EmitGetCbufF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset) {
+    GetCbuf(ctx, inst, binding, offset, "F32");
+}
+
+void EmitGetCbufU32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                      ScalarU32 offset) {
+    GetCbuf(ctx, inst, binding, offset, "U32X2");
+}
+
+void EmitGetAttribute(EmitContext& ctx, IR::Inst& inst, IR::Attribute attr, ScalarU32 vertex) {
+    const u32 element{static_cast<u32>(attr) % 4};
+    const char swizzle{"xyzw"[element]};
+    if (IR::IsGeneric(attr)) {
+        const u32 index{IR::GenericAttributeIndex(attr)};
+        ctx.Add("MOV.F {}.x,in_attr{}{}[0].{};", inst, index, VertexIndex(ctx, vertex), swizzle);
+        return;
+    }
+    if (attr >= IR::Attribute::FixedFncTexture0S && attr <= IR::Attribute::FixedFncTexture9Q) {
+        const u32 index{TexCoordIndex(attr)};
+        ctx.Add("MOV.F {}.x,{}.texcoord[{}].{};", inst, ctx.attrib_name, index, swizzle);
+        return;
+    }
+    switch (attr) {
+    case IR::Attribute::PrimitiveId:
+        ctx.Add("MOV.S {}.x,primitive.id;", inst);
+        break;
+    case IR::Attribute::PositionX:
+    case IR::Attribute::PositionY:
+    case IR::Attribute::PositionZ:
+    case IR::Attribute::PositionW:
+        if (IsInputArray(ctx.stage)) {
+            ctx.Add("MOV.F {}.x,vertex_position{}.{};", inst, VertexIndex(ctx, vertex), swizzle);
+        } else {
+            ctx.Add("MOV.F {}.x,{}.position.{};", inst, ctx.attrib_name, swizzle);
+        }
+        break;
+    case IR::Attribute::ColorFrontDiffuseR:
+    case IR::Attribute::ColorFrontDiffuseG:
+    case IR::Attribute::ColorFrontDiffuseB:
+    case IR::Attribute::ColorFrontDiffuseA:
+        ctx.Add("MOV.F {}.x,{}.color.{};", inst, ctx.attrib_name, swizzle);
+        break;
+    case IR::Attribute::PointSpriteS:
+    case IR::Attribute::PointSpriteT:
+        ctx.Add("MOV.F {}.x,{}.pointcoord.{};", inst, ctx.attrib_name, swizzle);
+        break;
+    case IR::Attribute::TessellationEvaluationPointU:
+    case IR::Attribute::TessellationEvaluationPointV:
+        ctx.Add("MOV.F {}.x,vertex.tesscoord.{};", inst, swizzle);
+        break;
+    case IR::Attribute::InstanceId:
+        ctx.Add("MOV.S {}.x,{}.instance;", inst, ctx.attrib_name);
+        break;
+    case IR::Attribute::VertexId:
+        ctx.Add("MOV.S {}.x,{}.id;", inst, ctx.attrib_name);
+        break;
+    case IR::Attribute::FrontFace:
+        ctx.Add("CMP.S {}.x,{}.facing.x,0,-1;", inst, ctx.attrib_name);
+        break;
+    default:
+        throw NotImplementedException("Get attribute {}", attr);
+    }
+}
+
+void EmitSetAttribute(EmitContext& ctx, IR::Attribute attr, ScalarF32 value,
+                      [[maybe_unused]] ScalarU32 vertex) {
+    const u32 element{static_cast<u32>(attr) % 4};
+    const char swizzle{"xyzw"[element]};
+    if (IR::IsGeneric(attr)) {
+        const u32 index{IR::GenericAttributeIndex(attr)};
+        ctx.Add("MOV.F out_attr{}[0].{},{};", index, swizzle, value);
+        return;
+    }
+    if (attr >= IR::Attribute::FixedFncTexture0S && attr <= IR::Attribute::FixedFncTexture9R) {
+        const u32 index{TexCoordIndex(attr)};
+        ctx.Add("MOV.F result.texcoord[{}].{},{};", index, swizzle, value);
+        return;
+    }
+    switch (attr) {
+    case IR::Attribute::Layer:
+        if (ctx.stage == Stage::Geometry || ctx.profile.support_viewport_index_layer_non_geometry) {
+            ctx.Add("MOV.F result.layer.x,{};", value);
+        } else {
+            LOG_WARNING(Shader_GLASM,
+                        "Layer stored outside of geometry shader not supported by device");
+        }
+        break;
+    case IR::Attribute::ViewportIndex:
+        if (ctx.stage == Stage::Geometry || ctx.profile.support_viewport_index_layer_non_geometry) {
+            ctx.Add("MOV.F result.viewport.x,{};", value);
+        } else {
+            LOG_WARNING(Shader_GLASM,
+                        "Viewport stored outside of geometry shader not supported by device");
+        }
+        break;
+    case IR::Attribute::ViewportMask:
+        // NV_viewport_array2 is required to access result.viewportmask, regardless of shader stage.
+        if (ctx.profile.support_viewport_index_layer_non_geometry) {
+            ctx.Add("MOV.F result.viewportmask[0].x,{};", value);
+        } else {
+            LOG_WARNING(Shader_GLASM, "Device does not support storing to ViewportMask");
+        }
+        break;
+    case IR::Attribute::PointSize:
+        ctx.Add("MOV.F result.pointsize.x,{};", value);
+        break;
+    case IR::Attribute::PositionX:
+    case IR::Attribute::PositionY:
+    case IR::Attribute::PositionZ:
+    case IR::Attribute::PositionW:
+        ctx.Add("MOV.F result.position.{},{};", swizzle, value);
+        break;
+    case IR::Attribute::ColorFrontDiffuseR:
+    case IR::Attribute::ColorFrontDiffuseG:
+    case IR::Attribute::ColorFrontDiffuseB:
+    case IR::Attribute::ColorFrontDiffuseA:
+        ctx.Add("MOV.F result.color.{},{};", swizzle, value);
+        break;
+    case IR::Attribute::ColorFrontSpecularR:
+    case IR::Attribute::ColorFrontSpecularG:
+    case IR::Attribute::ColorFrontSpecularB:
+    case IR::Attribute::ColorFrontSpecularA:
+        ctx.Add("MOV.F result.color.secondary.{},{};", swizzle, value);
+        break;
+    case IR::Attribute::ColorBackDiffuseR:
+    case IR::Attribute::ColorBackDiffuseG:
+    case IR::Attribute::ColorBackDiffuseB:
+    case IR::Attribute::ColorBackDiffuseA:
+        ctx.Add("MOV.F result.color.back.{},{};", swizzle, value);
+        break;
+    case IR::Attribute::ColorBackSpecularR:
+    case IR::Attribute::ColorBackSpecularG:
+    case IR::Attribute::ColorBackSpecularB:
+    case IR::Attribute::ColorBackSpecularA:
+        ctx.Add("MOV.F result.color.back.secondary.{},{};", swizzle, value);
+        break;
+    case IR::Attribute::FogCoordinate:
+        ctx.Add("MOV.F result.fogcoord.x,{};", value);
+        break;
+    case IR::Attribute::ClipDistance0:
+    case IR::Attribute::ClipDistance1:
+    case IR::Attribute::ClipDistance2:
+    case IR::Attribute::ClipDistance3:
+    case IR::Attribute::ClipDistance4:
+    case IR::Attribute::ClipDistance5:
+    case IR::Attribute::ClipDistance6:
+    case IR::Attribute::ClipDistance7: {
+        const u32 index{static_cast<u32>(attr) - static_cast<u32>(IR::Attribute::ClipDistance0)};
+        ctx.Add("MOV.F result.clip[{}].x,{};", index, value);
+        break;
+    }
+    default:
+        throw NotImplementedException("Set attribute {}", attr);
+    }
+}
+
+void EmitGetAttributeIndexed(EmitContext& ctx, IR::Inst& inst, ScalarS32 offset, ScalarU32 vertex) {
+    // RC.x = base_index
+    // RC.y = masked_index
+    // RC.z = compare_index
+    ctx.Add("SHR.S RC.x,{},2;"
+            "AND.S RC.y,RC.x,3;"
+            "SHR.S RC.z,{},4;",
+            offset, offset);
+
+    const Register ret{ctx.reg_alloc.Define(inst)};
+    u32 num_endifs{};
+    const auto read{[&](u32 compare_index, const std::array<std::string, 4>& values) {
+        ++num_endifs;
+        ctx.Add("SEQ.S.CC RC.w,RC.z,{};" // compare_index
+                "IF NE.w;"
+                // X
+                "SEQ.S.CC RC.w,RC.y,0;"
+                "IF NE.w;"
+                "MOV {}.x,{};"
+                "ELSE;"
+                // Y
+                "SEQ.S.CC RC.w,RC.y,1;"
+                "IF NE.w;"
+                "MOV {}.x,{};"
+                "ELSE;"
+                // Z
+                "SEQ.S.CC RC.w,RC.y,2;"
+                "IF NE.w;"
+                "MOV {}.x,{};"
+                "ELSE;"
+                // W
+                "MOV {}.x,{};"
+                "ENDIF;"
+                "ENDIF;"
+                "ENDIF;"
+                "ELSE;",
+                compare_index, ret, values[0], ret, values[1], ret, values[2], ret, values[3]);
+    }};
+    const auto read_swizzled{[&](u32 compare_index, std::string_view value) {
+        const std::array values{fmt::format("{}.x", value), fmt::format("{}.y", value),
+                                fmt::format("{}.z", value), fmt::format("{}.w", value)};
+        read(compare_index, values);
+    }};
+    if (ctx.info.loads.AnyComponent(IR::Attribute::PositionX)) {
+        const u32 index{static_cast<u32>(IR::Attribute::PositionX)};
+        if (IsInputArray(ctx.stage)) {
+            read_swizzled(index, fmt::format("vertex_position{}", VertexIndex(ctx, vertex)));
+        } else {
+            read_swizzled(index, fmt::format("{}.position", ctx.attrib_name));
+        }
+    }
+    for (u32 index = 0; index < static_cast<u32>(IR::NUM_GENERICS); ++index) {
+        if (!ctx.info.loads.Generic(index)) {
+            continue;
+        }
+        read_swizzled(index, fmt::format("in_attr{}{}[0]", index, VertexIndex(ctx, vertex)));
+    }
+    for (u32 i = 0; i < num_endifs; ++i) {
+        ctx.Add("ENDIF;");
+    }
+}
+
+void EmitSetAttributeIndexed([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] ScalarU32 offset,
+                             [[maybe_unused]] ScalarF32 value, [[maybe_unused]] ScalarU32 vertex) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGetPatch(EmitContext& ctx, IR::Inst& inst, IR::Patch patch) {
+    if (!IR::IsGeneric(patch)) {
+        throw NotImplementedException("Non-generic patch load");
+    }
+    const u32 index{IR::GenericPatchIndex(patch)};
+    const u32 element{IR::GenericPatchElement(patch)};
+    const char swizzle{"xyzw"[element]};
+    const std::string_view out{ctx.stage == Stage::TessellationControl ? ".out" : ""};
+    ctx.Add("MOV.F {},primitive{}.patch.attrib[{}].{};", inst, out, index, swizzle);
+}
+
+void EmitSetPatch(EmitContext& ctx, IR::Patch patch, ScalarF32 value) {
+    if (IR::IsGeneric(patch)) {
+        const u32 index{IR::GenericPatchIndex(patch)};
+        const u32 element{IR::GenericPatchElement(patch)};
+        ctx.Add("MOV.F result.patch.attrib[{}].{},{};", index, "xyzw"[element], value);
+        return;
+    }
+    switch (patch) {
+    case IR::Patch::TessellationLodLeft:
+    case IR::Patch::TessellationLodRight:
+    case IR::Patch::TessellationLodTop:
+    case IR::Patch::TessellationLodBottom: {
+        const u32 index{static_cast<u32>(patch) - u32(IR::Patch::TessellationLodLeft)};
+        ctx.Add("MOV.F result.patch.tessouter[{}].x,{};", index, value);
+        break;
+    }
+    case IR::Patch::TessellationLodInteriorU:
+        ctx.Add("MOV.F result.patch.tessinner[0].x,{};", value);
+        break;
+    case IR::Patch::TessellationLodInteriorV:
+        ctx.Add("MOV.F result.patch.tessinner[1].x,{};", value);
+        break;
+    default:
+        throw NotImplementedException("Patch {}", patch);
+    }
+}
+
+void EmitSetFragColor(EmitContext& ctx, u32 index, u32 component, ScalarF32 value) {
+    ctx.Add("MOV.F frag_color{}.{},{};", index, "xyzw"[component], value);
+}
+
+void EmitSetSampleMask(EmitContext& ctx, ScalarS32 value) {
+    ctx.Add("MOV.S result.samplemask.x,{};", value);
+}
+
+void EmitSetFragDepth(EmitContext& ctx, ScalarF32 value) {
+    ctx.Add("MOV.F result.depth.z,{};", value);
+}
+
+void EmitLoadLocal(EmitContext& ctx, IR::Inst& inst, ScalarU32 word_offset) {
+    ctx.Add("MOV.U {},lmem[{}].x;", inst, word_offset);
+}
+
+void EmitWriteLocal(EmitContext& ctx, ScalarU32 word_offset, ScalarU32 value) {
+    ctx.Add("MOV.U lmem[{}].x,{};", word_offset, value);
+}
+
+} // namespace Shader::Backend::GLASM
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_control_flow.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_control_flow.cpp
new file mode 100755
index 000000000..e69de29bb
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_convert.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_convert.cpp
new file mode 100755
index 000000000..ccdf1cbc8
--- /dev/null
+++ b/src/shader_recompiler/backend/glasm/emit_glasm_convert.cpp
@@ -0,0 +1,231 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
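+
+// Note (editor's sketch, not part of the upstream patch): every conversion in
+// this file lowers to a single GLASM CVT instruction of the form
+// "CVT.<dest>.<src><rounding>". For example, an F32 -> S32 conversion whose IR
+// flags request round-towards-zero emits:
+//
+//   CVT.S32.F32.TRUNC R0.x,R1.x;
+//
+// where ".TRUNC" comes from FpRounding(IR::FpRounding::RZ) and R0/R1 stand in
+// for registers chosen by RegAlloc (names illustrative).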
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glasm/emit_context.h"
+#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLASM {
+namespace {
+std::string_view FpRounding(IR::FpRounding fp_rounding) {
+    switch (fp_rounding) {
+    case IR::FpRounding::DontCare:
+        return "";
+    case IR::FpRounding::RN:
+        return ".ROUND";
+    case IR::FpRounding::RZ:
+        return ".TRUNC";
+    case IR::FpRounding::RM:
+        return ".FLR";
+    case IR::FpRounding::RP:
+        return ".CEIL";
+    }
+    throw InvalidArgument("Invalid floating-point rounding {}", fp_rounding);
+}
+
+template <typename InputType>
+void Convert(EmitContext& ctx, IR::Inst& inst, InputType value, std::string_view dest,
+             std::string_view src, bool is_long_result) {
+    const std::string_view fp_rounding{FpRounding(inst.Flags<IR::FpControl>().rounding)};
+    const auto ret{is_long_result ? ctx.reg_alloc.LongDefine(inst) : ctx.reg_alloc.Define(inst)};
+    ctx.Add("CVT.{}.{}{} {}.x,{};", dest, src, fp_rounding, ret, value);
+}
+} // Anonymous namespace
+
+void EmitConvertS16F16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    Convert(ctx, inst, value, "S16", "F16", false);
+}
+
+void EmitConvertS16F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) {
+    Convert(ctx, inst, value, "S16", "F32", false);
+}
+
+void EmitConvertS16F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) {
+    Convert(ctx, inst, value, "S16", "F64", false);
+}
+
+void EmitConvertS32F16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    Convert(ctx, inst, value, "S32", "F16", false);
+}
+
+void EmitConvertS32F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) {
+    Convert(ctx, inst, value, "S32", "F32", false);
+}
+
+void EmitConvertS32F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) {
+    Convert(ctx, inst, value, "S32", "F64", false);
+}
+
+void EmitConvertS64F16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    Convert(ctx, inst, value, "S64", "F16", true);
+}
+
+void EmitConvertS64F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) {
+    Convert(ctx, inst, value, "S64", "F32", true);
+}
+
+void EmitConvertS64F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) {
+    Convert(ctx, inst, value, "S64", "F64", true);
+}
+
+void EmitConvertU16F16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    Convert(ctx, inst, value, "U16", "F16", false);
+}
+
+void EmitConvertU16F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) {
+    Convert(ctx, inst, value, "U16", "F32", false);
+}
+
+void EmitConvertU16F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) {
+    Convert(ctx, inst, value, "U16", "F64", false);
+}
+
+void EmitConvertU32F16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    Convert(ctx, inst, value, "U32", "F16", false);
+}
+
+void EmitConvertU32F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) {
+    Convert(ctx, inst, value, "U32", "F32", false);
+}
+
+void EmitConvertU32F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) {
+    Convert(ctx, inst, value, "U32", "F64", false);
+}
+
+void EmitConvertU64F16(EmitContext& ctx, IR::Inst& inst, Register value) {
+    Convert(ctx, inst, value, "U64", "F16", true);
+}
+
+void EmitConvertU64F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) {
+    Convert(ctx, inst, value, "U64", "F32", true);
+}
+
+void EmitConvertU64F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) {
+    Convert(ctx, inst, value, "U64", "F64", true);
+}
+
+void EmitConvertU64U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value) {
Convert(ctx, inst, value, "U64", "U32", true); +} + +void EmitConvertU32U64(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "U32", "U64", false); +} + +void EmitConvertF16F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + Convert(ctx, inst, value, "F16", "F32", false); +} + +void EmitConvertF32F16(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F32", "F16", false); +} + +void EmitConvertF32F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) { + Convert(ctx, inst, value, "F32", "F64", false); +} + +void EmitConvertF64F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + Convert(ctx, inst, value, "F64", "F32", true); +} + +void EmitConvertF16S8(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F16", "S8", false); +} + +void EmitConvertF16S16(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F16", "S16", false); +} + +void EmitConvertF16S32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + Convert(ctx, inst, value, "F16", "S32", false); +} + +void EmitConvertF16S64(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F16", "S64", false); +} + +void EmitConvertF16U8(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F16", "U8", false); +} + +void EmitConvertF16U16(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F16", "U16", false); +} + +void EmitConvertF16U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value) { + Convert(ctx, inst, value, "F16", "U32", false); +} + +void EmitConvertF16U64(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F16", "U64", false); +} + +void EmitConvertF32S8(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F32", "S8", false); +} + +void EmitConvertF32S16(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F32", "S16", false); +} + +void EmitConvertF32S32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + Convert(ctx, inst, value, "F32", "S32", false); +} + +void EmitConvertF32S64(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F32", "S64", false); +} + +void EmitConvertF32U8(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F32", "U8", false); +} + +void EmitConvertF32U16(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F32", "U16", false); +} + +void EmitConvertF32U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value) { + Convert(ctx, inst, value, "F32", "U32", false); +} + +void EmitConvertF32U64(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F32", "U64", false); +} + +void EmitConvertF64S8(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F64", "S8", true); +} + +void EmitConvertF64S16(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F64", "S16", true); +} + +void EmitConvertF64S32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + Convert(ctx, inst, value, "F64", "S32", true); +} + +void EmitConvertF64S64(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F64", "S64", true); +} + +void EmitConvertF64U8(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F64", "U8", true); +} + +void EmitConvertF64U16(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, 
"F64", "U16", true); +} + +void EmitConvertF64U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value) { + Convert(ctx, inst, value, "F64", "U32", true); +} + +void EmitConvertF64U64(EmitContext& ctx, IR::Inst& inst, Register value) { + Convert(ctx, inst, value, "F64", "U64", true); +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_floating_point.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_floating_point.cpp new file mode 100755 index 000000000..4ed58619d --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_floating_point.cpp @@ -0,0 +1,414 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include + +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLASM { +namespace { +template +void Compare(EmitContext& ctx, IR::Inst& inst, InputType lhs, InputType rhs, std::string_view op, + std::string_view type, bool ordered, bool inequality = false) { + const Register ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("{}.{} RC.x,{},{};", op, type, lhs, rhs); + if (ordered && inequality) { + ctx.Add("SEQ.{} RC.y,{},{};" + "SEQ.{} RC.z,{},{};" + "AND.U RC.x,RC.x,RC.y;" + "AND.U RC.x,RC.x,RC.z;" + "SNE.S {}.x,RC.x,0;", + type, lhs, lhs, type, rhs, rhs, ret); + } else if (ordered) { + ctx.Add("SNE.S {}.x,RC.x,0;", ret); + } else { + ctx.Add("SNE.{} RC.y,{},{};" + "SNE.{} RC.z,{},{};" + "OR.U RC.x,RC.x,RC.y;" + "OR.U RC.x,RC.x,RC.z;" + "SNE.S {}.x,RC.x,0;", + type, lhs, lhs, type, rhs, rhs, ret); + } +} + +template +void Clamp(EmitContext& ctx, Register ret, InputType value, InputType min_value, + InputType max_value, std::string_view type) { + // Call MAX first to properly clamp nan to min_value instead + ctx.Add("MAX.{} RC.x,{},{};" + "MIN.{} {}.x,RC.x,{};", + type, min_value, value, type, ret, max_value); +} + +std::string_view Precise(IR::Inst& inst) { + const bool precise{inst.Flags().no_contraction}; + return precise ? 
".PREC" : ""; +} +} // Anonymous namespace + +void EmitFPAbs16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPAbs32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("MOV.F {}.x,|{}|;", inst, value); +} + +void EmitFPAbs64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) { + ctx.LongAdd("MOV.F64 {}.x,|{}|;", inst, value); +} + +void EmitFPAdd16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] Register a, [[maybe_unused]] Register b) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPAdd32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b) { + ctx.Add("ADD.F{} {}.x,{},{};", Precise(inst), ctx.reg_alloc.Define(inst), a, b); +} + +void EmitFPAdd64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b) { + ctx.Add("ADD.F64{} {}.x,{},{};", Precise(inst), ctx.reg_alloc.LongDefine(inst), a, b); +} + +void EmitFPFma16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] Register a, [[maybe_unused]] Register b, + [[maybe_unused]] Register c) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPFma32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b, ScalarF32 c) { + ctx.Add("MAD.F{} {}.x,{},{},{};", Precise(inst), ctx.reg_alloc.Define(inst), a, b, c); +} + +void EmitFPFma64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b, ScalarF64 c) { + ctx.Add("MAD.F64{} {}.x,{},{},{};", Precise(inst), ctx.reg_alloc.LongDefine(inst), a, b, c); +} + +void EmitFPMax32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b) { + ctx.Add("MAX.F {}.x,{},{};", inst, a, b); +} + +void EmitFPMax64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b) { + ctx.LongAdd("MAX.F64 {}.x,{},{};", inst, a, b); +} + +void EmitFPMin32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b) { + ctx.Add("MIN.F {}.x,{},{};", inst, a, b); +} + +void EmitFPMin64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b) { + ctx.LongAdd("MIN.F64 {}.x,{},{};", inst, a, b); +} + +void EmitFPMul16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] Register a, [[maybe_unused]] Register b) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPMul32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b) { + ctx.Add("MUL.F{} {}.x,{},{};", Precise(inst), ctx.reg_alloc.Define(inst), a, b); +} + +void EmitFPMul64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b) { + ctx.Add("MUL.F64{} {}.x,{},{};", Precise(inst), ctx.reg_alloc.LongDefine(inst), a, b); +} + +void EmitFPNeg16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPNeg32(EmitContext& ctx, IR::Inst& inst, ScalarRegister value) { + ctx.Add("MOV.F {}.x,-{};", inst, value); +} + +void EmitFPNeg64(EmitContext& ctx, IR::Inst& inst, Register value) { + ctx.LongAdd("MOV.F64 {}.x,-{};", inst, value); +} + +void EmitFPSin(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("SIN {}.x,{};", inst, value); +} + +void EmitFPCos(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("COS {}.x,{};", inst, value); +} + +void EmitFPExp2(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("EX2 {}.x,{};", inst, value); +} + +void EmitFPLog2(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("LG2 {}.x,{};", 
inst, value); +} + +void EmitFPRecip32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("RCP {}.x,{};", inst, value); +} + +void EmitFPRecip64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPRecipSqrt32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("RSQ {}.x,{};", inst, value); +} + +void EmitFPRecipSqrt64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPSqrt(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + const Register ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("RSQ RC.x,{};RCP {}.x,RC.x;", value, ret); +} + +void EmitFPSaturate16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPSaturate32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("MOV.F.SAT {}.x,{};", inst, value); +} + +void EmitFPSaturate64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPClamp16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value, + [[maybe_unused]] Register min_value, [[maybe_unused]] Register max_value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPClamp32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value, ScalarF32 min_value, + ScalarF32 max_value) { + Clamp(ctx, ctx.reg_alloc.Define(inst), value, min_value, max_value, "F"); +} + +void EmitFPClamp64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value, ScalarF64 min_value, + ScalarF64 max_value) { + Clamp(ctx, ctx.reg_alloc.LongDefine(inst), value, min_value, max_value, "F64"); +} + +void EmitFPRoundEven16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPRoundEven32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("ROUND.F {}.x,{};", inst, value); +} + +void EmitFPRoundEven64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) { + ctx.LongAdd("ROUND.F64 {}.x,{};", inst, value); +} + +void EmitFPFloor16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPFloor32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("FLR.F {}.x,{};", inst, value); +} + +void EmitFPFloor64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) { + ctx.LongAdd("FLR.F64 {}.x,{};", inst, value); +} + +void EmitFPCeil16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPCeil32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("CEIL.F {}.x,{};", inst, value); +} + +void EmitFPCeil64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) { + ctx.LongAdd("CEIL.F64 {}.x,{};", inst, value); +} + +void EmitFPTrunc16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPTrunc32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + ctx.Add("TRUNC.F {}.x,{};", inst, value); +} + +void EmitFPTrunc64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) { + ctx.LongAdd("TRUNC.F64 {}.x,{};", inst, value); +} + +void EmitFPOrdEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw 
NotImplementedException("GLASM instruction"); +} + +void EmitFPOrdEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SEQ", "F", true); +} + +void EmitFPOrdEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SEQ", "F64", true); +} + +void EmitFPUnordEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPUnordEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SEQ", "F", false); +} + +void EmitFPUnordEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SEQ", "F64", false); +} + +void EmitFPOrdNotEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPOrdNotEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SNE", "F", true, true); +} + +void EmitFPOrdNotEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SNE", "F64", true, true); +} + +void EmitFPUnordNotEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPUnordNotEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SNE", "F", false, true); +} + +void EmitFPUnordNotEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SNE", "F64", false, true); +} + +void EmitFPOrdLessThan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPOrdLessThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SLT", "F", true); +} + +void EmitFPOrdLessThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SLT", "F64", true); +} + +void EmitFPUnordLessThan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPUnordLessThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SLT", "F", false); +} + +void EmitFPUnordLessThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SLT", "F64", false); +} + +void EmitFPOrdGreaterThan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPOrdGreaterThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SGT", "F", true); +} + +void EmitFPOrdGreaterThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SGT", "F64", true); +} + +void EmitFPUnordGreaterThan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPUnordGreaterThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, 
ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SGT", "F", false); +} + +void EmitFPUnordGreaterThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SGT", "F64", false); +} + +void EmitFPOrdLessThanEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPOrdLessThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SLE", "F", true); +} + +void EmitFPOrdLessThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SLE", "F64", true); +} + +void EmitFPUnordLessThanEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPUnordLessThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SLE", "F", false); +} + +void EmitFPUnordLessThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SLE", "F64", false); +} + +void EmitFPOrdGreaterThanEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPOrdGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SGE", "F", true); +} + +void EmitFPOrdGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SGE", "F64", true); +} + +void EmitFPUnordGreaterThanEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register lhs, + [[maybe_unused]] Register rhs) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPUnordGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs) { + Compare(ctx, inst, lhs, rhs, "SGE", "F", false); +} + +void EmitFPUnordGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs) { + Compare(ctx, inst, lhs, rhs, "SGE", "F64", false); +} + +void EmitFPIsNan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitFPIsNan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value) { + Compare(ctx, inst, value, value, "SNE", "F", true, false); +} + +void EmitFPIsNan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value) { + Compare(ctx, inst, value, value, "SNE", "F64", true, false); +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_image.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_image.cpp new file mode 100755 index 000000000..09e3a9b82 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_image.cpp @@ -0,0 +1,850 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
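+// For reference: the ordered/unordered split implemented by the Compare helper in
+// emit_glasm_floating_point.cpp above follows the usual IEEE-754 rule. A host-side sketch of
+// the intended semantics (OrdEqual/UnordEqual are illustrative names, not backend code):
+//
+//   bool OrdEqual(float lhs, float rhs) {
+//       return !(lhs != lhs) && !(rhs != rhs) && lhs == rhs; // false if either is NaN
+//   }
+//   bool UnordEqual(float lhs, float rhs) {
+//       return (lhs != lhs) || (rhs != rhs) || lhs == rhs;   // true if either is NaN
+//   }
+//
+// EmitFPIsNan32 builds on the same identity: it compares a value against itself with SNE,
+// and only a NaN is not equal to itself.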
+
+#include <utility>
+
+#include "shader_recompiler/backend/glasm/emit_context.h"
+#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLASM {
+namespace {
+struct ScopedRegister {
+    ScopedRegister() = default;
+    ScopedRegister(RegAlloc& reg_alloc_) : reg_alloc{&reg_alloc_}, reg{reg_alloc->AllocReg()} {}
+
+    ~ScopedRegister() {
+        if (reg_alloc) {
+            reg_alloc->FreeReg(reg);
+        }
+    }
+
+    ScopedRegister& operator=(ScopedRegister&& rhs) noexcept {
+        if (reg_alloc) {
+            reg_alloc->FreeReg(reg);
+        }
+        reg_alloc = std::exchange(rhs.reg_alloc, nullptr);
+        reg = rhs.reg;
+        return *this;
+    }
+
+    ScopedRegister(ScopedRegister&& rhs) noexcept
+        : reg_alloc{std::exchange(rhs.reg_alloc, nullptr)}, reg{rhs.reg} {}
+
+    ScopedRegister& operator=(const ScopedRegister&) = delete;
+    ScopedRegister(const ScopedRegister&) = delete;
+
+    RegAlloc* reg_alloc{};
+    Register reg;
+};
+
+std::string Texture(EmitContext& ctx, IR::TextureInstInfo info,
+                    [[maybe_unused]] const IR::Value& index) {
+    // FIXME: indexed reads
+    if (info.type == TextureType::Buffer) {
+        return fmt::format("texture[{}]", ctx.texture_buffer_bindings.at(info.descriptor_index));
+    } else {
+        return fmt::format("texture[{}]", ctx.texture_bindings.at(info.descriptor_index));
+    }
+}
+
+std::string Image(EmitContext& ctx, IR::TextureInstInfo info,
+                  [[maybe_unused]] const IR::Value& index) {
+    // FIXME: indexed reads
+    if (info.type == TextureType::Buffer) {
+        return fmt::format("image[{}]", ctx.image_buffer_bindings.at(info.descriptor_index));
+    } else {
+        return fmt::format("image[{}]", ctx.image_bindings.at(info.descriptor_index));
+    }
+}
+
+std::string_view TextureType(IR::TextureInstInfo info) {
+    if (info.is_depth) {
+        switch (info.type) {
+        case TextureType::Color1D:
+            return "SHADOW1D";
+        case TextureType::ColorArray1D:
+            return "SHADOWARRAY1D";
+        case TextureType::Color2D:
+            return "SHADOW2D";
+        case TextureType::ColorArray2D:
+            return "SHADOWARRAY2D";
+        case TextureType::Color3D:
+            return "SHADOW3D";
+        case TextureType::ColorCube:
+            return "SHADOWCUBE";
+        case TextureType::ColorArrayCube:
+            return "SHADOWARRAYCUBE";
+        case TextureType::Buffer:
+            return "SHADOWBUFFER";
+        }
+    } else {
+        switch (info.type) {
+        case TextureType::Color1D:
+            return "1D";
+        case TextureType::ColorArray1D:
+            return "ARRAY1D";
+        case TextureType::Color2D:
+            return "2D";
+        case TextureType::ColorArray2D:
+            return "ARRAY2D";
+        case TextureType::Color3D:
+            return "3D";
+        case TextureType::ColorCube:
+            return "CUBE";
+        case TextureType::ColorArrayCube:
+            return "ARRAYCUBE";
+        case TextureType::Buffer:
+            return "BUFFER";
+        }
+    }
+    throw InvalidArgument("Invalid texture type {}", info.type.Value());
+}
+
+std::string Offset(EmitContext& ctx, const IR::Value& offset) {
+    if (offset.IsEmpty()) {
+        return "";
+    }
+    return fmt::format(",offset({})", Register{ctx.reg_alloc.Consume(offset)});
+}
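+
+// For reference, a minimal usage sketch of the ScopedRegister RAII helper above (illustrative
+// only, assuming a live EmitContext named ctx and a ScalarF32 named value):
+//
+//   {
+//       ScopedRegister tmp(ctx.reg_alloc);          // constructor calls RegAlloc::AllocReg()
+//       ctx.Add("MOV.F {}.x,{};", tmp.reg, value);  // tmp.reg is an ordinary Register
+//   }                                               // destructor calls RegAlloc::FreeReg()
+//
+// The move constructor and move assignment null out the source's reg_alloc pointer, so a
+// register can be handed out of helpers such as Coord below without being freed twice.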
{}.z,{}.x;" + "MOV {}.w,{}.z;" + "MOV {}.x,{}.y;" + "MOV {}.y,{}.w;" + "MOV {}.z,{}.y;" + "MOV {}.w,{}.w;", + off_x, offsets_a, off_x, offsets_a, off_x, offsets_b, off_x, offsets_b, off_y, + offsets_a, off_y, offsets_a, off_y, offsets_b, off_y, offsets_b); +} + +std::string GradOffset(const IR::Value& offset) { + if (offset.IsImmediate()) { + LOG_WARNING(Shader_GLASM, "Gradient offset is a scalar immediate"); + return ""; + } + IR::Inst* const vector{offset.InstRecursive()}; + if (!vector->AreAllArgsImmediates()) { + LOG_WARNING(Shader_GLASM, "Gradient offset vector is not immediate"); + return ""; + } + switch (vector->NumArgs()) { + case 1: + return fmt::format(",({})", static_cast(vector->Arg(0).U32())); + case 2: + return fmt::format(",({},{})", static_cast(vector->Arg(0).U32()), + static_cast(vector->Arg(1).U32())); + default: + throw LogicError("Invalid number of gradient offsets {}", vector->NumArgs()); + } +} + +std::pair Coord(EmitContext& ctx, const IR::Value& coord) { + if (coord.IsImmediate()) { + ScopedRegister scoped_reg(ctx.reg_alloc); + ctx.Add("MOV.U {}.x,{};", scoped_reg.reg, ScalarU32{ctx.reg_alloc.Consume(coord)}); + return {fmt::to_string(scoped_reg.reg), std::move(scoped_reg)}; + } + std::string coord_vec{fmt::to_string(Register{ctx.reg_alloc.Consume(coord)})}; + if (coord.InstRecursive()->HasUses()) { + // Move non-dead coords to a separate register, although this should never happen because + // vectors are only assembled for immediate texture instructions + ctx.Add("MOV.F RC,{};", coord_vec); + coord_vec = "RC"; + } + return {std::move(coord_vec), ScopedRegister{}}; +} + +void StoreSparse(EmitContext& ctx, IR::Inst* sparse_inst) { + if (!sparse_inst) { + return; + } + const Register sparse_ret{ctx.reg_alloc.Define(*sparse_inst)}; + ctx.Add("MOV.S {},-1;" + "MOV.S {}(NONRESIDENT),0;", + sparse_ret, sparse_ret); +} + +std::string_view FormatStorage(ImageFormat format) { + switch (format) { + case ImageFormat::Typeless: + return "U"; + case ImageFormat::R8_UINT: + return "U8"; + case ImageFormat::R8_SINT: + return "S8"; + case ImageFormat::R16_UINT: + return "U16"; + case ImageFormat::R16_SINT: + return "S16"; + case ImageFormat::R32_UINT: + return "U32"; + case ImageFormat::R32G32_UINT: + return "U32X2"; + case ImageFormat::R32G32B32A32_UINT: + return "U32X4"; + } + throw InvalidArgument("Invalid image format {}", format); +} + +template +void ImageAtomic(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, T value, + std::string_view op) { + const auto info{inst.Flags()}; + const std::string_view type{TextureType(info)}; + const std::string image{Image(ctx, info, index)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("ATOMIM.{} {},{},{},{},{};", op, ret, value, coord, image, type); +} + +IR::Inst* PrepareSparse(IR::Inst& inst) { + const auto sparse_inst{inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp)}; + if (sparse_inst) { + sparse_inst->Invalidate(); + } + return sparse_inst; +} +} // Anonymous namespace + +void EmitImageSampleImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, Register bias_lc, const IR::Value& offset) { + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + const std::string_view sparse_mod{sparse_inst ? ".SPARSE" : ""}; + const std::string_view lod_clamp_mod{info.has_lod_clamp ? 
".LODCLAMP" : ""}; + const std::string_view type{TextureType(info)}; + const std::string texture{Texture(ctx, info, index)}; + const std::string offset_vec{Offset(ctx, offset)}; + const auto [coord_vec, coord_alloc]{Coord(ctx, coord)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + if (info.has_bias) { + if (info.type == TextureType::ColorArrayCube) { + ctx.Add("TXB.F{}{} {},{},{},{},ARRAYCUBE{};", lod_clamp_mod, sparse_mod, ret, coord_vec, + bias_lc, texture, offset_vec); + } else { + if (info.has_lod_clamp) { + ctx.Add("MOV.F {}.w,{}.x;" + "TXB.F.LODCLAMP{} {},{},{}.y,{},{}{};", + coord_vec, bias_lc, sparse_mod, ret, coord_vec, bias_lc, texture, type, + offset_vec); + } else { + ctx.Add("MOV.F {}.w,{}.x;" + "TXB.F{} {},{},{},{}{};", + coord_vec, bias_lc, sparse_mod, ret, coord_vec, texture, type, offset_vec); + } + } + } else { + if (info.has_lod_clamp && info.type == TextureType::ColorArrayCube) { + ctx.Add("TEX.F.LODCLAMP{} {},{},{},{},ARRAYCUBE{};", sparse_mod, ret, coord_vec, + bias_lc, texture, offset_vec); + } else { + ctx.Add("TEX.F{}{} {},{},{},{}{};", lod_clamp_mod, sparse_mod, ret, coord_vec, texture, + type, offset_vec); + } + } + StoreSparse(ctx, sparse_inst); +} + +void EmitImageSampleExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, ScalarF32 lod, const IR::Value& offset) { + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + const std::string_view sparse_mod{sparse_inst ? ".SPARSE" : ""}; + const std::string_view type{TextureType(info)}; + const std::string texture{Texture(ctx, info, index)}; + const std::string offset_vec{Offset(ctx, offset)}; + const auto [coord_vec, coord_alloc]{Coord(ctx, coord)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + if (info.type == TextureType::ColorArrayCube) { + ctx.Add("TXL.F{} {},{},{},{},ARRAYCUBE{};", sparse_mod, ret, coord_vec, lod, texture, + offset_vec); + } else { + ctx.Add("MOV.F {}.w,{};" + "TXL.F{} {},{},{},{}{};", + coord_vec, lod, sparse_mod, ret, coord_vec, texture, type, offset_vec); + } + StoreSparse(ctx, sparse_inst); +} + +void EmitImageSampleDrefImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& dref, + const IR::Value& bias_lc, const IR::Value& offset) { + // Allocate early to avoid aliases + const auto info{inst.Flags()}; + ScopedRegister staging; + if (info.type == TextureType::ColorArrayCube) { + staging = ScopedRegister{ctx.reg_alloc}; + } + const ScalarF32 dref_val{ctx.reg_alloc.Consume(dref)}; + const Register bias_lc_vec{ctx.reg_alloc.Consume(bias_lc)}; + const auto sparse_inst{PrepareSparse(inst)}; + const std::string_view sparse_mod{sparse_inst ? 
".SPARSE" : ""}; + const std::string_view type{TextureType(info)}; + const std::string texture{Texture(ctx, info, index)}; + const std::string offset_vec{Offset(ctx, offset)}; + const auto [coord_vec, coord_alloc]{Coord(ctx, coord)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + if (info.has_bias) { + if (info.has_lod_clamp) { + switch (info.type) { + case TextureType::Color1D: + case TextureType::ColorArray1D: + case TextureType::Color2D: + ctx.Add("MOV.F {}.z,{};" + "MOV.F {}.w,{}.x;" + "TXB.F.LODCLAMP{} {},{},{}.y,{},{}{};", + coord_vec, dref_val, coord_vec, bias_lc_vec, sparse_mod, ret, coord_vec, + bias_lc_vec, texture, type, offset_vec); + break; + case TextureType::ColorArray2D: + case TextureType::ColorCube: + ctx.Add("MOV.F {}.w,{};" + "TXB.F.LODCLAMP{} {},{},{},{},{}{};", + coord_vec, dref_val, sparse_mod, ret, coord_vec, bias_lc_vec, texture, type, + offset_vec); + break; + default: + throw NotImplementedException("Invalid type {} with bias and lod clamp", + info.type.Value()); + } + } else { + switch (info.type) { + case TextureType::Color1D: + case TextureType::ColorArray1D: + case TextureType::Color2D: + ctx.Add("MOV.F {}.z,{};" + "MOV.F {}.w,{}.x;" + "TXB.F{} {},{},{},{}{};", + coord_vec, dref_val, coord_vec, bias_lc_vec, sparse_mod, ret, coord_vec, + texture, type, offset_vec); + break; + case TextureType::ColorArray2D: + case TextureType::ColorCube: + ctx.Add("MOV.F {}.w,{};" + "TXB.F{} {},{},{},{},{}{};", + coord_vec, dref_val, sparse_mod, ret, coord_vec, bias_lc_vec, texture, type, + offset_vec); + break; + case TextureType::ColorArrayCube: + ctx.Add("MOV.F {}.x,{};" + "MOV.F {}.y,{}.x;" + "TXB.F{} {},{},{},{},{}{};", + staging.reg, dref_val, staging.reg, bias_lc_vec, sparse_mod, ret, coord_vec, + staging.reg, texture, type, offset_vec); + break; + default: + throw NotImplementedException("Invalid type {}", info.type.Value()); + } + } + } else { + if (info.has_lod_clamp) { + if (info.type != TextureType::ColorArrayCube) { + const bool w_swizzle{info.type == TextureType::ColorArray2D || + info.type == TextureType::ColorCube}; + const char dref_swizzle{w_swizzle ? 'w' : 'z'}; + ctx.Add("MOV.F {}.{},{};" + "TEX.F.LODCLAMP{} {},{},{},{},{}{};", + coord_vec, dref_swizzle, dref_val, sparse_mod, ret, coord_vec, bias_lc_vec, + texture, type, offset_vec); + } else { + ctx.Add("MOV.F {}.x,{};" + "MOV.F {}.y,{};" + "TEX.F.LODCLAMP{} {},{},{},{},{}{};", + staging.reg, dref_val, staging.reg, bias_lc_vec, sparse_mod, ret, coord_vec, + staging.reg, texture, type, offset_vec); + } + } else { + if (info.type != TextureType::ColorArrayCube) { + const bool w_swizzle{info.type == TextureType::ColorArray2D || + info.type == TextureType::ColorCube}; + const char dref_swizzle{w_swizzle ? 
'w' : 'z'};
+                ctx.Add("MOV.F {}.{},{};"
+                        "TEX.F{} {},{},{},{}{};",
+                        coord_vec, dref_swizzle, dref_val, sparse_mod, ret, coord_vec, texture,
+                        type, offset_vec);
+            } else {
+                ctx.Add("TEX.F{} {},{},{},{},{}{};", sparse_mod, ret, coord_vec, dref_val, texture,
+                        type, offset_vec);
+            }
+        }
+    }
+    StoreSparse(ctx, sparse_inst);
+}
+
+void EmitImageSampleDrefExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index,
+                                    const IR::Value& coord, const IR::Value& dref,
+                                    const IR::Value& lod, const IR::Value& offset) {
+    // Allocate early to avoid aliases
+    const auto info{inst.Flags<IR::TextureInstInfo>()};
+    ScopedRegister staging;
+    if (info.type == TextureType::ColorArrayCube) {
+        staging = ScopedRegister{ctx.reg_alloc};
+    }
+    const ScalarF32 dref_val{ctx.reg_alloc.Consume(dref)};
+    const ScalarF32 lod_val{ctx.reg_alloc.Consume(lod)};
+    const auto sparse_inst{PrepareSparse(inst)};
+    const std::string_view sparse_mod{sparse_inst ? ".SPARSE" : ""};
+    const std::string_view type{TextureType(info)};
+    const std::string texture{Texture(ctx, info, index)};
+    const std::string offset_vec{Offset(ctx, offset)};
+    const auto [coord_vec, coord_alloc]{Coord(ctx, coord)};
+    const Register ret{ctx.reg_alloc.Define(inst)};
+    switch (info.type) {
+    case TextureType::Color1D:
+    case TextureType::ColorArray1D:
+    case TextureType::Color2D:
+        ctx.Add("MOV.F {}.z,{};"
+                "MOV.F {}.w,{};"
+                "TXL.F{} {},{},{},{}{};",
+                coord_vec, dref_val, coord_vec, lod_val, sparse_mod, ret, coord_vec, texture, type,
+                offset_vec);
+        break;
+    case TextureType::ColorArray2D:
+    case TextureType::ColorCube:
+        ctx.Add("MOV.F {}.w,{};"
+                "TXL.F{} {},{},{},{},{}{};",
+                coord_vec, dref_val, sparse_mod, ret, coord_vec, lod_val, texture, type,
+                offset_vec);
+        break;
+    case TextureType::ColorArrayCube:
+        ctx.Add("MOV.F {}.x,{};"
+                "MOV.F {}.y,{};"
+                "TXL.F{} {},{},{},{},{}{};",
+                staging.reg, dref_val, staging.reg, lod_val, sparse_mod, ret, coord_vec,
+                staging.reg, texture, type, offset_vec);
+        break;
+    default:
+        throw NotImplementedException("Invalid type {}", info.type.Value());
+    }
+    StoreSparse(ctx, sparse_inst);
+}
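+
+// For reference, EmitImageGather below selects the gathered channel with a swizzle suffix
+// ("xyzw"[info.gather_component]) and, when a second offset is present, repacks the two
+// offset vectors through SwizzleOffsets for TXGO. A host-side sketch of that repacking, with
+// illustrative names (each input packs two (x,y) pairs):
+//
+//   off_x = {a.x, a.z, b.x, b.z};  // the four x offsets
+//   off_y = {a.y, a.w, b.y, b.w};  // the four y offsets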
".SPARSE" : ""}; + const std::string_view type{TextureType(info)}; + const std::string texture{Texture(ctx, info, index)}; + const Register coord_vec{ctx.reg_alloc.Consume(coord)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + if (offset2.IsEmpty()) { + const std::string offset_vec{Offset(ctx, offset)}; + ctx.Add("TXG.F{} {},{},{}.{},{}{};", sparse_mod, ret, coord_vec, texture, comp, type, + offset_vec); + } else { + SwizzleOffsets(ctx, off_x.reg, off_y.reg, offset, offset2); + ctx.Add("TXGO.F{} {},{},{},{},{}.{},{};", sparse_mod, ret, coord_vec, off_x.reg, off_y.reg, + texture, comp, type); + } + StoreSparse(ctx, sparse_inst); +} + +void EmitImageGatherDref(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& offset, const IR::Value& offset2, + const IR::Value& dref) { + // FIXME: This instruction is not working as expected + + // Allocate offsets early so they don't overwrite any consumed register + const auto [off_x, off_y]{AllocOffsetsRegs(ctx, offset2)}; + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + const std::string_view sparse_mod{sparse_inst ? ".SPARSE" : ""}; + const std::string_view type{TextureType(info)}; + const std::string texture{Texture(ctx, info, index)}; + const Register coord_vec{ctx.reg_alloc.Consume(coord)}; + const ScalarF32 dref_value{ctx.reg_alloc.Consume(dref)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + std::string args; + switch (info.type) { + case TextureType::Color2D: + ctx.Add("MOV.F {}.z,{};", coord_vec, dref_value); + args = fmt::to_string(coord_vec); + break; + case TextureType::ColorArray2D: + case TextureType::ColorCube: + ctx.Add("MOV.F {}.w,{};", coord_vec, dref_value); + args = fmt::to_string(coord_vec); + break; + case TextureType::ColorArrayCube: + args = fmt::format("{},{}", coord_vec, dref_value); + break; + default: + throw NotImplementedException("Invalid type {}", info.type.Value()); + } + if (offset2.IsEmpty()) { + const std::string offset_vec{Offset(ctx, offset)}; + ctx.Add("TXG.F{} {},{},{},{}{};", sparse_mod, ret, args, texture, type, offset_vec); + } else { + SwizzleOffsets(ctx, off_x.reg, off_y.reg, offset, offset2); + ctx.Add("TXGO.F{} {},{},{},{},{},{};", sparse_mod, ret, args, off_x.reg, off_y.reg, texture, + type); + } + StoreSparse(ctx, sparse_inst); +} + +void EmitImageFetch(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& offset, ScalarS32 lod, ScalarS32 ms) { + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + const std::string_view sparse_mod{sparse_inst ? 
".SPARSE" : ""}; + const std::string_view type{TextureType(info)}; + const std::string texture{Texture(ctx, info, index)}; + const std::string offset_vec{Offset(ctx, offset)}; + const auto [coord_vec, coord_alloc]{Coord(ctx, coord)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + if (info.type == TextureType::Buffer) { + ctx.Add("TXF.F{} {},{},{},{}{};", sparse_mod, ret, coord_vec, texture, type, offset_vec); + } else if (ms.type != Type::Void) { + ctx.Add("MOV.S {}.w,{};" + "TXFMS.F{} {},{},{},{}{};", + coord_vec, ms, sparse_mod, ret, coord_vec, texture, type, offset_vec); + } else { + ctx.Add("MOV.S {}.w,{};" + "TXF.F{} {},{},{},{}{};", + coord_vec, lod, sparse_mod, ret, coord_vec, texture, type, offset_vec); + } + StoreSparse(ctx, sparse_inst); +} + +void EmitImageQueryDimensions(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + ScalarS32 lod) { + const auto info{inst.Flags()}; + const std::string texture{Texture(ctx, info, index)}; + const std::string_view type{TextureType(info)}; + ctx.Add("TXQ {},{},{},{};", inst, lod, texture, type); +} + +void EmitImageQueryLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord) { + const auto info{inst.Flags()}; + const std::string texture{Texture(ctx, info, index)}; + const std::string_view type{TextureType(info)}; + ctx.Add("LOD.F {},{},{},{};", inst, coord, texture, type); +} + +void EmitImageGradient(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& derivatives, + const IR::Value& offset, const IR::Value& lod_clamp) { + const auto info{inst.Flags()}; + ScopedRegister dpdx, dpdy; + const bool multi_component{info.num_derivates > 1 || info.has_lod_clamp}; + if (multi_component) { + // Allocate this early to avoid aliasing other registers + dpdx = ScopedRegister{ctx.reg_alloc}; + dpdy = ScopedRegister{ctx.reg_alloc}; + } + const auto sparse_inst{PrepareSparse(inst)}; + const std::string_view sparse_mod{sparse_inst ? ".SPARSE" : ""}; + const std::string_view type{TextureType(info)}; + const std::string texture{Texture(ctx, info, index)}; + const std::string offset_vec{GradOffset(offset)}; + const Register coord_vec{ctx.reg_alloc.Consume(coord)}; + const Register derivatives_vec{ctx.reg_alloc.Consume(derivatives)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + if (multi_component) { + ctx.Add("MOV.F {}.x,{}.x;" + "MOV.F {}.y,{}.z;" + "MOV.F {}.x,{}.y;" + "MOV.F {}.y,{}.w;", + dpdx.reg, derivatives_vec, dpdx.reg, derivatives_vec, dpdy.reg, derivatives_vec, + dpdy.reg, derivatives_vec); + if (info.has_lod_clamp) { + const ScalarF32 lod_clamp_value{ctx.reg_alloc.Consume(lod_clamp)}; + ctx.Add("MOV.F {}.w,{};" + "TXD.F.LODCLAMP{} {},{},{},{},{},{}{};", + dpdy.reg, lod_clamp_value, sparse_mod, ret, coord_vec, dpdx.reg, dpdy.reg, + texture, type, offset_vec); + } else { + ctx.Add("TXD.F{} {},{},{},{},{},{}{};", sparse_mod, ret, coord_vec, dpdx.reg, dpdy.reg, + texture, type, offset_vec); + } + } else { + ctx.Add("TXD.F{} {},{},{}.x,{}.y,{},{}{};", sparse_mod, ret, coord_vec, derivatives_vec, + derivatives_vec, texture, type, offset_vec); + } + StoreSparse(ctx, sparse_inst); +} + +void EmitImageRead(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord) { + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + const std::string_view format{FormatStorage(info.image_format)}; + const std::string_view sparse_mod{sparse_inst ? 
".SPARSE" : ""}; + const std::string_view type{TextureType(info)}; + const std::string image{Image(ctx, info, index)}; + const Register ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("LOADIM.{}{} {},{},{},{};", format, sparse_mod, ret, coord, image, type); + StoreSparse(ctx, sparse_inst); +} + +void EmitImageWrite(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + Register color) { + const auto info{inst.Flags()}; + const std::string_view format{FormatStorage(info.image_format)}; + const std::string_view type{TextureType(info)}; + const std::string image{Image(ctx, info, index)}; + ctx.Add("STOREIM.{} {},{},{},{};", format, image, color, coord, type); +} + +void EmitImageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "ADD.U32"); +} + +void EmitImageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarS32 value) { + ImageAtomic(ctx, inst, index, coord, value, "MIN.S32"); +} + +void EmitImageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "MIN.U32"); +} + +void EmitImageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarS32 value) { + ImageAtomic(ctx, inst, index, coord, value, "MAX.S32"); +} + +void EmitImageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "MAX.U32"); +} + +void EmitImageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "IWRAP.U32"); +} + +void EmitImageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "DWRAP.U32"); +} + +void EmitImageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "AND.U32"); +} + +void EmitImageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "OR.U32"); +} + +void EmitImageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "XOR.U32"); +} + +void EmitImageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + Register coord, ScalarU32 value) { + ImageAtomic(ctx, inst, index, coord, value, "EXCH.U32"); +} + +void EmitBindlessImageSampleImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageSampleExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageSampleDrefImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageSampleDrefExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageGather(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageGatherDref(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageFetch(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageQueryDimensions(EmitContext&) { + throw 
LogicError("Unreachable instruction"); +} + +void EmitBindlessImageQueryLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageGradient(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageRead(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageWrite(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageSampleImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageSampleExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageSampleDrefImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageSampleDrefExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageGather(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageGatherDref(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageFetch(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageQueryDimensions(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageQueryLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageGradient(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageRead(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageWrite(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicIAdd32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicSMin32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicUMin32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicSMax32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicUMax32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicInc32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicDec32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicAnd32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicOr32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicXor32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBindlessImageAtomicExchange32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicIAdd32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicSMin32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicUMin32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicSMax32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicUMax32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicInc32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicDec32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicAnd32(EmitContext&) { + throw 
LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicOr32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicXor32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitBoundImageAtomicExchange32(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_instructions.h b/src/shader_recompiler/backend/glasm/emit_glasm_instructions.h new file mode 100755 index 000000000..12afda43b --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_instructions.h @@ -0,0 +1,625 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include "common/common_types.h" +#include "shader_recompiler/backend/glasm/reg_alloc.h" + +namespace Shader::IR { +enum class Attribute : u64; +enum class Patch : u64; +class Inst; +class Value; +} // namespace Shader::IR + +namespace Shader::Backend::GLASM { + +class EmitContext; + +// Microinstruction emitters +void EmitPhi(EmitContext& ctx, IR::Inst& inst); +void EmitVoid(EmitContext& ctx); +void EmitIdentity(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitConditionRef(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitReference(EmitContext&, const IR::Value& value); +void EmitPhiMove(EmitContext& ctx, const IR::Value& phi, const IR::Value& value); +void EmitJoin(EmitContext& ctx); +void EmitDemoteToHelperInvocation(EmitContext& ctx); +void EmitBarrier(EmitContext& ctx); +void EmitWorkgroupMemoryBarrier(EmitContext& ctx); +void EmitDeviceMemoryBarrier(EmitContext& ctx); +void EmitPrologue(EmitContext& ctx); +void EmitEpilogue(EmitContext& ctx); +void EmitEmitVertex(EmitContext& ctx, ScalarS32 stream); +void EmitEndPrimitive(EmitContext& ctx, const IR::Value& stream); +void EmitGetRegister(EmitContext& ctx); +void EmitSetRegister(EmitContext& ctx); +void EmitGetPred(EmitContext& ctx); +void EmitSetPred(EmitContext& ctx); +void EmitSetGotoVariable(EmitContext& ctx); +void EmitGetGotoVariable(EmitContext& ctx); +void EmitSetIndirectBranchVariable(EmitContext& ctx); +void EmitGetIndirectBranchVariable(EmitContext& ctx); +void EmitGetCbufU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset); +void EmitGetCbufS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset); +void EmitGetCbufU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset); +void EmitGetCbufS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset); +void EmitGetCbufU32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset); +void EmitGetCbufF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset); +void EmitGetCbufU32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset); +void EmitGetAttribute(EmitContext& ctx, IR::Inst& inst, IR::Attribute attr, ScalarU32 vertex); +void EmitSetAttribute(EmitContext& ctx, IR::Attribute attr, ScalarF32 value, ScalarU32 vertex); +void EmitGetAttributeIndexed(EmitContext& ctx, IR::Inst& inst, ScalarS32 offset, ScalarU32 vertex); +void EmitSetAttributeIndexed(EmitContext& ctx, ScalarU32 offset, ScalarF32 value, ScalarU32 vertex); +void EmitGetPatch(EmitContext& ctx, IR::Inst& inst, IR::Patch patch); +void EmitSetPatch(EmitContext& ctx, IR::Patch patch, ScalarF32 value); +void 
EmitSetFragColor(EmitContext& ctx, u32 index, u32 component, ScalarF32 value); +void EmitSetSampleMask(EmitContext& ctx, ScalarS32 value); +void EmitSetFragDepth(EmitContext& ctx, ScalarF32 value); +void EmitGetZFlag(EmitContext& ctx); +void EmitGetSFlag(EmitContext& ctx); +void EmitGetCFlag(EmitContext& ctx); +void EmitGetOFlag(EmitContext& ctx); +void EmitSetZFlag(EmitContext& ctx); +void EmitSetSFlag(EmitContext& ctx); +void EmitSetCFlag(EmitContext& ctx); +void EmitSetOFlag(EmitContext& ctx); +void EmitWorkgroupId(EmitContext& ctx, IR::Inst& inst); +void EmitLocalInvocationId(EmitContext& ctx, IR::Inst& inst); +void EmitInvocationId(EmitContext& ctx, IR::Inst& inst); +void EmitSampleId(EmitContext& ctx, IR::Inst& inst); +void EmitIsHelperInvocation(EmitContext& ctx, IR::Inst& inst); +void EmitYDirection(EmitContext& ctx, IR::Inst& inst); +void EmitLoadLocal(EmitContext& ctx, IR::Inst& inst, ScalarU32 word_offset); +void EmitWriteLocal(EmitContext& ctx, ScalarU32 word_offset, ScalarU32 value); +void EmitUndefU1(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU8(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU16(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU32(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU64(EmitContext& ctx, IR::Inst& inst); +void EmitLoadGlobalU8(EmitContext& ctx, IR::Inst& inst, Register address); +void EmitLoadGlobalS8(EmitContext& ctx, IR::Inst& inst, Register address); +void EmitLoadGlobalU16(EmitContext& ctx, IR::Inst& inst, Register address); +void EmitLoadGlobalS16(EmitContext& ctx, IR::Inst& inst, Register address); +void EmitLoadGlobal32(EmitContext& ctx, IR::Inst& inst, Register address); +void EmitLoadGlobal64(EmitContext& ctx, IR::Inst& inst, Register address); +void EmitLoadGlobal128(EmitContext& ctx, IR::Inst& inst, Register address); +void EmitWriteGlobalU8(EmitContext& ctx, Register address, Register value); +void EmitWriteGlobalS8(EmitContext& ctx, Register address, Register value); +void EmitWriteGlobalU16(EmitContext& ctx, Register address, Register value); +void EmitWriteGlobalS16(EmitContext& ctx, Register address, Register value); +void EmitWriteGlobal32(EmitContext& ctx, Register address, ScalarU32 value); +void EmitWriteGlobal64(EmitContext& ctx, Register address, Register value); +void EmitWriteGlobal128(EmitContext& ctx, Register address, Register value); +void EmitLoadStorageU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset); +void EmitLoadStorageS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset); +void EmitLoadStorageU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset); +void EmitLoadStorageS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset); +void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset); +void EmitLoadStorage64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset); +void EmitLoadStorage128(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset); +void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarU32 value); +void EmitWriteStorageS8(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarS32 value); +void EmitWriteStorageU16(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarU32 value); +void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarS32 value); +void 
EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarU32 value); +void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + Register value); +void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + Register value); +void EmitLoadSharedU8(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset); +void EmitLoadSharedS8(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset); +void EmitLoadSharedU16(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset); +void EmitLoadSharedS16(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset); +void EmitLoadSharedU32(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset); +void EmitLoadSharedU64(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset); +void EmitLoadSharedU128(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset); +void EmitWriteSharedU8(EmitContext& ctx, ScalarU32 offset, ScalarU32 value); +void EmitWriteSharedU16(EmitContext& ctx, ScalarU32 offset, ScalarU32 value); +void EmitWriteSharedU32(EmitContext& ctx, ScalarU32 offset, ScalarU32 value); +void EmitWriteSharedU64(EmitContext& ctx, ScalarU32 offset, Register value); +void EmitWriteSharedU128(EmitContext& ctx, ScalarU32 offset, Register value); +void EmitCompositeConstructU32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2); +void EmitCompositeConstructU32x3(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2, const IR::Value& e3); +void EmitCompositeConstructU32x4(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2, const IR::Value& e3, const IR::Value& e4); +void EmitCompositeExtractU32x2(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index); +void EmitCompositeExtractU32x3(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index); +void EmitCompositeExtractU32x4(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index); +void EmitCompositeInsertU32x2(EmitContext& ctx, Register composite, ScalarU32 object, u32 index); +void EmitCompositeInsertU32x3(EmitContext& ctx, Register composite, ScalarU32 object, u32 index); +void EmitCompositeInsertU32x4(EmitContext& ctx, Register composite, ScalarU32 object, u32 index); +void EmitCompositeConstructF16x2(EmitContext& ctx, Register e1, Register e2); +void EmitCompositeConstructF16x3(EmitContext& ctx, Register e1, Register e2, Register e3); +void EmitCompositeConstructF16x4(EmitContext& ctx, Register e1, Register e2, Register e3, + Register e4); +void EmitCompositeExtractF16x2(EmitContext& ctx, Register composite, u32 index); +void EmitCompositeExtractF16x3(EmitContext& ctx, Register composite, u32 index); +void EmitCompositeExtractF16x4(EmitContext& ctx, Register composite, u32 index); +void EmitCompositeInsertF16x2(EmitContext& ctx, Register composite, Register object, u32 index); +void EmitCompositeInsertF16x3(EmitContext& ctx, Register composite, Register object, u32 index); +void EmitCompositeInsertF16x4(EmitContext& ctx, Register composite, Register object, u32 index); +void EmitCompositeConstructF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2); +void EmitCompositeConstructF32x3(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2, const IR::Value& e3); +void EmitCompositeConstructF32x4(EmitContext& ctx, IR::Inst& inst, const IR::Value& e1, + const IR::Value& e2, const IR::Value& e3, const IR::Value& e4); +void EmitCompositeExtractF32x2(EmitContext& ctx, IR::Inst& inst, Register composite, u32 
index); +void EmitCompositeExtractF32x3(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index); +void EmitCompositeExtractF32x4(EmitContext& ctx, IR::Inst& inst, Register composite, u32 index); +void EmitCompositeInsertF32x2(EmitContext& ctx, IR::Inst& inst, Register composite, + ScalarF32 object, u32 index); +void EmitCompositeInsertF32x3(EmitContext& ctx, IR::Inst& inst, Register composite, + ScalarF32 object, u32 index); +void EmitCompositeInsertF32x4(EmitContext& ctx, IR::Inst& inst, Register composite, + ScalarF32 object, u32 index); +void EmitCompositeConstructF64x2(EmitContext& ctx); +void EmitCompositeConstructF64x3(EmitContext& ctx); +void EmitCompositeConstructF64x4(EmitContext& ctx); +void EmitCompositeExtractF64x2(EmitContext& ctx); +void EmitCompositeExtractF64x3(EmitContext& ctx); +void EmitCompositeExtractF64x4(EmitContext& ctx); +void EmitCompositeInsertF64x2(EmitContext& ctx, Register composite, Register object, u32 index); +void EmitCompositeInsertF64x3(EmitContext& ctx, Register composite, Register object, u32 index); +void EmitCompositeInsertF64x4(EmitContext& ctx, Register composite, Register object, u32 index); +void EmitSelectU1(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, ScalarS32 true_value, + ScalarS32 false_value); +void EmitSelectU8(EmitContext& ctx, ScalarS32 cond, ScalarS32 true_value, ScalarS32 false_value); +void EmitSelectU16(EmitContext& ctx, ScalarS32 cond, ScalarS32 true_value, ScalarS32 false_value); +void EmitSelectU32(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, ScalarS32 true_value, + ScalarS32 false_value); +void EmitSelectU64(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, Register true_value, + Register false_value); +void EmitSelectF16(EmitContext& ctx, ScalarS32 cond, Register true_value, Register false_value); +void EmitSelectF32(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, ScalarS32 true_value, + ScalarS32 false_value); +void EmitSelectF64(EmitContext& ctx, ScalarS32 cond, Register true_value, Register false_value); +void EmitBitCastU16F16(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitBitCastU32F32(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitBitCastU64F64(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitBitCastF16U16(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitBitCastF32U32(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitBitCastF64U64(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitPackUint2x32(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitUnpackUint2x32(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitPackFloat2x16(EmitContext& ctx, Register value); +void EmitUnpackFloat2x16(EmitContext& ctx, Register value); +void EmitPackHalf2x16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitUnpackHalf2x16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitPackDouble2x32(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitUnpackDouble2x32(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitGetZeroFromOp(EmitContext& ctx); +void EmitGetSignFromOp(EmitContext& ctx); +void EmitGetCarryFromOp(EmitContext& ctx); +void EmitGetOverflowFromOp(EmitContext& ctx); +void EmitGetSparseFromOp(EmitContext& ctx); +void EmitGetInBoundsFromOp(EmitContext& ctx); +void EmitFPAbs16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitFPAbs32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPAbs64(EmitContext& ctx, IR::Inst& 
inst, ScalarF64 value); +void EmitFPAdd16(EmitContext& ctx, IR::Inst& inst, Register a, Register b); +void EmitFPAdd32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b); +void EmitFPAdd64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b); +void EmitFPFma16(EmitContext& ctx, IR::Inst& inst, Register a, Register b, Register c); +void EmitFPFma32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b, ScalarF32 c); +void EmitFPFma64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b, ScalarF64 c); +void EmitFPMax32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b); +void EmitFPMax64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b); +void EmitFPMin32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b); +void EmitFPMin64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b); +void EmitFPMul16(EmitContext& ctx, IR::Inst& inst, Register a, Register b); +void EmitFPMul32(EmitContext& ctx, IR::Inst& inst, ScalarF32 a, ScalarF32 b); +void EmitFPMul64(EmitContext& ctx, IR::Inst& inst, ScalarF64 a, ScalarF64 b); +void EmitFPNeg16(EmitContext& ctx, Register value); +void EmitFPNeg32(EmitContext& ctx, IR::Inst& inst, ScalarRegister value); +void EmitFPNeg64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitFPSin(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPCos(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPExp2(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPLog2(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPRecip32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPRecip64(EmitContext& ctx, Register value); +void EmitFPRecipSqrt32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPRecipSqrt64(EmitContext& ctx, Register value); +void EmitFPSqrt(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPSaturate16(EmitContext& ctx, Register value); +void EmitFPSaturate32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPSaturate64(EmitContext& ctx, Register value); +void EmitFPClamp16(EmitContext& ctx, Register value, Register min_value, Register max_value); +void EmitFPClamp32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value, ScalarF32 min_value, + ScalarF32 max_value); +void EmitFPClamp64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value, ScalarF64 min_value, + ScalarF64 max_value); +void EmitFPRoundEven16(EmitContext& ctx, Register value); +void EmitFPRoundEven32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPRoundEven64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitFPFloor16(EmitContext& ctx, Register value); +void EmitFPFloor32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPFloor64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitFPCeil16(EmitContext& ctx, Register value); +void EmitFPCeil32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPCeil64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitFPTrunc16(EmitContext& ctx, Register value); +void EmitFPTrunc32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPTrunc64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitFPOrdEqual16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPOrdEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPOrdEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPUnordEqual16(EmitContext& ctx, Register lhs, Register rhs); +void 
EmitFPUnordEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPUnordEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPOrdNotEqual16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPOrdNotEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPOrdNotEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPUnordNotEqual16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPUnordNotEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPUnordNotEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPOrdLessThan16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPOrdLessThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPOrdLessThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPUnordLessThan16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPUnordLessThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPUnordLessThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPOrdGreaterThan16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPOrdGreaterThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPOrdGreaterThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPUnordGreaterThan16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPUnordGreaterThan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPUnordGreaterThan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPOrdLessThanEqual16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPOrdLessThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPOrdLessThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPUnordLessThanEqual16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPUnordLessThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPUnordLessThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPOrdGreaterThanEqual16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPOrdGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPOrdGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPUnordGreaterThanEqual16(EmitContext& ctx, Register lhs, Register rhs); +void EmitFPUnordGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, ScalarF32 lhs, ScalarF32 rhs); +void EmitFPUnordGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, ScalarF64 lhs, ScalarF64 rhs); +void EmitFPIsNan16(EmitContext& ctx, Register value); +void EmitFPIsNan32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitFPIsNan64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitIAdd32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitIAdd64(EmitContext& ctx, IR::Inst& inst, Register a, Register b); +void EmitISub32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitISub64(EmitContext& ctx, IR::Inst& inst, Register a, Register b); +void EmitIMul32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitINeg32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitINeg64(EmitContext& ctx, IR::Inst& inst, 
Register value); +void EmitIAbs32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitShiftLeftLogical32(EmitContext& ctx, IR::Inst& inst, ScalarU32 base, ScalarU32 shift); +void EmitShiftLeftLogical64(EmitContext& ctx, IR::Inst& inst, ScalarRegister base, ScalarU32 shift); +void EmitShiftRightLogical32(EmitContext& ctx, IR::Inst& inst, ScalarU32 base, ScalarU32 shift); +void EmitShiftRightLogical64(EmitContext& ctx, IR::Inst& inst, ScalarRegister base, + ScalarU32 shift); +void EmitShiftRightArithmetic32(EmitContext& ctx, IR::Inst& inst, ScalarS32 base, ScalarS32 shift); +void EmitShiftRightArithmetic64(EmitContext& ctx, IR::Inst& inst, ScalarRegister base, + ScalarS32 shift); +void EmitBitwiseAnd32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitBitwiseOr32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitBitwiseXor32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitBitFieldInsert(EmitContext& ctx, IR::Inst& inst, ScalarS32 base, ScalarS32 insert, + ScalarS32 offset, ScalarS32 count); +void EmitBitFieldSExtract(EmitContext& ctx, IR::Inst& inst, ScalarS32 base, ScalarS32 offset, + ScalarS32 count); +void EmitBitFieldUExtract(EmitContext& ctx, IR::Inst& inst, ScalarU32 base, ScalarU32 offset, + ScalarU32 count); +void EmitBitReverse32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitBitCount32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitBitwiseNot32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitFindSMsb32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitFindUMsb32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value); +void EmitSMin32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitUMin32(EmitContext& ctx, IR::Inst& inst, ScalarU32 a, ScalarU32 b); +void EmitSMax32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitUMax32(EmitContext& ctx, IR::Inst& inst, ScalarU32 a, ScalarU32 b); +void EmitSClamp32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value, ScalarS32 min, ScalarS32 max); +void EmitUClamp32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 min, ScalarU32 max); +void EmitSLessThan(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs); +void EmitULessThan(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs); +void EmitIEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs); +void EmitSLessThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs); +void EmitULessThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs); +void EmitSGreaterThan(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs); +void EmitUGreaterThan(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs); +void EmitINotEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs); +void EmitSGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs); +void EmitUGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs); +void EmitSharedAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicSMin32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarS32 value); +void EmitSharedAtomicUMin32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicSMax32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarS32 value); +void EmitSharedAtomicUMax32(EmitContext& ctx, 
IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicInc32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicDec32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicAnd32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicOr32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicXor32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value); +void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + Register value); +void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarS32 value); +void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarS32 value); +void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value); +void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarF32 value); +void EmitStorageAtomicAddF16x2(EmitContext& ctx, 
IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicAddF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicMinF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitStorageAtomicMaxF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value); +void EmitGlobalAtomicIAdd32(EmitContext& ctx); +void EmitGlobalAtomicSMin32(EmitContext& ctx); +void EmitGlobalAtomicUMin32(EmitContext& ctx); +void EmitGlobalAtomicSMax32(EmitContext& ctx); +void EmitGlobalAtomicUMax32(EmitContext& ctx); +void EmitGlobalAtomicInc32(EmitContext& ctx); +void EmitGlobalAtomicDec32(EmitContext& ctx); +void EmitGlobalAtomicAnd32(EmitContext& ctx); +void EmitGlobalAtomicOr32(EmitContext& ctx); +void EmitGlobalAtomicXor32(EmitContext& ctx); +void EmitGlobalAtomicExchange32(EmitContext& ctx); +void EmitGlobalAtomicIAdd64(EmitContext& ctx); +void EmitGlobalAtomicSMin64(EmitContext& ctx); +void EmitGlobalAtomicUMin64(EmitContext& ctx); +void EmitGlobalAtomicSMax64(EmitContext& ctx); +void EmitGlobalAtomicUMax64(EmitContext& ctx); +void EmitGlobalAtomicInc64(EmitContext& ctx); +void EmitGlobalAtomicDec64(EmitContext& ctx); +void EmitGlobalAtomicAnd64(EmitContext& ctx); +void EmitGlobalAtomicOr64(EmitContext& ctx); +void EmitGlobalAtomicXor64(EmitContext& ctx); +void EmitGlobalAtomicExchange64(EmitContext& ctx); +void EmitGlobalAtomicAddF32(EmitContext& ctx); +void EmitGlobalAtomicAddF16x2(EmitContext& ctx); +void EmitGlobalAtomicAddF32x2(EmitContext& ctx); +void EmitGlobalAtomicMinF16x2(EmitContext& ctx); +void EmitGlobalAtomicMinF32x2(EmitContext& ctx); +void EmitGlobalAtomicMaxF16x2(EmitContext& ctx); +void EmitGlobalAtomicMaxF32x2(EmitContext& ctx); +void EmitLogicalOr(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitLogicalAnd(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitLogicalXor(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b); +void EmitLogicalNot(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitConvertS16F16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertS16F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertS16F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitConvertS32F16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertS32F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertS32F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitConvertS64F16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertS64F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertS64F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitConvertU16F16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertU16F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertU16F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitConvertU32F16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertU32F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertU32F64(EmitContext& ctx, IR::Inst& 
inst, ScalarF64 value); +void EmitConvertU64F16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertU64F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertU64F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitConvertU64U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value); +void EmitConvertU32U64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF16F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertF32F16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF32F64(EmitContext& ctx, IR::Inst& inst, ScalarF64 value); +void EmitConvertF64F32(EmitContext& ctx, IR::Inst& inst, ScalarF32 value); +void EmitConvertF16S8(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF16S16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF16S32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitConvertF16S64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF16U8(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF16U16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF16U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value); +void EmitConvertF16U64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF32S8(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF32S16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF32S32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitConvertF32S64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF32U8(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF32U16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF32U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value); +void EmitConvertF32U64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF64S8(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF64S16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF64S32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value); +void EmitConvertF64S64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF64U8(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF64U16(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitConvertF64U32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value); +void EmitConvertF64U64(EmitContext& ctx, IR::Inst& inst, Register value); +void EmitBindlessImageSampleImplicitLod(EmitContext&); +void EmitBindlessImageSampleExplicitLod(EmitContext&); +void EmitBindlessImageSampleDrefImplicitLod(EmitContext&); +void EmitBindlessImageSampleDrefExplicitLod(EmitContext&); +void EmitBindlessImageGather(EmitContext&); +void EmitBindlessImageGatherDref(EmitContext&); +void EmitBindlessImageFetch(EmitContext&); +void EmitBindlessImageQueryDimensions(EmitContext&); +void EmitBindlessImageQueryLod(EmitContext&); +void EmitBindlessImageGradient(EmitContext&); +void EmitBindlessImageRead(EmitContext&); +void EmitBindlessImageWrite(EmitContext&); +void EmitBoundImageSampleImplicitLod(EmitContext&); +void EmitBoundImageSampleExplicitLod(EmitContext&); +void EmitBoundImageSampleDrefImplicitLod(EmitContext&); +void EmitBoundImageSampleDrefExplicitLod(EmitContext&); +void EmitBoundImageGather(EmitContext&); +void EmitBoundImageGatherDref(EmitContext&); +void EmitBoundImageFetch(EmitContext&); +void EmitBoundImageQueryDimensions(EmitContext&); +void EmitBoundImageQueryLod(EmitContext&); 
+void EmitBoundImageGradient(EmitContext&); +void EmitBoundImageRead(EmitContext&); +void EmitBoundImageWrite(EmitContext&); +void EmitImageSampleImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, Register bias_lc, const IR::Value& offset); +void EmitImageSampleExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, ScalarF32 lod, const IR::Value& offset); +void EmitImageSampleDrefImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& dref, + const IR::Value& bias_lc, const IR::Value& offset); +void EmitImageSampleDrefExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& dref, + const IR::Value& lod, const IR::Value& offset); +void EmitImageGather(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& offset, const IR::Value& offset2); +void EmitImageGatherDref(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& offset, const IR::Value& offset2, + const IR::Value& dref); +void EmitImageFetch(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& offset, ScalarS32 lod, ScalarS32 ms); +void EmitImageQueryDimensions(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + ScalarS32 lod); +void EmitImageQueryLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord); +void EmitImageGradient(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + const IR::Value& coord, const IR::Value& derivatives, + const IR::Value& offset, const IR::Value& lod_clamp); +void EmitImageRead(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord); +void EmitImageWrite(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + Register color); +void EmitBindlessImageAtomicIAdd32(EmitContext&); +void EmitBindlessImageAtomicSMin32(EmitContext&); +void EmitBindlessImageAtomicUMin32(EmitContext&); +void EmitBindlessImageAtomicSMax32(EmitContext&); +void EmitBindlessImageAtomicUMax32(EmitContext&); +void EmitBindlessImageAtomicInc32(EmitContext&); +void EmitBindlessImageAtomicDec32(EmitContext&); +void EmitBindlessImageAtomicAnd32(EmitContext&); +void EmitBindlessImageAtomicOr32(EmitContext&); +void EmitBindlessImageAtomicXor32(EmitContext&); +void EmitBindlessImageAtomicExchange32(EmitContext&); +void EmitBoundImageAtomicIAdd32(EmitContext&); +void EmitBoundImageAtomicSMin32(EmitContext&); +void EmitBoundImageAtomicUMin32(EmitContext&); +void EmitBoundImageAtomicSMax32(EmitContext&); +void EmitBoundImageAtomicUMax32(EmitContext&); +void EmitBoundImageAtomicInc32(EmitContext&); +void EmitBoundImageAtomicDec32(EmitContext&); +void EmitBoundImageAtomicAnd32(EmitContext&); +void EmitBoundImageAtomicOr32(EmitContext&); +void EmitBoundImageAtomicXor32(EmitContext&); +void EmitBoundImageAtomicExchange32(EmitContext&); +void EmitImageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarS32 value); +void EmitImageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarS32 value); +void 
EmitImageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, Register coord, + ScalarU32 value); +void EmitImageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + Register coord, ScalarU32 value); +void EmitLaneId(EmitContext& ctx, IR::Inst& inst); +void EmitVoteAll(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred); +void EmitVoteAny(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred); +void EmitVoteEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred); +void EmitSubgroupBallot(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred); +void EmitSubgroupEqMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupLtMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupLeMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupGtMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupGeMask(EmitContext& ctx, IR::Inst& inst); +void EmitShuffleIndex(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask); +void EmitShuffleUp(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask); +void EmitShuffleDown(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask); +void EmitShuffleButterfly(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask); +void EmitFSwizzleAdd(EmitContext& ctx, IR::Inst& inst, ScalarF32 op_a, ScalarF32 op_b, + ScalarU32 swizzle); +void EmitDPdxFine(EmitContext& ctx, IR::Inst& inst, ScalarF32 op_a); +void EmitDPdyFine(EmitContext& ctx, IR::Inst& inst, ScalarF32 op_a); +void EmitDPdxCoarse(EmitContext& ctx, IR::Inst& inst, ScalarF32 op_a); +void EmitDPdyCoarse(EmitContext& ctx, IR::Inst& inst, ScalarF32 op_a); + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_integer.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_integer.cpp new file mode 100755 index 000000000..f55c26b76 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_integer.cpp @@ -0,0 +1,294 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
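+// The integer emitters below fold the IR's GetZeroFromOp/GetSignFromOp/
+// GetCarryFromOp/GetOverflowFromOp pseudo-instructions into the defining
+// instruction: the pseudo-ops are invalidated, then their results are either
+// recomputed from the destination register (SEQ/SLT against zero) or read back
+// from the condition-code flags written by a ".CC"-modified ADD.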
+ +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLASM { +namespace { +void BitwiseLogicalOp(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b, + std::string_view lop) { + const auto zero = inst.GetAssociatedPseudoOperation(IR::Opcode::GetZeroFromOp); + const auto sign = inst.GetAssociatedPseudoOperation(IR::Opcode::GetSignFromOp); + if (zero) { + zero->Invalidate(); + } + if (sign) { + sign->Invalidate(); + } + if (zero || sign) { + ctx.reg_alloc.InvalidateConditionCodes(); + } + const auto ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("{}.S {}.x,{},{};", lop, ret, a, b); + if (zero) { + ctx.Add("SEQ.S {},{},0;", *zero, ret); + } + if (sign) { + ctx.Add("SLT.S {},{},0;", *sign, ret); + } +} +} // Anonymous namespace + +void EmitIAdd32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + const std::array flags{ + inst.GetAssociatedPseudoOperation(IR::Opcode::GetZeroFromOp), + inst.GetAssociatedPseudoOperation(IR::Opcode::GetSignFromOp), + inst.GetAssociatedPseudoOperation(IR::Opcode::GetCarryFromOp), + inst.GetAssociatedPseudoOperation(IR::Opcode::GetOverflowFromOp), + }; + for (IR::Inst* const flag_inst : flags) { + if (flag_inst) { + flag_inst->Invalidate(); + } + } + const bool cc{inst.HasAssociatedPseudoOperation()}; + const std::string_view cc_mod{cc ? ".CC" : ""}; + if (cc) { + ctx.reg_alloc.InvalidateConditionCodes(); + } + const auto ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("ADD.S{} {}.x,{},{};", cc_mod, ret, a, b); + if (!cc) { + return; + } + static constexpr std::array masks{"", "SF", "CF", "OF"}; + for (size_t flag_index = 0; flag_index < flags.size(); ++flag_index) { + if (!flags[flag_index]) { + continue; + } + const auto flag_ret{ctx.reg_alloc.Define(*flags[flag_index])}; + if (flag_index == 0) { + ctx.Add("SEQ.S {}.x,{}.x,0;", flag_ret, ret); + } else { + // We could use conditional execution here, but it's broken on Nvidia's compiler + ctx.Add("IF {}.x;" + "MOV.S {}.x,-1;" + "ELSE;" + "MOV.S {}.x,0;" + "ENDIF;", + masks[flag_index], flag_ret, flag_ret); + } + } +} + +void EmitIAdd64(EmitContext& ctx, IR::Inst& inst, Register a, Register b) { + ctx.LongAdd("ADD.S64 {}.x,{}.x,{}.x;", inst, a, b); +} + +void EmitISub32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + ctx.Add("SUB.S {}.x,{},{};", inst, a, b); +} + +void EmitISub64(EmitContext& ctx, IR::Inst& inst, Register a, Register b) { + ctx.LongAdd("SUB.S64 {}.x,{}.x,{}.x;", inst, a, b); +} + +void EmitIMul32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + ctx.Add("MUL.S {}.x,{},{};", inst, a, b); +} + +void EmitINeg32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + if (value.type != Type::Register && static_cast<s32>(value.imm_u32) < 0) { + ctx.Add("MOV.S {},{};", inst, -static_cast<s32>(value.imm_u32)); + } else { + ctx.Add("MOV.S {},-{};", inst, value); + } +} + +void EmitINeg64(EmitContext& ctx, IR::Inst& inst, Register value) { + ctx.LongAdd("MOV.S64 {},-{};", inst, value); +} + +void EmitIAbs32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + ctx.Add("ABS.S {},{};", inst, value); +} + +void EmitShiftLeftLogical32(EmitContext& ctx, IR::Inst& inst, ScalarU32 base, ScalarU32 shift) { + ctx.Add("SHL.U {}.x,{},{};", inst, base, shift); +} + +void EmitShiftLeftLogical64(EmitContext& ctx, IR::Inst& inst, ScalarRegister base, + ScalarU32 shift) { + ctx.LongAdd("SHL.U64 {}.x,{},{};", inst, base, shift); +} + +void 
EmitShiftRightLogical32(EmitContext& ctx, IR::Inst& inst, ScalarU32 base, ScalarU32 shift) { + ctx.Add("SHR.U {}.x,{},{};", inst, base, shift); +} + +void EmitShiftRightLogical64(EmitContext& ctx, IR::Inst& inst, ScalarRegister base, + ScalarU32 shift) { + ctx.LongAdd("SHR.U64 {}.x,{},{};", inst, base, shift); +} + +void EmitShiftRightArithmetic32(EmitContext& ctx, IR::Inst& inst, ScalarS32 base, ScalarS32 shift) { + ctx.Add("SHR.S {}.x,{},{};", inst, base, shift); +} + +void EmitShiftRightArithmetic64(EmitContext& ctx, IR::Inst& inst, ScalarRegister base, + ScalarS32 shift) { + ctx.LongAdd("SHR.S64 {}.x,{},{};", inst, base, shift); +} + +void EmitBitwiseAnd32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + BitwiseLogicalOp(ctx, inst, a, b, "AND"); +} + +void EmitBitwiseOr32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + BitwiseLogicalOp(ctx, inst, a, b, "OR"); +} + +void EmitBitwiseXor32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + BitwiseLogicalOp(ctx, inst, a, b, "XOR"); +} + +void EmitBitFieldInsert(EmitContext& ctx, IR::Inst& inst, ScalarS32 base, ScalarS32 insert, + ScalarS32 offset, ScalarS32 count) { + const Register ret{ctx.reg_alloc.Define(inst)}; + if (count.type != Type::Register && offset.type != Type::Register) { + ctx.Add("BFI.S {},{{{},{},0,0}},{},{};", ret, count, offset, insert, base); + } else { + ctx.Add("MOV.S RC.x,{};" + "MOV.S RC.y,{};" + "BFI.S {},RC,{},{};", + count, offset, ret, insert, base); + } +} + +void EmitBitFieldSExtract(EmitContext& ctx, IR::Inst& inst, ScalarS32 base, ScalarS32 offset, + ScalarS32 count) { + const Register ret{ctx.reg_alloc.Define(inst)}; + if (count.type != Type::Register && offset.type != Type::Register) { + ctx.Add("BFE.S {},{{{},{},0,0}},{};", ret, count, offset, base); + } else { + ctx.Add("MOV.S RC.x,{};" + "MOV.S RC.y,{};" + "BFE.S {},RC,{};", + count, offset, ret, base); + } +} + +void EmitBitFieldUExtract(EmitContext& ctx, IR::Inst& inst, ScalarU32 base, ScalarU32 offset, + ScalarU32 count) { + const auto zero = inst.GetAssociatedPseudoOperation(IR::Opcode::GetZeroFromOp); + const auto sign = inst.GetAssociatedPseudoOperation(IR::Opcode::GetSignFromOp); + if (zero) { + zero->Invalidate(); + } + if (sign) { + sign->Invalidate(); + } + if (zero || sign) { + ctx.reg_alloc.InvalidateConditionCodes(); + } + const Register ret{ctx.reg_alloc.Define(inst)}; + if (count.type != Type::Register && offset.type != Type::Register) { + ctx.Add("BFE.U {},{{{},{},0,0}},{};", ret, count, offset, base); + } else { + ctx.Add("MOV.U RC.x,{};" + "MOV.U RC.y,{};" + "BFE.U {},RC,{};", + count, offset, ret, base); + } + if (zero) { + ctx.Add("SEQ.S {},{},0;", *zero, ret); + } + if (sign) { + ctx.Add("SLT.S {},{},0;", *sign, ret); + } +} + +void EmitBitReverse32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + ctx.Add("BFR {},{};", inst, value); +} + +void EmitBitCount32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + ctx.Add("BTC {},{};", inst, value); +} + +void EmitBitwiseNot32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + ctx.Add("NOT.S {},{};", inst, value); +} + +void EmitFindSMsb32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + ctx.Add("BTFM.S {},{};", inst, value); +} + +void EmitFindUMsb32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value) { + ctx.Add("BTFM.U {},{};", inst, value); +} + +void EmitSMin32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + ctx.Add("MIN.S {},{},{};", inst, a, b); +} + +void EmitUMin32(EmitContext& ctx, IR::Inst& inst, 
ScalarU32 a, ScalarU32 b) { + ctx.Add("MIN.U {},{},{};", inst, a, b); +} + +void EmitSMax32(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + ctx.Add("MAX.S {},{},{};", inst, a, b); +} + +void EmitUMax32(EmitContext& ctx, IR::Inst& inst, ScalarU32 a, ScalarU32 b) { + ctx.Add("MAX.U {},{},{};", inst, a, b); +} + +void EmitSClamp32(EmitContext& ctx, IR::Inst& inst, ScalarS32 value, ScalarS32 min, ScalarS32 max) { + const Register ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("MIN.S RC.x,{},{};" + "MAX.S {}.x,RC.x,{};", + max, value, ret, min); +} + +void EmitUClamp32(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 min, ScalarU32 max) { + const Register ret{ctx.reg_alloc.Define(inst)}; + ctx.Add("MIN.U RC.x,{},{};" + "MAX.U {}.x,RC.x,{};", + max, value, ret, min); +} + +void EmitSLessThan(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs) { + ctx.Add("SLT.S {}.x,{},{};", inst, lhs, rhs); +} + +void EmitULessThan(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs) { + ctx.Add("SLT.U {}.x,{},{};", inst, lhs, rhs); +} + +void EmitIEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs) { + ctx.Add("SEQ.S {}.x,{},{};", inst, lhs, rhs); +} + +void EmitSLessThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs) { + ctx.Add("SLE.S {}.x,{},{};", inst, lhs, rhs); +} + +void EmitULessThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs) { + ctx.Add("SLE.U {}.x,{},{};", inst, lhs, rhs); +} + +void EmitSGreaterThan(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs) { + ctx.Add("SGT.S {}.x,{},{};", inst, lhs, rhs); +} + +void EmitUGreaterThan(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs) { + ctx.Add("SGT.U {}.x,{},{};", inst, lhs, rhs); +} + +void EmitINotEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs) { + ctx.Add("SNE.U {}.x,{},{};", inst, lhs, rhs); +} + +void EmitSGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 lhs, ScalarS32 rhs) { + ctx.Add("SGE.S {}.x,{},{};", inst, lhs, rhs); +} + +void EmitUGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, ScalarU32 lhs, ScalarU32 rhs) { + ctx.Add("SGE.U {}.x,{},{};", inst, lhs, rhs); +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_logical.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_logical.cpp new file mode 100755 index 000000000..e69de29bb diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp new file mode 100755 index 000000000..af9fac7c1 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp @@ -0,0 +1,568 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
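+// Storage accesses take one of two paths: when the runtime reports native
+// storage buffer support (glasm_use_storage_buffers), they compile directly to
+// LDB/STB/ATOMB on ssbo<N>[...]; otherwise StorageOp/GlobalStorageOp emulate
+// them through a 64-bit pointer held in a constant buffer, bounds-checking
+// (offset < length) before every dereference.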
+ +#include <string_view> + +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/runtime_info.h" + +namespace Shader::Backend::GLASM { +namespace { +void StorageOp(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + std::string_view then_expr, std::string_view else_expr = {}) { + // Operate on bindless SSBO, call the expression with bounds checking + // address = c[binding].xy + // length = c[binding].z + const u32 sb_binding{binding.U32()}; + ctx.Add("PK64.U DC,c[{}];" // pointer = address + "CVT.U64.U32 DC.z,{};" // offset = uint64_t(offset) + "ADD.U64 DC.x,DC.x,DC.z;" // pointer += offset + "SLT.U.CC RC.x,{},c[{}].z;", // cc = offset < length + sb_binding, offset, offset, sb_binding); + if (else_expr.empty()) { + ctx.Add("IF NE.x;{}ENDIF;", then_expr); + } else { + ctx.Add("IF NE.x;{}ELSE;{}ENDIF;", then_expr, else_expr); + } +} + +void GlobalStorageOp(EmitContext& ctx, Register address, bool pointer_based, std::string_view expr, + std::string_view else_expr = {}) { + const size_t num_buffers{ctx.info.storage_buffers_descriptors.size()}; + for (size_t index = 0; index < num_buffers; ++index) { + if (!ctx.info.nvn_buffer_used[index]) { + continue; + } + const auto& ssbo{ctx.info.storage_buffers_descriptors[index]}; + ctx.Add("LDC.U64 DC.x,c{}[{}];" // ssbo_addr + "LDC.U32 RC.x,c{}[{}];" // ssbo_size_u32 + "CVT.U64.U32 DC.y,RC.x;" // ssbo_size = ssbo_size_u32 + "ADD.U64 DC.y,DC.y,DC.x;" // ssbo_end = ssbo_addr + ssbo_size + "SGE.U64 RC.x,{}.x,DC.x;" // a = input_addr >= ssbo_addr ? -1 : 0 + "SLT.U64 RC.y,{}.x,DC.y;" // b = input_addr < ssbo_end ? 
-1 : 0 + "AND.U.CC RC.x,RC.x,RC.y;" // cond = a && b + "IF NE.x;" // if cond + "SUB.U64 DC.x,{}.x,DC.x;", // offset = input_addr - ssbo_addr + ssbo.cbuf_index, ssbo.cbuf_offset, ssbo.cbuf_index, ssbo.cbuf_offset + 8, address, + address, address); + if (pointer_based) { + ctx.Add("PK64.U DC.y,c[{}];" // host_ssbo = cbuf + "ADD.U64 DC.x,DC.x,DC.y;" // host_addr = host_ssbo + offset + "{}" + "ELSE;", + index, expr); + } else { + ctx.Add("CVT.U32.U64 RC.x,DC.x;" + "{},ssbo{}[RC.x];" + "ELSE;", + expr, index); + } + } + if (!else_expr.empty()) { + ctx.Add("{}", else_expr); + } + const size_t num_used_buffers{ctx.info.nvn_buffer_used.count()}; + for (size_t index = 0; index < num_used_buffers; ++index) { + ctx.Add("ENDIF;"); + } +} + +template <typename ValueType> +void Write(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, ValueType value, + std::string_view size) { + if (ctx.runtime_info.glasm_use_storage_buffers) { + ctx.Add("STB.{} {},ssbo{}[{}];", size, value, binding.U32(), offset); + } else { + StorageOp(ctx, binding, offset, fmt::format("STORE.{} {},DC.x;", size, value)); + } +} + +void Load(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset, + std::string_view size) { + const Register ret{ctx.reg_alloc.Define(inst)}; + if (ctx.runtime_info.glasm_use_storage_buffers) { + ctx.Add("LDB.{} {},ssbo{}[{}];", size, ret, binding.U32(), offset); + } else { + StorageOp(ctx, binding, offset, fmt::format("LOAD.{} {},DC.x;", size, ret), + fmt::format("MOV.U {},{{0,0,0,0}};", ret)); + } +} + +template <typename ValueType> +void GlobalWrite(EmitContext& ctx, Register address, ValueType value, std::string_view size) { + if (ctx.runtime_info.glasm_use_storage_buffers) { + GlobalStorageOp(ctx, address, false, fmt::format("STB.{} {}", size, value)); + } else { + GlobalStorageOp(ctx, address, true, fmt::format("STORE.{} {},DC.x;", size, value)); + } +} + +void GlobalLoad(EmitContext& ctx, IR::Inst& inst, Register address, std::string_view size) { + const Register ret{ctx.reg_alloc.Define(inst)}; + if (ctx.runtime_info.glasm_use_storage_buffers) { + GlobalStorageOp(ctx, address, false, fmt::format("LDB.{} {}", size, ret)); + } else { + GlobalStorageOp(ctx, address, true, fmt::format("LOAD.{} {},DC.x;", size, ret), + fmt::format("MOV.S {},0;", ret)); + } +} + +template <typename ValueType> +void Atom(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset, + ValueType value, std::string_view operation, std::string_view size) { + const Register ret{ctx.reg_alloc.Define(inst)}; + if (ctx.runtime_info.glasm_use_storage_buffers) { + ctx.Add("ATOMB.{}.{} {},{},ssbo{}[{}];", operation, size, ret, value, binding.U32(), + offset); + } else { + StorageOp(ctx, binding, offset, + fmt::format("ATOM.{}.{} {},{},DC.x;", operation, size, ret, value)); + } +} +} // Anonymous namespace + +void EmitLoadGlobalU8(EmitContext& ctx, IR::Inst& inst, Register address) { + GlobalLoad(ctx, inst, address, "U8"); +} + +void EmitLoadGlobalS8(EmitContext& ctx, IR::Inst& inst, Register address) { + GlobalLoad(ctx, inst, address, "S8"); +} + +void EmitLoadGlobalU16(EmitContext& ctx, IR::Inst& inst, Register address) { + GlobalLoad(ctx, inst, address, "U16"); +} + +void EmitLoadGlobalS16(EmitContext& ctx, IR::Inst& inst, Register address) { + GlobalLoad(ctx, inst, address, "S16"); +} + +void EmitLoadGlobal32(EmitContext& ctx, IR::Inst& inst, Register address) { + GlobalLoad(ctx, inst, address, "U32"); +} + +void EmitLoadGlobal64(EmitContext& ctx, IR::Inst& inst, Register address) { + GlobalLoad(ctx, inst, address, "U32X2"); +} + +void 
EmitLoadGlobal128(EmitContext& ctx, IR::Inst& inst, Register address) { + GlobalLoad(ctx, inst, address, "U32X4"); +} + +void EmitWriteGlobalU8(EmitContext& ctx, Register address, Register value) { + GlobalWrite(ctx, address, value, "U8"); +} + +void EmitWriteGlobalS8(EmitContext& ctx, Register address, Register value) { + GlobalWrite(ctx, address, value, "S8"); +} + +void EmitWriteGlobalU16(EmitContext& ctx, Register address, Register value) { + GlobalWrite(ctx, address, value, "U16"); +} + +void EmitWriteGlobalS16(EmitContext& ctx, Register address, Register value) { + GlobalWrite(ctx, address, value, "S16"); +} + +void EmitWriteGlobal32(EmitContext& ctx, Register address, ScalarU32 value) { + GlobalWrite(ctx, address, value, "U32"); +} + +void EmitWriteGlobal64(EmitContext& ctx, Register address, Register value) { + GlobalWrite(ctx, address, value, "U32X2"); +} + +void EmitWriteGlobal128(EmitContext& ctx, Register address, Register value) { + GlobalWrite(ctx, address, value, "U32X4"); +} + +void EmitLoadStorageU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset) { + Load(ctx, inst, binding, offset, "U8"); +} + +void EmitLoadStorageS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset) { + Load(ctx, inst, binding, offset, "S8"); +} + +void EmitLoadStorageU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset) { + Load(ctx, inst, binding, offset, "U16"); +} + +void EmitLoadStorageS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset) { + Load(ctx, inst, binding, offset, "S16"); +} + +void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset) { + Load(ctx, inst, binding, offset, "U32"); +} + +void EmitLoadStorage64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset) { + Load(ctx, inst, binding, offset, "U32X2"); +} + +void EmitLoadStorage128(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset) { + Load(ctx, inst, binding, offset, "U32X4"); +} + +void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarU32 value) { + Write(ctx, binding, offset, value, "U8"); +} + +void EmitWriteStorageS8(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarS32 value) { + Write(ctx, binding, offset, value, "S8"); +} + +void EmitWriteStorageU16(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarU32 value) { + Write(ctx, binding, offset, value, "U16"); +} + +void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarS32 value) { + Write(ctx, binding, offset, value, "S16"); +} + +void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + ScalarU32 value) { + Write(ctx, binding, offset, value, "U32"); +} + +void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + Register value) { + Write(ctx, binding, offset, value, "U32X2"); +} + +void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset, + Register value) { + Write(ctx, binding, offset, value, "U32X4"); +} + +void EmitSharedAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.ADD.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicSMin32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarS32 value) { + ctx.Add("ATOMS.MIN.S32 {},{},shared_mem[{}];", 
inst, value, pointer_offset); +} + +void EmitSharedAtomicUMin32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.MIN.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicSMax32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarS32 value) { + ctx.Add("ATOMS.MAX.S32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicUMax32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.MAX.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicInc32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.IWRAP.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicDec32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.DWRAP.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicAnd32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.AND.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicOr32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.OR.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicXor32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.XOR.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + ScalarU32 value) { + ctx.Add("ATOMS.EXCH.U32 {},{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset, + Register value) { + ctx.LongAdd("ATOMS.EXCH.U64 {}.x,{},shared_mem[{}];", inst, value, pointer_offset); +} + +void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "ADD", "U32"); +} + +void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarS32 value) { + Atom(ctx, inst, binding, offset, value, "MIN", "S32"); +} + +void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "MIN", "U32"); +} + +void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarS32 value) { + Atom(ctx, inst, binding, offset, value, "MAX", "S32"); +} + +void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "MAX", "U32"); +} + +void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "IWRAP", "U32"); +} + +void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "DWRAP", "U32"); +} + +void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "AND", "U32"); +} + +void 
EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "OR", "U32"); +} + +void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "XOR", "U32"); +} + +void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarU32 value) { + Atom(ctx, inst, binding, offset, value, "EXCH", "U32"); +} + +void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "ADD", "U64"); +} + +void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "MIN", "S64"); +} + +void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "MIN", "U64"); +} + +void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "MAX", "S64"); +} + +void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "MAX", "U64"); +} + +void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "AND", "U64"); +} + +void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "OR", "U64"); +} + +void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "XOR", "U64"); +} + +void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "EXCH", "U64"); +} + +void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, ScalarF32 value) { + Atom(ctx, inst, binding, offset, value, "ADD", "F32"); +} + +void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "ADD", "F16x2"); +} + +void EmitStorageAtomicAddF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] const IR::Value& binding, + [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, inst, binding, offset, value, "MIN", "F16x2"); +} + +void EmitStorageAtomicMinF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] const IR::Value& binding, + [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + ScalarU32 offset, Register value) { + Atom(ctx, 
inst, binding, offset, value, "MAX", "F16x2"); +} + +void EmitStorageAtomicMaxF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] const IR::Value& binding, + [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicIAdd32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicSMin32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicUMin32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicSMax32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicUMax32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicInc32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicDec32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicAnd32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicOr32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicXor32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicExchange32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicIAdd64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicSMin64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicUMin64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicSMax64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicUMax64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicInc64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicDec64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicAnd64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicOr64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicXor64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicExchange64(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicAddF32(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicAddF16x2(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicAddF32x2(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicMinF16x2(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicMinF32x2(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicMaxF16x2(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitGlobalAtomicMaxF32x2(EmitContext&) { + throw NotImplementedException("GLASM instruction"); +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_not_implemented.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_not_implemented.cpp new file mode 
100755 index 000000000..ff64c6924 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_not_implemented.cpp @@ -0,0 +1,273 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <string_view> + +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/frontend/ir/value.h" + +#ifdef _MSC_VER +#pragma warning(disable : 4100) +#endif + +namespace Shader::Backend::GLASM { + +#define NotImplemented() throw NotImplementedException("GLASM instruction {}", __LINE__) + +static void DefinePhi(EmitContext& ctx, IR::Inst& phi) { + switch (phi.Arg(0).Type()) { + case IR::Type::U1: + case IR::Type::U32: + case IR::Type::F32: + ctx.reg_alloc.Define(phi); + break; + case IR::Type::U64: + case IR::Type::F64: + ctx.reg_alloc.LongDefine(phi); + break; + default: + throw NotImplementedException("Phi node type {}", phi.Type()); + } +} + +void EmitPhi(EmitContext& ctx, IR::Inst& phi) { + const size_t num_args{phi.NumArgs()}; + for (size_t i = 0; i < num_args; ++i) { + ctx.reg_alloc.Consume(phi.Arg(i)); + } + if (!phi.Definition<Id>().is_valid) { + // The phi node wasn't forward defined + DefinePhi(ctx, phi); + } +} + +void EmitVoid(EmitContext&) {} + +void EmitReference(EmitContext& ctx, const IR::Value& value) { + ctx.reg_alloc.Consume(value); +} + +void EmitPhiMove(EmitContext& ctx, const IR::Value& phi_value, const IR::Value& value) { + IR::Inst& phi{RegAlloc::AliasInst(*phi_value.Inst())}; + if (!phi.Definition<Id>().is_valid) { + // The phi node wasn't forward defined + DefinePhi(ctx, phi); + } + const Register phi_reg{ctx.reg_alloc.Consume(IR::Value{&phi})}; + const Value eval_value{ctx.reg_alloc.Consume(value)}; + + if (phi_reg == eval_value) { + return; + } + switch (phi.Flags<IR::Type>()) { + case IR::Type::U1: + case IR::Type::U32: + case IR::Type::F32: + ctx.Add("MOV.S {}.x,{};", phi_reg, ScalarS32{eval_value}); + break; + case IR::Type::U64: + case IR::Type::F64: + ctx.Add("MOV.U64 {}.x,{};", phi_reg, ScalarRegister{eval_value}); + break; + default: + throw NotImplementedException("Phi node type {}", phi.Type()); + } +} + +void EmitJoin(EmitContext& ctx) { + NotImplemented(); +} + +void EmitDemoteToHelperInvocation(EmitContext& ctx) { + ctx.Add("KIL TR.x;"); +} + +void EmitBarrier(EmitContext& ctx) { + ctx.Add("BAR;"); +} + +void EmitWorkgroupMemoryBarrier(EmitContext& ctx) { + ctx.Add("MEMBAR.CTA;"); +} + +void EmitDeviceMemoryBarrier(EmitContext& ctx) { + ctx.Add("MEMBAR;"); +} + +void EmitPrologue(EmitContext& ctx) { + // TODO +} + +void EmitEpilogue(EmitContext& ctx) { + // TODO +} + +void EmitEmitVertex(EmitContext& ctx, ScalarS32 stream) { + if (stream.type == Type::U32 && stream.imm_u32 == 0) { + ctx.Add("EMIT;"); + } else { + ctx.Add("EMITS {};", stream); + } +} + +void EmitEndPrimitive(EmitContext& ctx, const IR::Value& stream) { + if (!stream.IsImmediate()) { + LOG_WARNING(Shader_GLASM, "Stream is not immediate"); + } + ctx.reg_alloc.Consume(stream); + ctx.Add("ENDPRIM;"); +} + +void EmitGetRegister(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetRegister(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetPred(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetPred(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetGotoVariable(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetGotoVariable(EmitContext& ctx) { + NotImplemented(); +}
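+// The register, predicate, goto-variable and ALU-flag accessors in this file
+// are deliberate stubs; these constructs appear to be lowered away by earlier
+// IR passes, so reaching one throws NotImplementedException tagged with the
+// offending source line.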
+ +void EmitSetIndirectBranchVariable(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetIndirectBranchVariable(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetZFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetSFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetCFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetOFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetZFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetSFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetCFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetOFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitWorkgroupId(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {},invocation.groupid;", inst); +} + +void EmitLocalInvocationId(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {},invocation.localid;", inst); +} + +void EmitInvocationId(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,primitive_invocation.x;", inst); +} + +void EmitSampleId(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,fragment.sampleid.x;", inst); +} + +void EmitIsHelperInvocation(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,fragment.helperthread.x;", inst); +} + +void EmitYDirection(EmitContext& ctx, IR::Inst& inst) { + ctx.uses_y_direction = true; + ctx.Add("MOV.F {}.x,y_direction[0].w;", inst); +} + +void EmitUndefU1(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,0;", inst); +} + +void EmitUndefU8(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,0;", inst); +} + +void EmitUndefU16(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,0;", inst); +} + +void EmitUndefU32(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,0;", inst); +} + +void EmitUndefU64(EmitContext& ctx, IR::Inst& inst) { + ctx.LongAdd("MOV.S64 {}.x,0;", inst); +} + +void EmitGetZeroFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetSignFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetCarryFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetOverflowFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetSparseFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetInBoundsFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitLogicalOr(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + ctx.Add("OR.S {},{},{};", inst, a, b); +} + +void EmitLogicalAnd(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + ctx.Add("AND.S {},{},{};", inst, a, b); +} + +void EmitLogicalXor(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) { + ctx.Add("XOR.S {},{},{};", inst, a, b); +} + +void EmitLogicalNot(EmitContext& ctx, IR::Inst& inst, ScalarS32 value) { + ctx.Add("SEQ.S {},{},0;", inst, value); +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_select.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_select.cpp new file mode 100755 index 000000000..68fff613c --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_select.cpp @@ -0,0 +1,67 @@ + +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
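+// Booleans reaching these selects are integer masks (comparisons yield -1 for
+// true, 0 for false), so the 32-bit cases map onto a single CMP, which returns
+// its second source when the first is negative; 64-bit selects have no CMP
+// form and are built from condition-code guarded MOVs instead.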
+ +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLASM { + +void EmitSelectU1(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, ScalarS32 true_value, + ScalarS32 false_value) { + ctx.Add("CMP.S {},{},{},{};", inst, cond, true_value, false_value); +} + +void EmitSelectU8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] ScalarS32 cond, + [[maybe_unused]] ScalarS32 true_value, [[maybe_unused]] ScalarS32 false_value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitSelectU16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] ScalarS32 cond, + [[maybe_unused]] ScalarS32 true_value, [[maybe_unused]] ScalarS32 false_value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitSelectU32(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, ScalarS32 true_value, + ScalarS32 false_value) { + ctx.Add("CMP.S {},{},{},{};", inst, cond, true_value, false_value); +} + +void EmitSelectU64(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, Register true_value, + Register false_value) { + ctx.reg_alloc.InvalidateConditionCodes(); + const Register ret{ctx.reg_alloc.LongDefine(inst)}; + if (ret == true_value) { + ctx.Add("MOV.S.CC RC.x,{};" + "MOV.U64 {}.x(EQ.x),{};", + cond, ret, false_value); + } else if (ret == false_value) { + ctx.Add("MOV.S.CC RC.x,{};" + "MOV.U64 {}.x(NE.x),{};", + cond, ret, true_value); + } else { + ctx.Add("MOV.S.CC RC.x,{};" + "MOV.U64 {}.x,{};" + "MOV.U64 {}.x(NE.x),{};", + cond, ret, false_value, ret, true_value); + } +} + +void EmitSelectF16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] ScalarS32 cond, + [[maybe_unused]] Register true_value, [[maybe_unused]] Register false_value) { + throw NotImplementedException("GLASM instruction"); +} + +void EmitSelectF32(EmitContext& ctx, IR::Inst& inst, ScalarS32 cond, ScalarS32 true_value, + ScalarS32 false_value) { + ctx.Add("CMP.S {},{},{},{};", inst, cond, true_value, false_value); +} + +void EmitSelectF64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] ScalarS32 cond, + [[maybe_unused]] Register true_value, [[maybe_unused]] Register false_value) { + throw NotImplementedException("GLASM instruction"); +} + +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_shared_memory.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_shared_memory.cpp new file mode 100755 index 000000000..c1498f449 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_shared_memory.cpp @@ -0,0 +1,58 @@ + +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
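+// Shared memory accesses translate one-to-one to LDS/STS with explicit width
+// suffixes; the 64-bit and 128-bit cases use the U32X2/U32X4 vector forms.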
+ +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLASM { +void EmitLoadSharedU8(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset) { + ctx.Add("LDS.U8 {},shared_mem[{}];", inst, offset); +} + +void EmitLoadSharedS8(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset) { + ctx.Add("LDS.S8 {},shared_mem[{}];", inst, offset); +} + +void EmitLoadSharedU16(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset) { + ctx.Add("LDS.U16 {},shared_mem[{}];", inst, offset); +} + +void EmitLoadSharedS16(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset) { + ctx.Add("LDS.S16 {},shared_mem[{}];", inst, offset); +} + +void EmitLoadSharedU32(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset) { + ctx.Add("LDS.U32 {},shared_mem[{}];", inst, offset); +} + +void EmitLoadSharedU64(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset) { + ctx.Add("LDS.U32X2 {},shared_mem[{}];", inst, offset); +} + +void EmitLoadSharedU128(EmitContext& ctx, IR::Inst& inst, ScalarU32 offset) { + ctx.Add("LDS.U32X4 {},shared_mem[{}];", inst, offset); +} + +void EmitWriteSharedU8(EmitContext& ctx, ScalarU32 offset, ScalarU32 value) { + ctx.Add("STS.U8 {},shared_mem[{}];", value, offset); +} + +void EmitWriteSharedU16(EmitContext& ctx, ScalarU32 offset, ScalarU32 value) { + ctx.Add("STS.U16 {},shared_mem[{}];", value, offset); +} + +void EmitWriteSharedU32(EmitContext& ctx, ScalarU32 offset, ScalarU32 value) { + ctx.Add("STS.U32 {},shared_mem[{}];", value, offset); +} + +void EmitWriteSharedU64(EmitContext& ctx, ScalarU32 offset, Register value) { + ctx.Add("STS.U32X2 {},shared_mem[{}];", value, offset); +} + +void EmitWriteSharedU128(EmitContext& ctx, ScalarU32 offset, Register value) { + ctx.Add("STS.U32X4 {},shared_mem[{}];", value, offset); +} +} // namespace Shader::Backend::GLASM diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_special.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_special.cpp new file mode 100755 index 000000000..e69de29bb diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_undefined.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_undefined.cpp new file mode 100755 index 000000000..e69de29bb diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_warp.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_warp.cpp new file mode 100755 index 000000000..544d475b4 --- /dev/null +++ b/src/shader_recompiler/backend/glasm/emit_glasm_warp.cpp @@ -0,0 +1,150 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
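+// Subgroup operations map to NVIDIA's thread-group instructions (TGALL/TGANY/
+// TGEQ/TGBALLOT and the SHF* shuffles). The shuffle mask operand packs the
+// clamp value in its low bits and the segmentation mask at bit 8 (BFI inserts
+// 5 bits at offset 8 when either operand is not an immediate); SHF leaves the
+// shuffled value in the .y component, with the in-bounds result, when
+// requested, defining GetInBoundsFromOp.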
+ +#include "shader_recompiler/backend/glasm/emit_context.h" +#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/profile.h" + +namespace Shader::Backend::GLASM { + +void EmitLaneId(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.S {}.x,{}.threadid;", inst, ctx.stage_name); +} + +void EmitVoteAll(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred) { + ctx.Add("TGALL.S {}.x,{};", inst, pred); +} + +void EmitVoteAny(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred) { + ctx.Add("TGANY.S {}.x,{};", inst, pred); +} + +void EmitVoteEqual(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred) { + ctx.Add("TGEQ.S {}.x,{};", inst, pred); +} + +void EmitSubgroupBallot(EmitContext& ctx, IR::Inst& inst, ScalarS32 pred) { + ctx.Add("TGBALLOT {}.x,{};", inst, pred); +} + +void EmitSubgroupEqMask(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.U {},{}.threadeqmask;", inst, ctx.stage_name); +} + +void EmitSubgroupLtMask(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.U {},{}.threadltmask;", inst, ctx.stage_name); +} + +void EmitSubgroupLeMask(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.U {},{}.threadlemask;", inst, ctx.stage_name); +} + +void EmitSubgroupGtMask(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.U {},{}.threadgtmask;", inst, ctx.stage_name); +} + +void EmitSubgroupGeMask(EmitContext& ctx, IR::Inst& inst) { + ctx.Add("MOV.U {},{}.threadgemask;", inst, ctx.stage_name); +} + +static void Shuffle(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask, + std::string_view op) { + IR::Inst* const in_bounds{inst.GetAssociatedPseudoOperation(IR::Opcode::GetInBoundsFromOp)}; + if (in_bounds) { + in_bounds->Invalidate(); + } + std::string mask; + if (clamp.IsImmediate() && segmentation_mask.IsImmediate()) { + mask = fmt::to_string(clamp.U32() | (segmentation_mask.U32() << 8)); + } else { + mask = "RC"; + ctx.Add("BFI.U RC.x,{{5,8,0,0}},{},{};", + ScalarU32{ctx.reg_alloc.Consume(segmentation_mask)}, + ScalarU32{ctx.reg_alloc.Consume(clamp)}); + } + const Register value_ret{ctx.reg_alloc.Define(inst)}; + if (in_bounds) { + const Register bounds_ret{ctx.reg_alloc.Define(*in_bounds)}; + ctx.Add("SHF{}.U {},{},{},{};" + "MOV.U {}.x,{}.y;", + op, bounds_ret, value, index, mask, value_ret, bounds_ret); + } else { + ctx.Add("SHF{}.U {},{},{},{};" + "MOV.U {}.x,{}.y;", + op, value_ret, value, index, mask, value_ret, value_ret); + } +} + +void EmitShuffleIndex(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask) { + Shuffle(ctx, inst, value, index, clamp, segmentation_mask, "IDX"); +} + +void EmitShuffleUp(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask) { + Shuffle(ctx, inst, value, index, clamp, segmentation_mask, "UP"); +} + +void EmitShuffleDown(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask) { + Shuffle(ctx, inst, value, index, clamp, segmentation_mask, "DOWN"); +} + +void EmitShuffleButterfly(EmitContext& ctx, IR::Inst& inst, ScalarU32 value, ScalarU32 index, + const IR::Value& clamp, const IR::Value& segmentation_mask) { + Shuffle(ctx, inst, value, index, clamp, segmentation_mask, "XOR"); +} + +void EmitFSwizzleAdd(EmitContext& ctx, IR::Inst& inst, ScalarF32 op_a, ScalarF32 op_b, + ScalarU32 
swizzle) {
+    const auto ret{ctx.reg_alloc.Define(inst)};
+    // Select this thread's 2-bit swizzle field and combine the operands
+    // through the FSWZA/FSWZB sign tables defined by the emit context.
+    ctx.Add("AND.U RC.z,{}.threadid,3;"
+            "SHL.U RC.z,RC.z,1;"
+            "SHR.U RC.z,{},RC.z;"
+            "AND.U RC.z,RC.z,3;"
+            "MUL.F RC.x,{},FSWZA[RC.z];"
+            "MUL.F RC.y,{},FSWZB[RC.z];"
+            "ADD.F {}.x,RC.x,RC.y;",
+            ctx.stage_name, swizzle, op_a, op_b, ret);
+}
+
+void EmitDPdxFine(EmitContext& ctx, IR::Inst& inst, ScalarF32 p) {
+    if (ctx.profile.support_derivative_control) {
+        ctx.Add("DDX.FINE {}.x,{};", inst, p);
+    } else {
+        LOG_WARNING(Shader_GLASM, "Fine derivatives not supported by device");
+        ctx.Add("DDX {}.x,{};", inst, p);
+    }
+}
+
+void EmitDPdyFine(EmitContext& ctx, IR::Inst& inst, ScalarF32 p) {
+    if (ctx.profile.support_derivative_control) {
+        ctx.Add("DDY.FINE {}.x,{};", inst, p);
+    } else {
+        LOG_WARNING(Shader_GLASM, "Fine derivatives not supported by device");
+        ctx.Add("DDY {}.x,{};", inst, p);
+    }
+}
+
+void EmitDPdxCoarse(EmitContext& ctx, IR::Inst& inst, ScalarF32 p) {
+    if (ctx.profile.support_derivative_control) {
+        ctx.Add("DDX.COARSE {}.x,{};", inst, p);
+    } else {
+        LOG_WARNING(Shader_GLASM, "Coarse derivatives not supported by device");
+        ctx.Add("DDX {}.x,{};", inst, p);
+    }
+}
+
+void EmitDPdyCoarse(EmitContext& ctx, IR::Inst& inst, ScalarF32 p) {
+    if (ctx.profile.support_derivative_control) {
+        ctx.Add("DDY.COARSE {}.x,{};", inst, p);
+    } else {
+        LOG_WARNING(Shader_GLASM, "Coarse derivatives not supported by device");
+        ctx.Add("DDY {}.x,{};", inst, p);
+    }
+}
+
+} // namespace Shader::Backend::GLASM
diff --git a/src/shader_recompiler/backend/glasm/reg_alloc.cpp b/src/shader_recompiler/backend/glasm/reg_alloc.cpp
new file mode 100755
index 000000000..4c046db6e
--- /dev/null
+++ b/src/shader_recompiler/backend/glasm/reg_alloc.cpp
@@ -0,0 +1,186 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/backend/glasm/emit_context.h"
+#include "shader_recompiler/backend/glasm/reg_alloc.h"
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLASM {
+
+Register RegAlloc::Define(IR::Inst& inst) {
+    return Define(inst, false);
+}
+
+Register RegAlloc::LongDefine(IR::Inst& inst) {
+    return Define(inst, true);
+}
+
+Value RegAlloc::Peek(const IR::Value& value) {
+    if (value.IsImmediate()) {
+        return MakeImm(value);
+    } else {
+        return PeekInst(*value.Inst());
+    }
+}
+
+Value RegAlloc::Consume(const IR::Value& value) {
+    if (value.IsImmediate()) {
+        return MakeImm(value);
+    } else {
+        return ConsumeInst(*value.Inst());
+    }
+}
+
+void RegAlloc::Unref(IR::Inst& inst) {
+    IR::Inst& value_inst{AliasInst(inst)};
+    value_inst.DestructiveRemoveUsage();
+    if (!value_inst.HasUses()) {
+        Free(value_inst.Definition<Id>());
+    }
+}
+
+Register RegAlloc::AllocReg() {
+    Register ret;
+    ret.type = Type::Register;
+    ret.id = Alloc(false);
+    return ret;
+}
+
+Register RegAlloc::AllocLongReg() {
+    Register ret;
+    ret.type = Type::Register;
+    ret.id = Alloc(true);
+    return ret;
+}
+
+void RegAlloc::FreeReg(Register reg) {
+    Free(reg.id);
+}
+
+Value RegAlloc::MakeImm(const IR::Value& value) {
+    Value ret;
+    switch (value.Type()) {
+    case IR::Type::Void:
+        ret.type = Type::Void;
+        break;
+    case IR::Type::U1:
+        ret.type = Type::U32;
+        ret.imm_u32 = value.U1() ? 0xffffffff : 0;
+        break;
+    case IR::Type::U32:
+        ret.type = Type::U32;
+        ret.imm_u32 = value.U32();
+        break;
+    case IR::Type::F32:
+        ret.type = Type::U32;
+        ret.imm_u32 = Common::BitCast<u32>(value.F32());
+        break;
+    case IR::Type::U64:
+        ret.type = Type::U64;
+        ret.imm_u64 = value.U64();
+        break;
+    case IR::Type::F64:
+        ret.type = Type::U64;
+        ret.imm_u64 = Common::BitCast<u64>(value.F64());
+        break;
+    default:
+        throw NotImplementedException("Immediate type {}", value.Type());
+    }
+    return ret;
+}
+
+Register RegAlloc::Define(IR::Inst& inst, bool is_long) {
+    if (inst.HasUses()) {
+        inst.SetDefinition<Id>(Alloc(is_long));
+    } else {
+        Id id{};
+        id.is_long.Assign(is_long ? 1 : 0);
+        id.is_null.Assign(1);
+        inst.SetDefinition<Id>(id);
+    }
+    return Register{PeekInst(inst)};
+}
+
+Value RegAlloc::PeekInst(IR::Inst& inst) {
+    Value ret;
+    ret.type = Type::Register;
+    ret.id = inst.Definition<Id>();
+    return ret;
+}
+
+Value RegAlloc::ConsumeInst(IR::Inst& inst) {
+    Unref(inst);
+    return PeekInst(inst);
+}
+
+Id RegAlloc::Alloc(bool is_long) {
+    size_t& num_regs{is_long ? num_used_long_registers : num_used_registers};
+    std::bitset<NUM_REGS>& use{is_long ? long_register_use : register_use};
+    // Short (R#) and long (D#) registers are drawn from a shared budget
+    if (num_used_registers + num_used_long_registers < NUM_REGS) {
+        for (size_t reg = 0; reg < NUM_REGS; ++reg) {
+            if (use[reg]) {
+                continue;
+            }
+            num_regs = std::max(num_regs, reg + 1);
+            use[reg] = true;
+            Id ret{};
+            ret.is_valid.Assign(1);
+            ret.is_long.Assign(is_long ? 1 : 0);
+            ret.is_spill.Assign(0);
+            ret.is_condition_code.Assign(0);
+            ret.is_null.Assign(0);
+            ret.index.Assign(static_cast<u32>(reg));
+            return ret;
+        }
+    }
+    throw NotImplementedException("Register spilling");
+}
+
+void RegAlloc::Free(Id id) {
+    if (id.is_valid == 0) {
+        throw LogicError("Freeing invalid register");
+    }
+    if (id.is_spill != 0) {
+        throw NotImplementedException("Free spill");
+    }
+    if (id.is_long != 0) {
+        long_register_use[id.index] = false;
+    } else {
+        register_use[id.index] = false;
+    }
+}
+
+/*static*/ bool RegAlloc::IsAliased(const IR::Inst& inst) {
+    switch (inst.GetOpcode()) {
+    case IR::Opcode::Identity:
+    case IR::Opcode::BitCastU16F16:
+    case IR::Opcode::BitCastU32F32:
+    case IR::Opcode::BitCastU64F64:
+    case IR::Opcode::BitCastF16U16:
+    case IR::Opcode::BitCastF32U32:
+    case IR::Opcode::BitCastF64U64:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*static*/ IR::Inst& RegAlloc::AliasInst(IR::Inst& inst) {
+    IR::Inst* it{&inst};
+    while (IsAliased(*it)) {
+        const IR::Value arg{it->Arg(0)};
+        if (arg.IsImmediate()) {
+            break;
+        }
+        it = arg.InstRecursive();
+    }
+    return *it;
+}
+
+} // namespace Shader::Backend::GLASM
diff --git a/src/shader_recompiler/backend/glasm/reg_alloc.h b/src/shader_recompiler/backend/glasm/reg_alloc.h
new file mode 100755
index 000000000..99dea02d6
--- /dev/null
+++ b/src/shader_recompiler/backend/glasm/reg_alloc.h
@@ -0,0 +1,304 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
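+//
+// Register allocator for the GLASM backend: hands out 32-bit R# registers and
+// 64-bit D# "long" registers for IR values. An Id packs the allocation state
+// into 32 bits (validity, long/spill/condition-code flags, a null-definition
+// flag and the register index); spilling is not implemented and throws.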
+
+#pragma once
+
+#include <bitset>
+
+#include <fmt/format.h>
+
+#include "common/bit_cast.h"
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/exception.h"
+
+namespace Shader::IR {
+class Inst;
+class Value;
+} // namespace Shader::IR
+
+namespace Shader::Backend::GLASM {
+
+class EmitContext;
+
+enum class Type : u32 {
+    Void,
+    Register,
+    U32,
+    U64,
+};
+
+struct Id {
+    union {
+        u32 raw;
+        BitField<0, 1, u32> is_valid;
+        BitField<1, 1, u32> is_long;
+        BitField<2, 1, u32> is_spill;
+        BitField<3, 1, u32> is_condition_code;
+        BitField<4, 1, u32> is_null;
+        BitField<5, 27, u32> index;
+    };
+
+    bool operator==(Id rhs) const noexcept {
+        return raw == rhs.raw;
+    }
+    bool operator!=(Id rhs) const noexcept {
+        return !operator==(rhs);
+    }
+};
+static_assert(sizeof(Id) == sizeof(u32));
+
+struct Value {
+    Type type;
+    union {
+        Id id;
+        u32 imm_u32;
+        u64 imm_u64;
+    };
+
+    bool operator==(const Value& rhs) const noexcept {
+        if (type != rhs.type) {
+            return false;
+        }
+        switch (type) {
+        case Type::Void:
+            return true;
+        case Type::Register:
+            return id == rhs.id;
+        case Type::U32:
+            return imm_u32 == rhs.imm_u32;
+        case Type::U64:
+            return imm_u64 == rhs.imm_u64;
+        }
+        return false;
+    }
+    bool operator!=(const Value& rhs) const noexcept {
+        return !operator==(rhs);
+    }
+};
+struct Register : Value {};
+struct ScalarRegister : Value {};
+struct ScalarU32 : Value {};
+struct ScalarS32 : Value {};
+struct ScalarF32 : Value {};
+struct ScalarF64 : Value {};
+
+class RegAlloc {
+public:
+    RegAlloc(EmitContext& ctx_) : ctx{ctx_} {}
+
+    Register Define(IR::Inst& inst);
+
+    Register LongDefine(IR::Inst& inst);
+
+    [[nodiscard]] Value Peek(const IR::Value& value);
+
+    Value Consume(const IR::Value& value);
+
+    void Unref(IR::Inst& inst);
+
+    [[nodiscard]] Register AllocReg();
+
+    [[nodiscard]] Register AllocLongReg();
+
+    void FreeReg(Register reg);
+
+    void InvalidateConditionCodes() {
+        // This does nothing for now
+    }
+
+    [[nodiscard]] size_t NumUsedRegisters() const noexcept {
+        return num_used_registers;
+    }
+
+    [[nodiscard]] size_t NumUsedLongRegisters() const noexcept {
+        return num_used_long_registers;
+    }
+
+    [[nodiscard]] bool IsEmpty() const noexcept {
+        return register_use.none() && long_register_use.none();
+    }
+
+    /// Returns true if the instruction is expected to be aliased to another
+    static bool IsAliased(const IR::Inst& inst);
+
+    /// Returns the underlying value out of an alias sequence
+    static IR::Inst& AliasInst(IR::Inst& inst);
+
+private:
+    static constexpr size_t NUM_REGS = 4096;
+    static constexpr size_t NUM_ELEMENTS = 4;
+
+    Value MakeImm(const IR::Value& value);
+
+    Register Define(IR::Inst& inst, bool is_long);
+
+    Value PeekInst(IR::Inst& inst);
+
+    Value ConsumeInst(IR::Inst& inst);
+
+    Id Alloc(bool is_long);
+
+    void Free(Id id);
+
+    EmitContext& ctx;
+    size_t num_used_registers{};
+    size_t num_used_long_registers{};
+    std::bitset<NUM_REGS> register_use{};
+    std::bitset<NUM_REGS> long_register_use{};
+};
+
+template <bool scalar, typename FormatContext>
+auto FormatTo(FormatContext& ctx, Id id) {
+    if (id.is_condition_code != 0) {
+        throw NotImplementedException("Condition code emission");
+    }
+    if (id.is_spill != 0) {
+        throw NotImplementedException("Spill emission");
+    }
+    if constexpr (scalar) {
+        if (id.is_null != 0) {
+            return fmt::format_to(ctx.out(), "{}", id.is_long != 0 ? "DC.x" : "RC.x");
+        }
+        if (id.is_long != 0) {
+            return fmt::format_to(ctx.out(), "D{}.x", id.index.Value());
+        } else {
+            return fmt::format_to(ctx.out(), "R{}.x", id.index.Value());
+        }
+    } else {
+        if (id.is_null != 0) {
+            return fmt::format_to(ctx.out(), "{}", id.is_long != 0 ? "DC" : "RC");
+        }
+        if (id.is_long != 0) {
+            return fmt::format_to(ctx.out(), "D{}", id.index.Value());
+        } else {
+            return fmt::format_to(ctx.out(), "R{}", id.index.Value());
+        }
+    }
+}
+
+} // namespace Shader::Backend::GLASM
+
+template <>
+struct fmt::formatter<Shader::Backend::GLASM::Id> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(Shader::Backend::GLASM::Id id, FormatContext& ctx) {
+        return Shader::Backend::GLASM::FormatTo<true>(ctx, id);
+    }
+};
+
+template <>
+struct fmt::formatter<Shader::Backend::GLASM::Register> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::Backend::GLASM::Register& value, FormatContext& ctx) {
+        if (value.type != Shader::Backend::GLASM::Type::Register) {
+            throw Shader::InvalidArgument("Register value type is not register");
+        }
+        return Shader::Backend::GLASM::FormatTo<false>(ctx, value.id);
+    }
+};
+
+template <>
+struct fmt::formatter<Shader::Backend::GLASM::ScalarRegister> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::Backend::GLASM::ScalarRegister& value, FormatContext& ctx) {
+        if (value.type != Shader::Backend::GLASM::Type::Register) {
+            throw Shader::InvalidArgument("Register value type is not register");
+        }
+        return Shader::Backend::GLASM::FormatTo<true>(ctx, value.id);
+    }
+};
+
+template <>
+struct fmt::formatter<Shader::Backend::GLASM::ScalarU32> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::Backend::GLASM::ScalarU32& value, FormatContext& ctx) {
+        switch (value.type) {
+        case Shader::Backend::GLASM::Type::Void:
+            break;
+        case Shader::Backend::GLASM::Type::Register:
+            return Shader::Backend::GLASM::FormatTo<true>(ctx, value.id);
+        case Shader::Backend::GLASM::Type::U32:
+            return fmt::format_to(ctx.out(), "{}", value.imm_u32);
+        case Shader::Backend::GLASM::Type::U64:
+            break;
+        }
+        throw Shader::InvalidArgument("Invalid value type {}", value.type);
+    }
+};
+
+template <>
+struct fmt::formatter<Shader::Backend::GLASM::ScalarS32> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::Backend::GLASM::ScalarS32& value, FormatContext& ctx) {
+        switch (value.type) {
+        case Shader::Backend::GLASM::Type::Void:
+            break;
+        case Shader::Backend::GLASM::Type::Register:
+            return Shader::Backend::GLASM::FormatTo<true>(ctx, value.id);
+        case Shader::Backend::GLASM::Type::U32:
+            return fmt::format_to(ctx.out(), "{}", static_cast<s32>(value.imm_u32));
+        case Shader::Backend::GLASM::Type::U64:
+            break;
+        }
+        throw Shader::InvalidArgument("Invalid value type {}", value.type);
+    }
+};
+
+template <>
+struct fmt::formatter<Shader::Backend::GLASM::ScalarF32> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::Backend::GLASM::ScalarF32& value, FormatContext& ctx) {
+        switch (value.type) {
+        case Shader::Backend::GLASM::Type::Void:
+            break;
+        case Shader::Backend::GLASM::Type::Register:
+            return Shader::Backend::GLASM::FormatTo<true>(ctx, value.id);
+        case Shader::Backend::GLASM::Type::U32:
+            return fmt::format_to(ctx.out(), "{}", Common::BitCast<f32>(value.imm_u32));
+        case Shader::Backend::GLASM::Type::U64:
+            break;
+        }
+        throw Shader::InvalidArgument("Invalid value type {}", value.type);
+    }
+};
+
+template <>
+struct fmt::formatter<Shader::Backend::GLASM::ScalarF64> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::Backend::GLASM::ScalarF64& value, FormatContext& ctx) {
+        switch (value.type) {
+        case Shader::Backend::GLASM::Type::Void:
+            break;
+        case Shader::Backend::GLASM::Type::Register:
+            return Shader::Backend::GLASM::FormatTo<true>(ctx, value.id);
+        case Shader::Backend::GLASM::Type::U32:
+            break;
+        case Shader::Backend::GLASM::Type::U64:
+            return fmt::format_to(ctx.out(), "{}", Common::BitCast<f64>(value.imm_u64));
+        }
+        throw Shader::InvalidArgument("Invalid value type {}", value.type);
+    }
+};
diff --git a/src/shader_recompiler/backend/glsl/emit_context.cpp b/src/shader_recompiler/backend/glsl/emit_context.cpp
new file mode 100755
index 000000000..e08d2d2eb
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_context.cpp
@@ -0,0 +1,687 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "shader_recompiler/backend/bindings.h"
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/frontend/ir/program.h"
+#include "shader_recompiler/profile.h"
+#include "shader_recompiler/runtime_info.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+u32 CbufIndex(size_t offset) {
+    return (offset / 4) % 4;
+}
+
+char Swizzle(size_t offset) {
+    return "xyzw"[CbufIndex(offset)];
+}
+
+std::string_view InterpDecorator(Interpolation interp) {
+    switch (interp) {
+    case Interpolation::Smooth:
+        return "";
+    case Interpolation::Flat:
+        return "flat ";
+    case Interpolation::NoPerspective:
+        return "noperspective ";
+    }
+    throw InvalidArgument("Invalid interpolation {}", interp);
+}
+
+std::string_view InputArrayDecorator(Stage stage) {
+    switch (stage) {
+    case Stage::Geometry:
+    case Stage::TessellationControl:
+    case Stage::TessellationEval:
+        return "[]";
+    default:
+        return "";
+    }
+}
+
+bool StoresPerVertexAttributes(Stage stage) {
+    switch (stage) {
+    case Stage::VertexA:
+    case Stage::VertexB:
+    case Stage::Geometry:
+    case Stage::TessellationEval:
+        return true;
+    default:
+        return false;
+    }
+}
+
+std::string OutputDecorator(Stage stage, u32 size) {
+    switch (stage) {
+    case Stage::TessellationControl:
+        return fmt::format("[{}]", size);
+    default:
+        return "";
+    }
+}
+
+std::string_view SamplerType(TextureType type, bool is_depth) {
+    if (is_depth) {
+        switch (type) {
+        case TextureType::Color1D:
+            return "sampler1DShadow";
+        case TextureType::ColorArray1D:
+            return "sampler1DArrayShadow";
+        case TextureType::Color2D:
+            return "sampler2DShadow";
+        case TextureType::ColorArray2D:
+            return "sampler2DArrayShadow";
+        case TextureType::ColorCube:
+            return "samplerCubeShadow";
+        case TextureType::ColorArrayCube:
+            return "samplerCubeArrayShadow";
+        default:
+            throw NotImplementedException("Texture type: {}", type);
+        }
+    }
+    switch (type) {
+    case TextureType::Color1D:
+        return "sampler1D";
+    case TextureType::ColorArray1D:
+        return "sampler1DArray";
+    case TextureType::Color2D:
+        return "sampler2D";
+    case TextureType::ColorArray2D:
+        return "sampler2DArray";
+    case TextureType::Color3D:
+        return "sampler3D";
+    case TextureType::ColorCube:
+        return "samplerCube";
+    case TextureType::ColorArrayCube:
+        return "samplerCubeArray";
+    case TextureType::Buffer:
+        return "samplerBuffer";
+    default:
+        throw NotImplementedException("Texture type: {}", type);
+    }
+}
+
+std::string_view ImageType(TextureType type) {
+    switch (type) {
+    case TextureType::Color1D:
+        return "uimage1D";
+    case
TextureType::ColorArray1D: + return "uimage1DArray"; + case TextureType::Color2D: + return "uimage2D"; + case TextureType::ColorArray2D: + return "uimage2DArray"; + case TextureType::Color3D: + return "uimage3D"; + case TextureType::ColorCube: + return "uimageCube"; + case TextureType::ColorArrayCube: + return "uimageCubeArray"; + case TextureType::Buffer: + return "uimageBuffer"; + default: + throw NotImplementedException("Image type: {}", type); + } +} + +std::string_view ImageFormatString(ImageFormat format) { + switch (format) { + case ImageFormat::Typeless: + return ""; + case ImageFormat::R8_UINT: + return ",r8ui"; + case ImageFormat::R8_SINT: + return ",r8i"; + case ImageFormat::R16_UINT: + return ",r16ui"; + case ImageFormat::R16_SINT: + return ",r16i"; + case ImageFormat::R32_UINT: + return ",r32ui"; + case ImageFormat::R32G32_UINT: + return ",rg32ui"; + case ImageFormat::R32G32B32A32_UINT: + return ",rgba32ui"; + default: + throw NotImplementedException("Image format: {}", format); + } +} + +std::string_view ImageAccessQualifier(bool is_written, bool is_read) { + if (is_written && !is_read) { + return "writeonly "; + } + if (is_read && !is_written) { + return "readonly "; + } + return ""; +} + +std::string_view GetTessMode(TessPrimitive primitive) { + switch (primitive) { + case TessPrimitive::Triangles: + return "triangles"; + case TessPrimitive::Quads: + return "quads"; + case TessPrimitive::Isolines: + return "isolines"; + } + throw InvalidArgument("Invalid tessellation primitive {}", primitive); +} + +std::string_view GetTessSpacing(TessSpacing spacing) { + switch (spacing) { + case TessSpacing::Equal: + return "equal_spacing"; + case TessSpacing::FractionalOdd: + return "fractional_odd_spacing"; + case TessSpacing::FractionalEven: + return "fractional_even_spacing"; + } + throw InvalidArgument("Invalid tessellation spacing {}", spacing); +} + +std::string_view InputPrimitive(InputTopology topology) { + switch (topology) { + case InputTopology::Points: + return "points"; + case InputTopology::Lines: + return "lines"; + case InputTopology::LinesAdjacency: + return "lines_adjacency"; + case InputTopology::Triangles: + return "triangles"; + case InputTopology::TrianglesAdjacency: + return "triangles_adjacency"; + } + throw InvalidArgument("Invalid input topology {}", topology); +} + +std::string_view OutputPrimitive(OutputTopology topology) { + switch (topology) { + case OutputTopology::PointList: + return "points"; + case OutputTopology::LineStrip: + return "line_strip"; + case OutputTopology::TriangleStrip: + return "triangle_strip"; + } + throw InvalidArgument("Invalid output topology {}", topology); +} + +void SetupLegacyOutPerVertex(EmitContext& ctx, std::string& header) { + if (!ctx.info.stores.Legacy()) { + return; + } + if (ctx.info.stores.FixedFunctionTexture()) { + header += "vec4 gl_TexCoord[8];"; + } + if (ctx.info.stores.AnyComponent(IR::Attribute::ColorFrontDiffuseR)) { + header += "vec4 gl_FrontColor;"; + } + if (ctx.info.stores.AnyComponent(IR::Attribute::ColorFrontSpecularR)) { + header += "vec4 gl_FrontSecondaryColor;"; + } + if (ctx.info.stores.AnyComponent(IR::Attribute::ColorBackDiffuseR)) { + header += "vec4 gl_BackColor;"; + } + if (ctx.info.stores.AnyComponent(IR::Attribute::ColorBackSpecularR)) { + header += "vec4 gl_BackSecondaryColor;"; + } +} + +void SetupOutPerVertex(EmitContext& ctx, std::string& header) { + if (!StoresPerVertexAttributes(ctx.stage)) { + return; + } + if (ctx.uses_geometry_passthrough) { + return; + } + header += "out 
gl_PerVertex{vec4 gl_Position;"; + if (ctx.info.stores[IR::Attribute::PointSize]) { + header += "float gl_PointSize;"; + } + if (ctx.info.stores.ClipDistances()) { + header += "float gl_ClipDistance[];"; + } + if (ctx.info.stores[IR::Attribute::ViewportIndex] && + ctx.profile.support_viewport_index_layer_non_geometry && ctx.stage != Stage::Geometry) { + header += "int gl_ViewportIndex;"; + } + SetupLegacyOutPerVertex(ctx, header); + header += "};"; + if (ctx.info.stores[IR::Attribute::ViewportIndex] && ctx.stage == Stage::Geometry) { + header += "out int gl_ViewportIndex;"; + } +} + +void SetupLegacyInPerFragment(EmitContext& ctx, std::string& header) { + if (!ctx.info.loads.Legacy()) { + return; + } + header += "in gl_PerFragment{"; + if (ctx.info.loads.FixedFunctionTexture()) { + header += "vec4 gl_TexCoord[8];"; + } + if (ctx.info.loads.AnyComponent(IR::Attribute::ColorFrontDiffuseR)) { + header += "vec4 gl_Color;"; + } + header += "};"; +} + +} // Anonymous namespace + +EmitContext::EmitContext(IR::Program& program, Bindings& bindings, const Profile& profile_, + const RuntimeInfo& runtime_info_) + : info{program.info}, profile{profile_}, runtime_info{runtime_info_}, stage{program.stage}, + uses_geometry_passthrough{program.is_geometry_passthrough && + profile.support_geometry_shader_passthrough} { + if (profile.need_fastmath_off) { + header += "#pragma optionNV(fastmath off)\n"; + } + SetupExtensions(); + switch (program.stage) { + case Stage::VertexA: + case Stage::VertexB: + stage_name = "vs"; + break; + case Stage::TessellationControl: + stage_name = "tcs"; + header += fmt::format("layout(vertices={})out;", program.invocations); + break; + case Stage::TessellationEval: + stage_name = "tes"; + header += fmt::format("layout({},{},{})in;", GetTessMode(runtime_info.tess_primitive), + GetTessSpacing(runtime_info.tess_spacing), + runtime_info.tess_clockwise ? "cw" : "ccw"); + break; + case Stage::Geometry: + stage_name = "gs"; + header += fmt::format("layout({})in;", InputPrimitive(runtime_info.input_topology)); + if (uses_geometry_passthrough) { + header += "layout(passthrough)in gl_PerVertex{vec4 gl_Position;};"; + break; + } else if (program.is_geometry_passthrough && + !profile.support_geometry_shader_passthrough) { + LOG_WARNING(Shader_GLSL, "Passthrough geometry program used but not supported"); + } + header += fmt::format( + "layout({},max_vertices={})out;in gl_PerVertex{{vec4 gl_Position;}}gl_in[];", + OutputPrimitive(program.output_topology), program.output_vertices); + break; + case Stage::Fragment: + stage_name = "fs"; + position_name = "gl_FragCoord"; + if (runtime_info.force_early_z) { + header += "layout(early_fragment_tests)in;"; + } + if (info.uses_sample_id) { + header += "in int gl_SampleID;"; + } + if (info.stores_sample_mask) { + header += "out int gl_SampleMask[];"; + } + break; + case Stage::Compute: + stage_name = "cs"; + const u32 local_x{std::max(program.workgroup_size[0], 1u)}; + const u32 local_y{std::max(program.workgroup_size[1], 1u)}; + const u32 local_z{std::max(program.workgroup_size[2], 1u)}; + header += fmt::format("layout(local_size_x={},local_size_y={},local_size_z={}) in;", + local_x, local_y, local_z); + break; + } + SetupOutPerVertex(*this, header); + SetupLegacyInPerFragment(*this, header); + + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + if (!info.loads.Generic(index) || !runtime_info.previous_stage_stores.Generic(index)) { + continue; + } + const auto qualifier{uses_geometry_passthrough ? 
"passthrough" + : fmt::format("location={}", index)}; + header += fmt::format("layout({}){}in vec4 in_attr{}{};", qualifier, + InterpDecorator(info.interpolation[index]), index, + InputArrayDecorator(stage)); + } + for (size_t index = 0; index < info.uses_patches.size(); ++index) { + if (!info.uses_patches[index]) { + continue; + } + const auto qualifier{stage == Stage::TessellationControl ? "out" : "in"}; + header += fmt::format("layout(location={})patch {} vec4 patch{};", index, qualifier, index); + } + if (stage == Stage::Fragment) { + for (size_t index = 0; index < info.stores_frag_color.size(); ++index) { + if (!info.stores_frag_color[index] && !profile.need_declared_frag_colors) { + continue; + } + header += fmt::format("layout(location={})out vec4 frag_color{};", index, index); + } + } + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + if (info.stores.Generic(index)) { + DefineGenericOutput(index, program.invocations); + } + } + DefineConstantBuffers(bindings); + DefineStorageBuffers(bindings); + SetupImages(bindings); + SetupTextures(bindings); + DefineHelperFunctions(); + DefineConstants(); +} + +void EmitContext::SetupExtensions() { + if (info.uses_shadow_lod && profile.support_gl_texture_shadow_lod) { + header += "#extension GL_EXT_texture_shadow_lod : enable\n"; + } + if (info.uses_int64 && profile.support_int64) { + header += "#extension GL_ARB_gpu_shader_int64 : enable\n"; + } + if (info.uses_int64_bit_atomics) { + header += "#extension GL_NV_shader_atomic_int64 : enable\n"; + } + if (info.uses_atomic_f32_add) { + header += "#extension GL_NV_shader_atomic_float : enable\n"; + } + if (info.uses_atomic_f16x2_add || info.uses_atomic_f16x2_min || info.uses_atomic_f16x2_max) { + header += "#extension GL_NV_shader_atomic_fp16_vector : enable\n"; + } + if (info.uses_fp16) { + if (profile.support_gl_nv_gpu_shader_5) { + header += "#extension GL_NV_gpu_shader5 : enable\n"; + } + if (profile.support_gl_amd_gpu_shader_half_float) { + header += "#extension GL_AMD_gpu_shader_half_float : enable\n"; + } + } + if (info.uses_subgroup_invocation_id || info.uses_subgroup_mask || info.uses_subgroup_vote || + info.uses_subgroup_shuffles || info.uses_fswzadd) { + header += "#extension GL_ARB_shader_ballot : enable\n" + "#extension GL_ARB_shader_group_vote : enable\n"; + if (!info.uses_int64 && profile.support_int64) { + header += "#extension GL_ARB_gpu_shader_int64 : enable\n"; + } + if (profile.support_gl_warp_intrinsics) { + header += "#extension GL_NV_shader_thread_shuffle : enable\n"; + } + } + if ((info.stores[IR::Attribute::ViewportIndex] || info.stores[IR::Attribute::Layer]) && + profile.support_viewport_index_layer_non_geometry && stage != Stage::Geometry) { + header += "#extension GL_ARB_shader_viewport_layer_array : enable\n"; + } + if (info.uses_sparse_residency && profile.support_gl_sparse_textures) { + header += "#extension GL_ARB_sparse_texture2 : enable\n"; + } + if (info.stores[IR::Attribute::ViewportMask] && profile.support_viewport_mask) { + header += "#extension GL_NV_viewport_array2 : enable\n"; + } + if (info.uses_typeless_image_reads) { + header += "#extension GL_EXT_shader_image_load_formatted : enable\n"; + } + if (info.uses_derivatives && profile.support_gl_derivative_control) { + header += "#extension GL_ARB_derivative_control : enable\n"; + } + if (uses_geometry_passthrough) { + header += "#extension GL_NV_geometry_shader_passthrough : enable\n"; + } +} + +void EmitContext::DefineConstantBuffers(Bindings& bindings) { + if 
(info.constant_buffer_descriptors.empty()) {
+        return;
+    }
+    for (const auto& desc : info.constant_buffer_descriptors) {
+        header += fmt::format(
+            "layout(std140,binding={}) uniform {}_cbuf_{}{{vec4 {}_cbuf{}[{}];}};",
+            bindings.uniform_buffer, stage_name, desc.index, stage_name, desc.index, 4 * 1024);
+        bindings.uniform_buffer += desc.count;
+    }
+}
+
+void EmitContext::DefineStorageBuffers(Bindings& bindings) {
+    if (info.storage_buffers_descriptors.empty()) {
+        return;
+    }
+    u32 index{};
+    for (const auto& desc : info.storage_buffers_descriptors) {
+        header += fmt::format("layout(std430,binding={}) buffer {}_ssbo_{}{{uint {}_ssbo{}[];}};",
+                              bindings.storage_buffer, stage_name, bindings.storage_buffer,
+                              stage_name, index);
+        bindings.storage_buffer += desc.count;
+        index += desc.count;
+    }
+}
+
+void EmitContext::DefineGenericOutput(size_t index, u32 invocations) {
+    static constexpr std::string_view swizzle{"xyzw"};
+    const size_t base_index{static_cast<size_t>(IR::Attribute::Generic0X) + index * 4};
+    u32 element{0};
+    while (element < 4) {
+        std::string definition{fmt::format("layout(location={}", index)};
+        const u32 remainder{4 - element};
+        const TransformFeedbackVarying* xfb_varying{};
+        if (!runtime_info.xfb_varyings.empty()) {
+            xfb_varying = &runtime_info.xfb_varyings[base_index + element];
+            xfb_varying = xfb_varying && xfb_varying->components > 0 ? xfb_varying : nullptr;
+        }
+        const u32 num_components{xfb_varying ? xfb_varying->components : remainder};
+        if (element > 0) {
+            definition += fmt::format(",component={}", element);
+        }
+        if (xfb_varying) {
+            definition +=
+                fmt::format(",xfb_buffer={},xfb_stride={},xfb_offset={}", xfb_varying->buffer,
+                            xfb_varying->stride, xfb_varying->offset);
+        }
+        std::string name{fmt::format("out_attr{}", index)};
+        if (num_components < 4 || element > 0) {
+            name += fmt::format("_{}", swizzle.substr(element, num_components));
+        }
+        const auto type{num_components == 1 ? "float" : fmt::format("vec{}", num_components)};
+        definition += fmt::format(")out {} {}{};", type, name, OutputDecorator(stage, invocations));
+        header += definition;
+
+        const GenericElementInfo element_info{
+            .name = name,
+            .first_element = element,
+            .num_components = num_components,
+        };
+        std::fill_n(output_generics[index].begin() + element, num_components, element_info);
+        element += num_components;
+    }
+}
+
+void EmitContext::DefineHelperFunctions() {
+    header += "\n#define ftoi floatBitsToInt\n#define ftou floatBitsToUint\n"
+              "#define itof intBitsToFloat\n#define utof uintBitsToFloat\n";
+    if (info.uses_global_increment || info.uses_shared_increment) {
+        header += "uint CasIncrement(uint op_a,uint op_b){return op_a>=op_b?0u:(op_a+1u);}";
+    }
+    if (info.uses_global_decrement || info.uses_shared_decrement) {
+        header += "uint CasDecrement(uint op_a,uint op_b){"
+                  "return op_a==0||op_a>op_b?op_b:(op_a-1u);}";
+    }
+    if (info.uses_atomic_f32_add) {
+        header += "uint CasFloatAdd(uint op_a,float op_b){"
+                  "return ftou(utof(op_a)+op_b);}";
+    }
+    if (info.uses_atomic_f32x2_add) {
+        header += "uint CasFloatAdd32x2(uint op_a,vec2 op_b){"
+                  "return packHalf2x16(unpackHalf2x16(op_a)+op_b);}";
+    }
+    if (info.uses_atomic_f32x2_min) {
+        header += "uint CasFloatMin32x2(uint op_a,vec2 op_b){return "
+                  "packHalf2x16(min(unpackHalf2x16(op_a),op_b));}";
+    }
+    if (info.uses_atomic_f32x2_max) {
+        header += "uint CasFloatMax32x2(uint op_a,vec2 op_b){return "
+                  "packHalf2x16(max(unpackHalf2x16(op_a),op_b));}";
+    }
+    if (info.uses_atomic_f16x2_add) {
+        header += "uint CasFloatAdd16x2(uint op_a,f16vec2 op_b){return "
+                  "packFloat2x16(unpackFloat2x16(op_a)+op_b);}";
+    }
+    if (info.uses_atomic_f16x2_min) {
+        header += "uint CasFloatMin16x2(uint op_a,f16vec2 op_b){return "
+                  "packFloat2x16(min(unpackFloat2x16(op_a),op_b));}";
+    }
+    if (info.uses_atomic_f16x2_max) {
+        header += "uint CasFloatMax16x2(uint op_a,f16vec2 op_b){return "
+                  "packFloat2x16(max(unpackFloat2x16(op_a),op_b));}";
+    }
+    if (info.uses_atomic_s32_min) {
+        header += "uint CasMinS32(uint op_a,uint op_b){return uint(min(int(op_a),int(op_b)));}";
+    }
+    if (info.uses_atomic_s32_max) {
+        header += "uint CasMaxS32(uint op_a,uint op_b){return uint(max(int(op_a),int(op_b)));}";
+    }
+    if (info.uses_global_memory && profile.support_int64) {
+        header += DefineGlobalMemoryFunctions();
+    }
+    if (info.loads_indexed_attributes) {
+        const bool is_array{stage == Stage::Geometry};
+        const auto vertex_arg{is_array ? ",uint vertex" : ""};
+        std::string func{
+            fmt::format("float IndexedAttrLoad(int offset{}){{int base_index=offset>>2;uint "
+                        "masked_index=uint(base_index)&3u;switch(base_index>>2){{",
+                        vertex_arg)};
+        if (info.loads.AnyComponent(IR::Attribute::PositionX)) {
+            const auto position_idx{is_array ? "gl_in[vertex]." : ""};
+            func += fmt::format("case {}:return {}{}[masked_index];",
                                static_cast<u32>(IR::Attribute::PositionX) >> 2, position_idx,
+                                position_name);
+        }
+        const u32 base_attribute_value = static_cast<u32>(IR::Attribute::Generic0X) >> 2;
+        for (u32 index = 0; index < IR::NUM_GENERICS; ++index) {
+            if (!info.loads.Generic(index)) {
+                continue;
+            }
+            const auto vertex_idx{is_array ? "[vertex]" : ""};
+            func += fmt::format("case {}:return in_attr{}{}[masked_index];",
+                                base_attribute_value + index, index, vertex_idx);
+        }
+        func += "default: return 0.0;}}";
+        header += func;
+    }
+    if (info.stores_indexed_attributes) {
+        // TODO
+    }
+}
+
+std::string EmitContext::DefineGlobalMemoryFunctions() {
+    // Builds per-SSBO bounds checks: each buffer's base address and size are
+    // read from the constant buffer, and an access is serviced by whichever
+    // buffer contains addr.
+    const auto define_body{[&](std::string& func, size_t index, std::string_view return_statement) {
+        const auto& ssbo{info.storage_buffers_descriptors[index]};
+        const u32 size_cbuf_offset{ssbo.cbuf_offset + 8};
+        const auto ssbo_addr{fmt::format("ssbo_addr{}", index)};
+        const auto cbuf{fmt::format("{}_cbuf{}", stage_name, ssbo.cbuf_index)};
+        std::array<std::string, 2> addr_xy;
+        std::array<std::string, 2> size_xy;
+        for (size_t i = 0; i < addr_xy.size(); ++i) {
+            const auto addr_loc{ssbo.cbuf_offset + 4 * i};
+            const auto size_loc{size_cbuf_offset + 4 * i};
+            addr_xy[i] = fmt::format("ftou({}[{}].{})", cbuf, addr_loc / 16, Swizzle(addr_loc));
+            size_xy[i] = fmt::format("ftou({}[{}].{})", cbuf, size_loc / 16, Swizzle(size_loc));
+        }
+        const auto addr_pack{fmt::format("packUint2x32(uvec2({},{}))", addr_xy[0], addr_xy[1])};
+        const auto addr_statement{fmt::format("uint64_t {}={};", ssbo_addr, addr_pack)};
+        func += addr_statement;
+
+        const auto size_vec{fmt::format("uvec2({},{})", size_xy[0], size_xy[1])};
+        const auto comp_lhs{fmt::format("(addr>={})", ssbo_addr)};
+        const auto comp_rhs{fmt::format("(addr<({}+uint64_t({})))", ssbo_addr, size_vec)};
+        const auto comparison{fmt::format("if({}&&{}){{", comp_lhs, comp_rhs)};
+        func += comparison;
+
+        const auto ssbo_name{fmt::format("{}_ssbo{}", stage_name, index)};
+        func += fmt::format(fmt::runtime(return_statement), ssbo_name, ssbo_addr);
+    }};
+    std::string write_func{"void WriteGlobal32(uint64_t addr,uint data){"};
+    std::string write_func_64{"void WriteGlobal64(uint64_t addr,uvec2 data){"};
+    std::string write_func_128{"void WriteGlobal128(uint64_t addr,uvec4 data){"};
+    std::string load_func{"uint LoadGlobal32(uint64_t addr){"};
+    std::string load_func_64{"uvec2 LoadGlobal64(uint64_t addr){"};
+    std::string load_func_128{"uvec4 LoadGlobal128(uint64_t addr){"};
+    const size_t num_buffers{info.storage_buffers_descriptors.size()};
+    for (size_t index = 0; index < num_buffers; ++index) {
+        if (!info.nvn_buffer_used[index]) {
+            continue;
+        }
+        define_body(write_func, index, "{0}[uint(addr-{1})>>2]=data;return;}}");
+        define_body(write_func_64, index,
+                    "{0}[uint(addr-{1})>>2]=data.x;{0}[uint(addr-{1}+4)>>2]=data.y;return;}}");
+        define_body(write_func_128, index,
+                    "{0}[uint(addr-{1})>>2]=data.x;{0}[uint(addr-{1}+4)>>2]=data.y;{0}[uint("
+                    "addr-{1}+8)>>2]=data.z;{0}[uint(addr-{1}+12)>>2]=data.w;return;}}");
+        define_body(load_func, index, "return {0}[uint(addr-{1})>>2];}}");
+        define_body(load_func_64, index,
+                    "return uvec2({0}[uint(addr-{1})>>2],{0}[uint(addr-{1}+4)>>2]);}}");
+        define_body(load_func_128, index,
+                    "return uvec4({0}[uint(addr-{1})>>2],{0}[uint(addr-{1}+4)>>2],{0}["
+                    "uint(addr-{1}+8)>>2],{0}[uint(addr-{1}+12)>>2]);}}");
+    }
+    write_func += '}';
+    write_func_64 += '}';
+    write_func_128 += '}';
+    load_func += "return 0u;}";
+    load_func_64 += "return uvec2(0);}";
+    load_func_128 += "return uvec4(0);}";
+    return write_func + write_func_64 + write_func_128 + load_func + load_func_64 + load_func_128;
+}
+
+void EmitContext::SetupImages(Bindings& bindings) {
+    image_buffers.reserve(info.image_buffer_descriptors.size());
+    for (const auto& desc : info.image_buffer_descriptors) {
+        image_buffers.push_back({bindings.image, desc.count});
+        const auto
format{ImageFormatString(desc.format)};
+        const auto qualifier{ImageAccessQualifier(desc.is_written, desc.is_read)};
+        const auto array_decorator{desc.count > 1 ? fmt::format("[{}]", desc.count) : ""};
+        header += fmt::format("layout(binding={}{}) uniform {}uimageBuffer img{}{};",
+                              bindings.image, format, qualifier, bindings.image, array_decorator);
+        bindings.image += desc.count;
+    }
+    images.reserve(info.image_descriptors.size());
+    for (const auto& desc : info.image_descriptors) {
+        images.push_back({bindings.image, desc.count});
+        const auto format{ImageFormatString(desc.format)};
+        const auto image_type{ImageType(desc.type)};
+        const auto qualifier{ImageAccessQualifier(desc.is_written, desc.is_read)};
+        const auto array_decorator{desc.count > 1 ? fmt::format("[{}]", desc.count) : ""};
+        header += fmt::format("layout(binding={}{})uniform {}{} img{}{};", bindings.image, format,
+                              qualifier, image_type, bindings.image, array_decorator);
+        bindings.image += desc.count;
+    }
+}
+
+void EmitContext::SetupTextures(Bindings& bindings) {
+    texture_buffers.reserve(info.texture_buffer_descriptors.size());
+    for (const auto& desc : info.texture_buffer_descriptors) {
+        texture_buffers.push_back({bindings.texture, desc.count});
+        const auto sampler_type{SamplerType(TextureType::Buffer, false)};
+        const auto array_decorator{desc.count > 1 ? fmt::format("[{}]", desc.count) : ""};
+        header += fmt::format("layout(binding={}) uniform {} tex{}{};", bindings.texture,
+                              sampler_type, bindings.texture, array_decorator);
+        bindings.texture += desc.count;
+    }
+    textures.reserve(info.texture_descriptors.size());
+    for (const auto& desc : info.texture_descriptors) {
+        textures.push_back({bindings.texture, desc.count});
+        const auto sampler_type{SamplerType(desc.type, desc.is_depth)};
+        const auto array_decorator{desc.count > 1 ? fmt::format("[{}]", desc.count) : ""};
+        header += fmt::format("layout(binding={}) uniform {} tex{}{};", bindings.texture,
+                              sampler_type, bindings.texture, array_decorator);
+        bindings.texture += desc.count;
+    }
+}
+
+void EmitContext::DefineConstants() {
+    if (info.uses_fswzadd) {
+        header += "const float FSWZ_A[]=float[4](-1.f,1.f,-1.f,0.f);"
+                  "const float FSWZ_B[]=float[4](-1.f,-1.f,1.f,-1.f);";
+    }
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_context.h b/src/shader_recompiler/backend/glsl/emit_context.h
new file mode 100755
index 000000000..e9b073a26
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_context.h
@@ -0,0 +1,172 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <string>
+#include <string_view>
+#include <vector>
+
+#include "shader_recompiler/backend/glsl/var_alloc.h"
+#include "shader_recompiler/stage.h"
+
+namespace Shader {
+struct Info;
+struct Profile;
+struct RuntimeInfo;
+} // namespace Shader
+
+namespace Shader::Backend {
+struct Bindings;
+}
+
+namespace Shader::IR {
+class Inst;
+struct Program;
+} // namespace Shader::IR
+
+namespace Shader::Backend::GLSL {
+
+struct GenericElementInfo {
+    std::string name;
+    u32 first_element{};
+    u32 num_components{};
+};
+
+struct TextureImageDefinition {
+    u32 binding;
+    u32 count;
+};
+
+class EmitContext {
+public:
+    explicit EmitContext(IR::Program& program, Bindings& bindings, const Profile& profile_,
+                         const RuntimeInfo& runtime_info_);
+
+    template <GlslVarType type, typename... Args>
+    void Add(const char* format_str, IR::Inst& inst, Args&&... args) {
+        const auto var_def{var_alloc.AddDefine(inst, type)};
+        if (var_def.empty()) {
+            // Skip the assignment; format_str + 3 drops the leading "{}=".
+            code += fmt::format(fmt::runtime(format_str + 3), std::forward<Args>(args)...);
+        } else {
+            code += fmt::format(fmt::runtime(format_str), var_def, std::forward<Args>(args)...);
+        }
+        // TODO: Remove this
+        code += '\n';
+    }
+
+    template <typename... Args>
+    void AddU1(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::U1>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddF16x2(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::F16x2>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddU32(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::U32>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddF32(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::F32>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddU64(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::U64>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddF64(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::F64>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddU32x2(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::U32x2>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddF32x2(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::F32x2>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddU32x3(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::U32x3>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddF32x3(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::F32x3>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddU32x4(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::U32x4>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddF32x4(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::F32x4>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddPrecF32(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::PrecF32>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void AddPrecF64(const char* format_str, IR::Inst& inst, Args&&... args) {
+        Add<GlslVarType::PrecF64>(format_str, inst, args...);
+    }
+
+    template <typename... Args>
+    void Add(const char* format_str, Args&&...
args) {
+        code += fmt::format(fmt::runtime(format_str), std::forward<Args>(args)...);
+        // TODO: Remove this
+        code += '\n';
+    }
+
+    std::string header;
+    std::string code;
+    VarAlloc var_alloc;
+    const Info& info;
+    const Profile& profile;
+    const RuntimeInfo& runtime_info;
+
+    Stage stage{};
+    std::string_view stage_name = "invalid";
+    std::string_view position_name = "gl_Position";
+
+    std::vector<TextureImageDefinition> texture_buffers;
+    std::vector<TextureImageDefinition> image_buffers;
+    std::vector<TextureImageDefinition> textures;
+    std::vector<TextureImageDefinition> images;
+    std::array<std::array<GenericElementInfo, 4>, 32> output_generics{};
+
+    u32 num_safety_loop_vars{};
+
+    bool uses_y_direction{};
+    bool uses_cc_carry{};
+    bool uses_geometry_passthrough{};
+
+private:
+    void SetupExtensions();
+    void DefineConstantBuffers(Bindings& bindings);
+    void DefineStorageBuffers(Bindings& bindings);
+    void DefineGenericOutput(size_t index, u32 invocations);
+    void DefineHelperFunctions();
+    void DefineConstants();
+    std::string DefineGlobalMemoryFunctions();
+    void SetupImages(Bindings& bindings);
+    void SetupTextures(Bindings& bindings);
+};
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl.cpp b/src/shader_recompiler/backend/glsl/emit_glsl.cpp
new file mode 100755
index 000000000..ffdc6dbba
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl.cpp
@@ -0,0 +1,240 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <ranges>
+#include <tuple>
+
+#include "common/div_ceil.h"
+#include "common/settings.h"
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/ir_emitter.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+template <class Func>
+struct FuncTraits {};
+
+template <class ReturnType_, class... Args>
+struct FuncTraits<ReturnType_ (*)(Args...)> {
+    using ReturnType = ReturnType_;
+
+    static constexpr size_t NUM_ARGS = sizeof...(Args);
+
+    template <size_t I>
+    using ArgType = std::tuple_element_t<I, std::tuple<Args...>>;
+};
+
+template <auto func, typename... Args>
+void SetDefinition(EmitContext& ctx, IR::Inst* inst, Args... args) {
+    inst->SetDefinition<Id>(func(ctx, std::forward<Args>(args)...));
+}
+
+template <typename ArgType>
+auto Arg(EmitContext& ctx, const IR::Value& arg) {
+    if constexpr (std::is_same_v<ArgType, std::string_view>) {
+        return ctx.var_alloc.Consume(arg);
+    } else if constexpr (std::is_same_v<ArgType, const IR::Value&>) {
+        return arg;
+    } else if constexpr (std::is_same_v<ArgType, u32>) {
+        return arg.U32();
+    } else if constexpr (std::is_same_v<ArgType, IR::Attribute>) {
+        return arg.Attribute();
+    } else if constexpr (std::is_same_v<ArgType, IR::Patch>) {
+        return arg.Patch();
+    } else if constexpr (std::is_same_v<ArgType, IR::Reg>) {
+        return arg.Reg();
+    }
+}
+
+template <auto func, bool is_first_arg_inst, size_t... I>
+void Invoke(EmitContext& ctx, IR::Inst* inst, std::index_sequence<I...>) {
+    using Traits = FuncTraits<decltype(func)>;
+    if constexpr (std::is_same_v<typename Traits::ReturnType, Id>) {
+        if constexpr (is_first_arg_inst) {
+            SetDefinition<func>(
+                ctx, inst, *inst,
+                Arg<typename Traits::template ArgType<I + 2>>(ctx, inst->Arg(I))...);
+        } else {
+            SetDefinition<func>(
+                ctx, inst, Arg<typename Traits::template ArgType<I + 1>>(ctx, inst->Arg(I))...);
+        }
+    } else {
+        if constexpr (is_first_arg_inst) {
+            func(ctx, *inst, Arg<typename Traits::template ArgType<I + 2>>(ctx, inst->Arg(I))...);
+        } else {
+            func(ctx, Arg<typename Traits::template ArgType<I + 1>>(ctx, inst->Arg(I))...);
+        }
+    }
+}
+
+template <auto func>
+void Invoke(EmitContext& ctx, IR::Inst* inst) {
+    using Traits = FuncTraits<decltype(func)>;
+    static_assert(Traits::NUM_ARGS >= 1, "Insufficient arguments");
+    if constexpr (Traits::NUM_ARGS == 1) {
+        Invoke<func, false>(ctx, inst, std::make_index_sequence<0>{});
+    } else {
+        using FirstArgType = typename Traits::template ArgType<1>;
+        static constexpr bool is_first_arg_inst = std::is_same_v<FirstArgType, IR::Inst&>;
+        using Indices = std::make_index_sequence<Traits::NUM_ARGS - (is_first_arg_inst ? 2 : 1)>;
+        Invoke<func, is_first_arg_inst>(ctx, inst, Indices{});
+    }
+}
+
+void EmitInst(EmitContext& ctx, IR::Inst* inst) {
+    switch (inst->GetOpcode()) {
+#define OPCODE(name, result_type, ...)                                                            \
+    case IR::Opcode::name:                                                                         \
+        return Invoke<&Emit##name>(ctx, inst);
+#include "shader_recompiler/frontend/ir/opcodes.inc"
+#undef OPCODE
+    }
+    throw LogicError("Invalid opcode {}", inst->GetOpcode());
+}
+
+bool IsReference(IR::Inst& inst) {
+    return inst.GetOpcode() == IR::Opcode::Reference;
+}
+
+void PrecolorInst(IR::Inst& phi) {
+    // Insert phi moves before references to avoid overwriting other phis
+    const size_t num_args{phi.NumArgs()};
+    for (size_t i = 0; i < num_args; ++i) {
+        IR::Block& phi_block{*phi.PhiBlock(i)};
+        auto it{std::find_if_not(phi_block.rbegin(), phi_block.rend(), IsReference).base()};
+        IR::IREmitter ir{phi_block, it};
+        const IR::Value arg{phi.Arg(i)};
+        if (arg.IsImmediate()) {
+            ir.PhiMove(phi, arg);
+        } else {
+            ir.PhiMove(phi, IR::Value{arg.InstRecursive()});
+        }
+    }
+    for (size_t i = 0; i < num_args; ++i) {
+        IR::IREmitter{*phi.PhiBlock(i)}.Reference(IR::Value{&phi});
+    }
+}
+
+void Precolor(const IR::Program& program) {
+    for (IR::Block* const block : program.blocks) {
+        for (IR::Inst& phi : block->Instructions() | std::views::take_while(IR::IsPhi)) {
+            PrecolorInst(phi);
+        }
+    }
+}
+
+void EmitCode(EmitContext& ctx, const IR::Program& program) {
+    for (const IR::AbstractSyntaxNode& node : program.syntax_list) {
+        switch (node.type) {
+        case IR::AbstractSyntaxNode::Type::Block:
+            for (IR::Inst& inst : node.data.block->Instructions()) {
+                EmitInst(ctx, &inst);
+            }
+            break;
+        case IR::AbstractSyntaxNode::Type::If:
+            ctx.Add("if({}){{", ctx.var_alloc.Consume(node.data.if_node.cond));
+            break;
+        case IR::AbstractSyntaxNode::Type::EndIf:
+            ctx.Add("}}");
+            break;
+        case IR::AbstractSyntaxNode::Type::Break:
+            if (node.data.break_node.cond.IsImmediate()) {
+                if (node.data.break_node.cond.U1()) {
+                    ctx.Add("break;");
+                }
+            } else {
+                ctx.Add("if({}){{break;}}", ctx.var_alloc.Consume(node.data.break_node.cond));
+            }
+            break;
+        case IR::AbstractSyntaxNode::Type::Return:
+        case IR::AbstractSyntaxNode::Type::Unreachable:
+            ctx.Add("return;");
+            break;
+        case IR::AbstractSyntaxNode::Type::Loop:
+            ctx.Add("for(;;){{");
+            break;
+        case IR::AbstractSyntaxNode::Type::Repeat:
+            if (Settings::values.disable_shader_loop_safety_checks) {
+                ctx.Add("if(!{}){{break;}}}}", ctx.var_alloc.Consume(node.data.repeat.cond));
+            } else {
+                ctx.Add("if(--loop{}<0 || !{}){{break;}}}}", ctx.num_safety_loop_vars++,
+                        ctx.var_alloc.Consume(node.data.repeat.cond));
+            }
+            break;
+        default:
+            throw NotImplementedException("AbstractSyntaxNode Type {}", node.type);
+        }
+    }
+}
+
+std::string GlslVersionSpecifier(const EmitContext& ctx) {
+    if (ctx.uses_y_direction || ctx.info.stores.Legacy() || ctx.info.loads.Legacy()) {
+        return " compatibility";
+    }
+    return "";
+}
+
+bool IsPreciseType(GlslVarType type) {
+    switch (type) {
+    case GlslVarType::PrecF32:
+    case GlslVarType::PrecF64:
+        return true;
+    default:
+        return false;
+    }
+}
+
+void DefineVariables(const EmitContext& ctx, std::string& header) {
+    for (u32 i = 0; i < static_cast<u32>(GlslVarType::Void); ++i) {
+        const auto type{static_cast<GlslVarType>(i)};
+        const auto& tracker{ctx.var_alloc.GetUseTracker(type)};
+        const auto type_name{ctx.var_alloc.GetGlslType(type)};
+        const bool has_precise_bug{ctx.stage == Stage::Fragment && ctx.profile.has_gl_precise_bug};
+        const auto precise{!has_precise_bug && IsPreciseType(type) ? "precise " : ""};
+        // Temps/return types that are never used are stored at index 0
+        if (tracker.uses_temp) {
+            header += fmt::format("{}{} t{}={}(0);", precise, type_name,
+                                  ctx.var_alloc.Representation(0, type), type_name);
+        }
+        for (u32 index = 0; index < tracker.num_used; ++index) {
+            header += fmt::format("{}{} {}={}(0);", precise, type_name,
+                                  ctx.var_alloc.Representation(index, type), type_name);
+        }
+    }
+    for (u32 i = 0; i < ctx.num_safety_loop_vars; ++i) {
+        header += fmt::format("int loop{}=0x2000;", i);
+    }
+}
+} // Anonymous namespace
+
+std::string EmitGLSL(const Profile& profile, const RuntimeInfo& runtime_info, IR::Program& program,
+                     Bindings& bindings) {
+    EmitContext ctx{program, bindings, profile, runtime_info};
+    Precolor(program);
+    EmitCode(ctx, program);
+    const std::string version{fmt::format("#version 450{}\n", GlslVersionSpecifier(ctx))};
+    ctx.header.insert(0, version);
+    if (program.shared_memory_size > 0) {
+        ctx.header +=
+            fmt::format("shared uint smem[{}];", Common::DivCeil(program.shared_memory_size, 4U));
+    }
+    ctx.header += "void main(){\n";
+    if (program.local_memory_size > 0) {
+        ctx.header += fmt::format("uint lmem[{}];", Common::DivCeil(program.local_memory_size, 4U));
+    }
+    DefineVariables(ctx, ctx.header);
+    if (ctx.uses_cc_carry) {
+        ctx.header += "uint carry;";
+    }
+    if (program.info.uses_subgroup_shuffles) {
+        ctx.header += "bool shfl_in_bounds;";
+    }
+    ctx.code.insert(0, ctx.header);
+    ctx.code += '}';
+    return ctx.code;
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl.h b/src/shader_recompiler/backend/glsl/emit_glsl.h
new file mode 100755
index 000000000..20e5719e6
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl.h
@@ -0,0 +1,24 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
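+//
+// Public entry point of the GLSL backend: EmitGLSL lowers an IR::Program into
+// a GLSL source string for the given profile, runtime info and bindings.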
+
+#pragma once
+
+#include <string>
+
+#include "shader_recompiler/backend/bindings.h"
+#include "shader_recompiler/frontend/ir/program.h"
+#include "shader_recompiler/profile.h"
+#include "shader_recompiler/runtime_info.h"
+
+namespace Shader::Backend::GLSL {
+
+[[nodiscard]] std::string EmitGLSL(const Profile& profile, const RuntimeInfo& runtime_info,
+                                   IR::Program& program, Bindings& bindings);
+
+[[nodiscard]] inline std::string EmitGLSL(const Profile& profile, IR::Program& program) {
+    Bindings binding;
+    return EmitGLSL(profile, {}, program, binding);
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
new file mode 100755
index 000000000..772acc5a4
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
@@ -0,0 +1,418 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+// Generic CAS loop: recompute function() and retry atomicCompSwap until the
+// swap succeeds (the returned original value matches old).
+constexpr char cas_loop[]{
+    "for (;;){{uint old={};{}=atomicCompSwap({},old,{}({},{}));if({}==old){{break;}}}}"};
+
+void SharedCasFunction(EmitContext& ctx, IR::Inst& inst, std::string_view offset,
+                       std::string_view value, std::string_view function) {
+    const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)};
+    const std::string smem{fmt::format("smem[{}>>2]", offset)};
+    ctx.Add(cas_loop, smem, ret, smem, function, smem, value, ret);
+}
+
+void SsboCasFunction(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                     const IR::Value& offset, std::string_view value, std::string_view function) {
+    const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)};
+    const std::string ssbo{fmt::format("{}_ssbo{}[{}>>2]", ctx.stage_name, binding.U32(),
+                                       ctx.var_alloc.Consume(offset))};
+    ctx.Add(cas_loop, ssbo, ret, ssbo, function, ssbo, value, ret);
+}
+
+void SsboCasFunctionF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                        const IR::Value& offset, std::string_view value,
+                        std::string_view function) {
+    const std::string ssbo{fmt::format("{}_ssbo{}[{}>>2]", ctx.stage_name, binding.U32(),
+                                       ctx.var_alloc.Consume(offset))};
+    const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)};
+    ctx.Add(cas_loop, ssbo, ret, ssbo, function, ssbo, value, ret);
+    ctx.AddF32("{}=utof({});", inst, ret);
+}
+} // Anonymous namespace
+
+void EmitSharedAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                            std::string_view value) {
+    ctx.AddU32("{}=atomicAdd(smem[{}>>2],{});", inst, pointer_offset, value);
+}
+
+void EmitSharedAtomicSMin32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                            std::string_view value) {
+    const std::string u32_value{fmt::format("uint({})", value)};
+    SharedCasFunction(ctx, inst, pointer_offset, u32_value, "CasMinS32");
+}
+
+void EmitSharedAtomicUMin32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                            std::string_view value) {
+    ctx.AddU32("{}=atomicMin(smem[{}>>2],{});", inst, pointer_offset, value);
+}
+
+void EmitSharedAtomicSMax32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                            std::string_view value) {
+    const std::string u32_value{fmt::format("uint({})", value)};
+    SharedCasFunction(ctx, inst, pointer_offset, u32_value, "CasMaxS32");
+}
+
+void
EmitSharedAtomicUMax32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + ctx.AddU32("{}=atomicMax(smem[{}>>2],{});", inst, pointer_offset, value); +} + +void EmitSharedAtomicInc32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + SharedCasFunction(ctx, inst, pointer_offset, value, "CasIncrement"); +} + +void EmitSharedAtomicDec32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + SharedCasFunction(ctx, inst, pointer_offset, value, "CasDecrement"); +} + +void EmitSharedAtomicAnd32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + ctx.AddU32("{}=atomicAnd(smem[{}>>2],{});", inst, pointer_offset, value); +} + +void EmitSharedAtomicOr32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + ctx.AddU32("{}=atomicOr(smem[{}>>2],{});", inst, pointer_offset, value); +} + +void EmitSharedAtomicXor32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + ctx.AddU32("{}=atomicXor(smem[{}>>2],{});", inst, pointer_offset, value); +} + +void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + ctx.AddU32("{}=atomicExchange(smem[{}>>2],{});", inst, pointer_offset, value); +} + +void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value) { + LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic"); + ctx.AddU64("{}=packUint2x32(uvec2(smem[{}>>2],smem[({}+4)>>2]));", inst, pointer_offset, + pointer_offset); + ctx.Add("smem[{}>>2]=unpackUint2x32({}).x;smem[({}+4)>>2]=unpackUint2x32({}).y;", + pointer_offset, value, pointer_offset, value); +} + +void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU32("{}=atomicAdd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), + ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + const std::string u32_value{fmt::format("uint({})", value)}; + SsboCasFunction(ctx, inst, binding, offset, u32_value, "CasMinS32"); +} + +void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU32("{}=atomicMin({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), + ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + const std::string u32_value{fmt::format("uint({})", value)}; + SsboCasFunction(ctx, inst, binding, offset, u32_value, "CasMaxS32"); +} + +void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU32("{}=atomicMax({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), + ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasIncrement"); +} + +void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, 
const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasDecrement"); +} + +void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU32("{}=atomicAnd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), + ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU32("{}=atomicOr({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), + ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU32("{}=atomicXor({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), + ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU32("{}=atomicExchange({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), + ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic"); + ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset)); + ctx.Add("{}_ssbo{}[{}>>2]+=unpackUint2x32({}).x;{}_ssbo{}[({}>>2)+1]+=unpackUint2x32({}).y;", + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic"); + ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset)); + ctx.Add("for(int i=0;i<2;++i){{ " + "{}_ssbo{}[({}>>2)+i]=uint(min(int({}_ssbo{}[({}>>2)+i]),unpackInt2x32(int64_t({}))[i])" + ");}}", + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic"); + ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset)); + ctx.Add("for(int i=0;i<2;++i){{ " + "{}_ssbo{}[({}>>2)+i]=min({}_ssbo{}[({}>>2)+i],unpackUint2x32(uint64_t({}))[i]);}}", + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + LOG_WARNING(Shader_GLSL, "Int64 
atomics not supported, fallback to non-atomic"); + ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset)); + ctx.Add("for(int i=0;i<2;++i){{ " + "{}_ssbo{}[({}>>2)+i]=uint(max(int({}_ssbo{}[({}>>2)+i]),unpackInt2x32(int64_t({}))[i])" + ");}}", + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic"); + ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset)); + ctx.Add("for(int " + "i=0;i<2;++i){{{}_ssbo{}[({}>>2)+i]=max({}_ssbo{}[({}>>2)+i],unpackUint2x32(uint64_t({}" + "))[i]);}}", + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU64( + "{}=packUint2x32(uvec2(atomicAnd({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicAnd({}_" + "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", + inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU64("{}=packUint2x32(uvec2(atomicOr({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicOr({}_" + "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", + inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU64( + "{}=packUint2x32(uvec2(atomicXor({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicXor({}_" + "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", + inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name, + binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + ctx.AddU64("{}=packUint2x32(uvec2(atomicExchange({}_ssbo{}[{}>>2],unpackUint2x32({}).x)," + "atomicExchange({}_ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", + inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, + ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value); +} + +void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunctionF32(ctx, inst, binding, offset, value, "CasFloatAdd"); +} + +void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatAdd16x2"); +} + +void EmitStorageAtomicAddF32x2(EmitContext& ctx, IR::Inst& inst, 
const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatAdd32x2"); +} + +void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMin16x2"); +} + +void EmitStorageAtomicMinF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMin32x2"); +} + +void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMax16x2"); +} + +void EmitStorageAtomicMaxF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value) { + SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMax32x2"); +} + +void EmitGlobalAtomicIAdd32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicSMin32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicUMin32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicSMax32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicUMax32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicInc32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicDec32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicAnd32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicOr32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicXor32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicExchange32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicIAdd64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicSMin64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicUMin64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicSMax64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicUMax64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicInc64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicDec64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicAnd64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicOr64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicXor64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicExchange64(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicAddF32(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicAddF16x2(EmitContext&) { + throw NotImplementedException("GLSL Instruction"); +} + +void EmitGlobalAtomicAddF32x2(EmitContext&) { + throw
NotImplementedException("GLSL Instrucion"); +} + +void EmitGlobalAtomicMinF16x2(EmitContext&) { + throw NotImplementedException("GLSL Instrucion"); +} + +void EmitGlobalAtomicMinF32x2(EmitContext&) { + throw NotImplementedException("GLSL Instrucion"); +} + +void EmitGlobalAtomicMaxF16x2(EmitContext&) { + throw NotImplementedException("GLSL Instrucion"); +} + +void EmitGlobalAtomicMaxF32x2(EmitContext&) { + throw NotImplementedException("GLSL Instrucion"); +} +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_barriers.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_barriers.cpp new file mode 100755 index 000000000..e1d1b558e --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_barriers.cpp @@ -0,0 +1,21 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLSL { +void EmitBarrier(EmitContext& ctx) { + ctx.Add("barrier();"); +} + +void EmitWorkgroupMemoryBarrier(EmitContext& ctx) { + ctx.Add("groupMemoryBarrier();"); +} + +void EmitDeviceMemoryBarrier(EmitContext& ctx) { + ctx.Add("memoryBarrier();"); +} +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_bitwise_conversion.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_bitwise_conversion.cpp new file mode 100755 index 000000000..3c1714e89 --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_bitwise_conversion.cpp @@ -0,0 +1,94 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include <string_view> + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLSL { +namespace { +void Alias(IR::Inst& inst, const IR::Value& value) { + if (value.IsImmediate()) { + return; + } + IR::Inst& value_inst{*value.InstRecursive()}; + value_inst.DestructiveAddUsage(inst.UseCount()); + value_inst.DestructiveRemoveUsage(); + inst.SetDefinition(value_inst.Definition()); +} +} // Anonymous namespace + +void EmitIdentity(EmitContext&, IR::Inst& inst, const IR::Value& value) { + Alias(inst, value); +} + +void EmitConditionRef(EmitContext& ctx, IR::Inst& inst, const IR::Value& value) { + // Fake one usage to get a real variable out of the condition + inst.DestructiveAddUsage(1); + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U1)}; + const auto input{ctx.var_alloc.Consume(value)}; + if (ret != input) { + ctx.Add("{}={};", ret, input); + } +} + +void EmitBitCastU16F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst) { + NotImplemented(); +} + +void EmitBitCastU32F32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=ftou({});", inst, value); +} + +void EmitBitCastU64F64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU64("{}=doubleBitsToUint64({});", inst, value); +} + +void EmitBitCastF16U16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst) { + NotImplemented(); +} + +void EmitBitCastF32U32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=utof({});", inst, value); +} + +void EmitBitCastF64U64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=uint64BitsToDouble({});", inst, value); +} + +void EmitPackUint2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU64("{}=packUint2x32({});", inst, value); +} + +void EmitUnpackUint2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32x2("{}=unpackUint2x32({});", inst, value); +} + +void EmitPackFloat2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=packFloat2x16({});", inst, value); +} + +void EmitUnpackFloat2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF16x2("{}=unpackFloat2x16({});", inst, value); +} + +void EmitPackHalf2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=packHalf2x16({});", inst, value); +} + +void EmitUnpackHalf2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32x2("{}=unpackHalf2x16({});", inst, value); +} + +void EmitPackDouble2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=packDouble2x32({});", inst, value); +} + +void EmitUnpackDouble2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32x2("{}=unpackDouble2x32({});", inst, value); +} + +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_composite.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_composite.cpp new file mode 100755 index 000000000..49a66e3ec --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_composite.cpp @@ -0,0 +1,219 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
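+ +// CompositeInsert below has to cope with the result register aliasing the source +// composite: when var_alloc hands back the same name only the single lane write is +// emitted, otherwise the composite is copied first; e.g. (illustrative names) x=y;x.z=v; +// for a non-aliased uvec4 insert at index 2, but just x.z=v; when the two coincide.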
+ +#include <string_view> + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLSL { +namespace { +constexpr std::string_view SWIZZLE{"xyzw"}; +void CompositeInsert(EmitContext& ctx, std::string_view result, std::string_view composite, + std::string_view object, u32 index) { + if (result == composite) { + // The result is aliased with the composite + ctx.Add("{}.{}={};", composite, SWIZZLE[index], object); + } else { + ctx.Add("{}={};{}.{}={};", result, composite, result, SWIZZLE[index], object); + } +} +} // Anonymous namespace + +void EmitCompositeConstructU32x2(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2) { + ctx.AddU32x2("{}=uvec2({},{});", inst, e1, e2); +} + +void EmitCompositeConstructU32x3(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3) { + ctx.AddU32x3("{}=uvec3({},{},{});", inst, e1, e2, e3); +} + +void EmitCompositeConstructU32x4(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3, std::string_view e4) { + ctx.AddU32x4("{}=uvec4({},{},{},{});", inst, e1, e2, e3, e4); +} + +void EmitCompositeExtractU32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index) { + ctx.AddU32("{}={}.{};", inst, composite, SWIZZLE[index]); +} + +void EmitCompositeExtractU32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index) { + ctx.AddU32("{}={}.{};", inst, composite, SWIZZLE[index]); +} + +void EmitCompositeExtractU32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index) { + ctx.AddU32("{}={}.{};", inst, composite, SWIZZLE[index]); +} + +void EmitCompositeInsertU32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32x2)}; + CompositeInsert(ctx, ret, composite, object, index); +} + +void EmitCompositeInsertU32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32x3)}; + CompositeInsert(ctx, ret, composite, object, index); +} + +void EmitCompositeInsertU32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32x4)}; + CompositeInsert(ctx, ret, composite, object, index); +} + +void EmitCompositeConstructF16x2([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view e1, + [[maybe_unused]] std::string_view e2) { + NotImplemented(); +} + +void EmitCompositeConstructF16x3([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view e1, + [[maybe_unused]] std::string_view e2, + [[maybe_unused]] std::string_view e3) { + NotImplemented(); +} + +void EmitCompositeConstructF16x4([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view e1, + [[maybe_unused]] std::string_view e2, + [[maybe_unused]] std::string_view e3, + [[maybe_unused]] std::string_view e4) { + NotImplemented(); +} + +void EmitCompositeExtractF16x2([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view composite, + [[maybe_unused]] u32 index) { + NotImplemented(); +} + +void EmitCompositeExtractF16x3([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view composite, + [[maybe_unused]] u32 index) { + NotImplemented();
+} + +void EmitCompositeExtractF16x4([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view composite, + [[maybe_unused]] u32 index) { + NotImplemented(); +} + +void EmitCompositeInsertF16x2([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view composite, + [[maybe_unused]] std::string_view object, + [[maybe_unused]] u32 index) { + NotImplemented(); +} + +void EmitCompositeInsertF16x3([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view composite, + [[maybe_unused]] std::string_view object, + [[maybe_unused]] u32 index) { + NotImplemented(); +} + +void EmitCompositeInsertF16x4([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view composite, + [[maybe_unused]] std::string_view object, + [[maybe_unused]] u32 index) { + NotImplemented(); +} + +void EmitCompositeConstructF32x2(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2) { + ctx.AddF32x2("{}=vec2({},{});", inst, e1, e2); +} + +void EmitCompositeConstructF32x3(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3) { + ctx.AddF32x3("{}=vec3({},{},{});", inst, e1, e2, e3); +} + +void EmitCompositeConstructF32x4(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3, std::string_view e4) { + ctx.AddF32x4("{}=vec4({},{},{},{});", inst, e1, e2, e3, e4); +} + +void EmitCompositeExtractF32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index) { + ctx.AddF32("{}={}.{};", inst, composite, SWIZZLE[index]); +} + +void EmitCompositeExtractF32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index) { + ctx.AddF32("{}={}.{};", inst, composite, SWIZZLE[index]); +} + +void EmitCompositeExtractF32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index) { + ctx.AddF32("{}={}.{};", inst, composite, SWIZZLE[index]); +} + +void EmitCompositeInsertF32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::F32x2)}; + CompositeInsert(ctx, ret, composite, object, index); +} + +void EmitCompositeInsertF32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::F32x3)}; + CompositeInsert(ctx, ret, composite, object, index); +} + +void EmitCompositeInsertF32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; + CompositeInsert(ctx, ret, composite, object, index); +} + +void EmitCompositeConstructF64x2([[maybe_unused]] EmitContext& ctx) { + NotImplemented(); +} + +void EmitCompositeConstructF64x3([[maybe_unused]] EmitContext& ctx) { + NotImplemented(); +} + +void EmitCompositeConstructF64x4([[maybe_unused]] EmitContext& ctx) { + NotImplemented(); +} + +void EmitCompositeExtractF64x2([[maybe_unused]] EmitContext& ctx) { + NotImplemented(); +} + +void EmitCompositeExtractF64x3([[maybe_unused]] EmitContext& ctx) { + NotImplemented(); +} + +void EmitCompositeExtractF64x4([[maybe_unused]] EmitContext& ctx) { + NotImplemented(); +} + +void EmitCompositeInsertF64x2(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index) { + ctx.Add("{}.{}={};", composite, SWIZZLE[index], object); +} + +void EmitCompositeInsertF64x3(EmitContext& ctx, std::string_view composite, std::string_view 
object, + u32 index) { + ctx.Add("{}.{}={};", composite, SWIZZLE[index], object); +} + +void EmitCompositeInsertF64x4(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index) { + ctx.Add("{}.{}={};", composite, SWIZZLE[index], object); +} +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_context_get_set.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_context_get_set.cpp new file mode 100755 index 000000000..580063fa9 --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_context_get_set.cpp @@ -0,0 +1,456 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <string_view> + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/profile.h" +#include "shader_recompiler/runtime_info.h" + +namespace Shader::Backend::GLSL { +namespace { +constexpr char SWIZZLE[]{"xyzw"}; + +u32 CbufIndex(u32 offset) { + return (offset / 4) % 4; +} + +char OffsetSwizzle(u32 offset) { + return SWIZZLE[CbufIndex(offset)]; +} + +bool IsInputArray(Stage stage) { + return stage == Stage::Geometry || stage == Stage::TessellationControl || + stage == Stage::TessellationEval; +} + +std::string InputVertexIndex(EmitContext& ctx, std::string_view vertex) { + return IsInputArray(ctx.stage) ? fmt::format("[{}]", vertex) : ""; +} + +std::string_view OutputVertexIndex(EmitContext& ctx) { + return ctx.stage == Stage::TessellationControl ? "[gl_InvocationID]" : ""; +} + +void GetCbuf(EmitContext& ctx, std::string_view ret, const IR::Value& binding, + const IR::Value& offset, u32 num_bits, std::string_view cast = {}, + std::string_view bit_offset = {}) { + const bool is_immediate{offset.IsImmediate()}; + const bool component_indexing_bug{!is_immediate && ctx.profile.has_gl_component_indexing_bug}; + if (is_immediate) { + const s32 signed_offset{static_cast<s32>(offset.U32())}; + static constexpr u32 cbuf_size{0x10000}; + if (signed_offset < 0 || offset.U32() > cbuf_size) { + LOG_WARNING(Shader_GLSL, "Immediate constant buffer offset is out of bounds"); + ctx.Add("{}=0u;", ret); + return; + } + } + const auto offset_var{ctx.var_alloc.Consume(offset)}; + const auto index{is_immediate ? fmt::format("{}", offset.U32() / 16) + : fmt::format("{}>>4", offset_var)}; + const auto swizzle{is_immediate ? fmt::format(".{}", OffsetSwizzle(offset.U32())) + : fmt::format("[({}>>2)%4]", offset_var)}; + + const auto cbuf{fmt::format("{}_cbuf{}", ctx.stage_name, binding.U32())}; + const auto cbuf_cast{fmt::format("{}({}[{}]{{}})", cast, cbuf, index)}; +
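// When the profile reports has_gl_component_indexing_bug, dynamically indexing a vector +// component (e.g. cbuf[off>>4][(off>>2)%4]) misbehaves on the driver, so the fallback +// below emits four guarded scalar reads instead, one per component (illustrative shape): +// if((off>>2&3)==0)x=ftou(cbuf[off>>4].x); ... if((off>>2&3)==3)x=ftou(cbuf[off>>4].w); + const auto extraction{num_bits == 32 ?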
cbuf_cast + : fmt::format("bitfieldExtract({},int({}),{})", cbuf_cast, + bit_offset, num_bits)}; + if (!component_indexing_bug) { + const auto result{fmt::format(fmt::runtime(extraction), swizzle)}; + ctx.Add("{}={};", ret, result); + return; + } + const auto cbuf_offset{fmt::format("{}>>2", offset_var)}; + for (u32 i = 0; i < 4; ++i) { + const auto swizzle_string{fmt::format(".{}", "xyzw"[i])}; + const auto result{fmt::format(fmt::runtime(extraction), swizzle_string)}; + ctx.Add("if(({}&3)=={}){}={};", cbuf_offset, i, ret, result); + } +} + +void GetCbuf8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, const IR::Value& offset, + std::string_view cast) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)}; + if (offset.IsImmediate()) { + const auto bit_offset{fmt::format("{}", (offset.U32() % 4) * 8)}; + GetCbuf(ctx, ret, binding, offset, 8, cast, bit_offset); + } else { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + const auto bit_offset{fmt::format("({}%4)*8", offset_var)}; + GetCbuf(ctx, ret, binding, offset, 8, cast, bit_offset); + } +} + +void GetCbuf16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, const IR::Value& offset, + std::string_view cast) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)}; + if (offset.IsImmediate()) { + const auto bit_offset{fmt::format("{}", ((offset.U32() / 2) % 2) * 16)}; + GetCbuf(ctx, ret, binding, offset, 16, cast, bit_offset); + } else { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + const auto bit_offset{fmt::format("(({}>>1)%2)*16", offset_var)}; + GetCbuf(ctx, ret, binding, offset, 16, cast, bit_offset); + } +} + +u32 TexCoordIndex(IR::Attribute attr) { + return (static_cast<u32>(attr) - static_cast<u32>(IR::Attribute::FixedFncTexture0S)) / 4; +} +} // Anonymous namespace + +void EmitGetCbufU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + GetCbuf8(ctx, inst, binding, offset, "ftou"); +} + +void EmitGetCbufS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + GetCbuf8(ctx, inst, binding, offset, "ftoi"); +} + +void EmitGetCbufU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + GetCbuf16(ctx, inst, binding, offset, "ftou"); +} + +void EmitGetCbufS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + GetCbuf16(ctx, inst, binding, offset, "ftoi"); +} + +void EmitGetCbufU32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)}; + GetCbuf(ctx, ret, binding, offset, 32, "ftou"); +} + +void EmitGetCbufF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::F32)}; + GetCbuf(ctx, ret, binding, offset, 32); +} + +void EmitGetCbufU32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto cbuf{fmt::format("{}_cbuf{}", ctx.stage_name, binding.U32())}; + if (offset.IsImmediate()) { + static constexpr u32 cbuf_size{0x10000}; + const u32 u32_offset{offset.U32()}; + const s32 signed_offset{static_cast<s32>(offset.U32())}; + if (signed_offset < 0 || u32_offset > cbuf_size) { + LOG_WARNING(Shader_GLSL, "Immediate constant buffer offset is out of bounds"); + ctx.AddU32x2("{}=uvec2(0u);", inst); + return; + } + if (u32_offset % 2 == 0) { + ctx.AddU32x2("{}=ftou({}[{}].{}{});", inst, cbuf, u32_offset / 16, +
OffsetSwizzle(u32_offset), OffsetSwizzle(u32_offset + 4)); + } else { + ctx.AddU32x2("{}=uvec2(ftou({}[{}].{}),ftou({}[{}].{}));", inst, cbuf, u32_offset / 16, + OffsetSwizzle(u32_offset), cbuf, (u32_offset + 4) / 16, + OffsetSwizzle(u32_offset + 4)); + } + return; + } + const auto offset_var{ctx.var_alloc.Consume(offset)}; + if (!ctx.profile.has_gl_component_indexing_bug) { + ctx.AddU32x2("{}=uvec2(ftou({}[{}>>4][({}>>2)%4]),ftou({}[({}+4)>>4][(({}+4)>>2)%4]));", + inst, cbuf, offset_var, offset_var, cbuf, offset_var, offset_var); + return; + } + const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32x2)}; + const auto cbuf_offset{fmt::format("{}>>2", offset_var)}; + for (u32 swizzle = 0; swizzle < 4; ++swizzle) { + ctx.Add("if(({}&3)=={}){}=uvec2(ftou({}[{}>>4].{}),ftou({}[({}+4)>>4].{}));", cbuf_offset, + swizzle, ret, cbuf, offset_var, "xyzw"[swizzle], cbuf, offset_var, + "xyzw"[(swizzle + 1) % 4]); + } +} + +void EmitGetAttribute(EmitContext& ctx, IR::Inst& inst, IR::Attribute attr, + std::string_view vertex) { + const u32 element{static_cast<u32>(attr) % 4}; + const char swizzle{"xyzw"[element]}; + if (IR::IsGeneric(attr)) { + const u32 index{IR::GenericAttributeIndex(attr)}; + if (!ctx.runtime_info.previous_stage_stores.Generic(index, element)) { + if (element == 3) { + ctx.AddF32("{}=1.f;", inst, attr); + } else { + ctx.AddF32("{}=0.f;", inst, attr); + } + return; + } + ctx.AddF32("{}=in_attr{}{}.{};", inst, index, InputVertexIndex(ctx, vertex), swizzle); + return; + } + // GLSL only exposes 8 legacy texcoords + if (attr >= IR::Attribute::FixedFncTexture8S && attr <= IR::Attribute::FixedFncTexture9Q) { + LOG_WARNING(Shader_GLSL, "GLSL does not allow access to gl_TexCoord[{}]", + TexCoordIndex(attr)); + ctx.AddF32("{}=0.f;", inst); + return; + } + if (attr >= IR::Attribute::FixedFncTexture0S && attr <= IR::Attribute::FixedFncTexture7Q) { + const u32 index{TexCoordIndex(attr)}; + ctx.AddF32("{}=gl_TexCoord[{}].{};", inst, index, swizzle); + return; + } + switch (attr) { + case IR::Attribute::PrimitiveId: + ctx.AddF32("{}=itof(gl_PrimitiveID);", inst); + break; + case IR::Attribute::PositionX: + case IR::Attribute::PositionY: + case IR::Attribute::PositionZ: + case IR::Attribute::PositionW: { + const bool is_array{IsInputArray(ctx.stage)}; + const auto input_decorator{is_array ?
fmt::format("gl_in[{}].", vertex) : ""}; + ctx.AddF32("{}={}{}.{};", inst, input_decorator, ctx.position_name, swizzle); + break; + } + case IR::Attribute::ColorFrontDiffuseR: + case IR::Attribute::ColorFrontDiffuseG: + case IR::Attribute::ColorFrontDiffuseB: + case IR::Attribute::ColorFrontDiffuseA: + if (ctx.stage == Stage::Fragment) { + ctx.AddF32("{}=gl_Color.{};", inst, swizzle); + } else { + ctx.AddF32("{}=gl_FrontColor.{};", inst, swizzle); + } + break; + case IR::Attribute::PointSpriteS: + case IR::Attribute::PointSpriteT: + ctx.AddF32("{}=gl_PointCoord.{};", inst, swizzle); + break; + case IR::Attribute::TessellationEvaluationPointU: + case IR::Attribute::TessellationEvaluationPointV: + ctx.AddF32("{}=gl_TessCoord.{};", inst, swizzle); + break; + case IR::Attribute::InstanceId: + ctx.AddF32("{}=itof(gl_InstanceID);", inst); + break; + case IR::Attribute::VertexId: + ctx.AddF32("{}=itof(gl_VertexID);", inst); + break; + case IR::Attribute::FrontFace: + ctx.AddF32("{}=itof(gl_FrontFacing?-1:0);", inst); + break; + default: + throw NotImplementedException("Get attribute {}", attr); + } +} + +void EmitSetAttribute(EmitContext& ctx, IR::Attribute attr, std::string_view value, + [[maybe_unused]] std::string_view vertex) { + if (IR::IsGeneric(attr)) { + const u32 index{IR::GenericAttributeIndex(attr)}; + const u32 attr_element{IR::GenericAttributeElement(attr)}; + const GenericElementInfo& info{ctx.output_generics.at(index).at(attr_element)}; + const auto output_decorator{OutputVertexIndex(ctx)}; + if (info.num_components == 1) { + ctx.Add("{}{}={};", info.name, output_decorator, value); + } else { + const u32 index_element{attr_element - info.first_element}; + ctx.Add("{}{}.{}={};", info.name, output_decorator, "xyzw"[index_element], value); + } + return; + } + const u32 element{static_cast(attr) % 4}; + const char swizzle{"xyzw"[element]}; + // GLSL only exposes 8 legacy texcoords + if (attr >= IR::Attribute::FixedFncTexture8S && attr <= IR::Attribute::FixedFncTexture9Q) { + LOG_WARNING(Shader_GLSL, "GLSL does not allow access to gl_TexCoord[{}]", + TexCoordIndex(attr)); + return; + } + if (attr >= IR::Attribute::FixedFncTexture0S && attr <= IR::Attribute::FixedFncTexture7Q) { + const u32 index{TexCoordIndex(attr)}; + ctx.Add("gl_TexCoord[{}].{}={};", index, swizzle, value); + return; + } + switch (attr) { + case IR::Attribute::Layer: + if (ctx.stage != Stage::Geometry && + !ctx.profile.support_viewport_index_layer_non_geometry) { + LOG_WARNING(Shader_GLSL, "Shader stores viewport layer but device does not support " + "viewport layer extension"); + break; + } + ctx.Add("gl_Layer=ftoi({});", value); + break; + case IR::Attribute::ViewportIndex: + if (ctx.stage != Stage::Geometry && + !ctx.profile.support_viewport_index_layer_non_geometry) { + LOG_WARNING(Shader_GLSL, "Shader stores viewport index but device does not support " + "viewport layer extension"); + break; + } + ctx.Add("gl_ViewportIndex=ftoi({});", value); + break; + case IR::Attribute::ViewportMask: + if (ctx.stage != Stage::Geometry && !ctx.profile.support_viewport_mask) { + LOG_WARNING( + Shader_GLSL, + "Shader stores viewport mask but device does not support viewport mask extension"); + break; + } + ctx.Add("gl_ViewportMask[0]=ftoi({});", value); + break; + case IR::Attribute::PointSize: + ctx.Add("gl_PointSize={};", value); + break; + case IR::Attribute::PositionX: + case IR::Attribute::PositionY: + case IR::Attribute::PositionZ: + case IR::Attribute::PositionW: + ctx.Add("gl_Position.{}={};", swizzle, value); + break; + 
case IR::Attribute::ColorFrontDiffuseR: + case IR::Attribute::ColorFrontDiffuseG: + case IR::Attribute::ColorFrontDiffuseB: + case IR::Attribute::ColorFrontDiffuseA: + ctx.Add("gl_FrontColor.{}={};", swizzle, value); + break; + case IR::Attribute::ColorFrontSpecularR: + case IR::Attribute::ColorFrontSpecularG: + case IR::Attribute::ColorFrontSpecularB: + case IR::Attribute::ColorFrontSpecularA: + ctx.Add("gl_FrontSecondaryColor.{}={};", swizzle, value); + break; + case IR::Attribute::ColorBackDiffuseR: + case IR::Attribute::ColorBackDiffuseG: + case IR::Attribute::ColorBackDiffuseB: + case IR::Attribute::ColorBackDiffuseA: + ctx.Add("gl_BackColor.{}={};", swizzle, value); + break; + case IR::Attribute::ColorBackSpecularR: + case IR::Attribute::ColorBackSpecularG: + case IR::Attribute::ColorBackSpecularB: + case IR::Attribute::ColorBackSpecularA: + ctx.Add("gl_BackSecondaryColor.{}={};", swizzle, value); + break; + case IR::Attribute::FogCoordinate: + ctx.Add("gl_FogFragCoord={};", value); + break; + case IR::Attribute::ClipDistance0: + case IR::Attribute::ClipDistance1: + case IR::Attribute::ClipDistance2: + case IR::Attribute::ClipDistance3: + case IR::Attribute::ClipDistance4: + case IR::Attribute::ClipDistance5: + case IR::Attribute::ClipDistance6: + case IR::Attribute::ClipDistance7: { + const u32 index{static_cast<u32>(attr) - static_cast<u32>(IR::Attribute::ClipDistance0)}; + ctx.Add("gl_ClipDistance[{}]={};", index, value); + break; + } + default: + throw NotImplementedException("Set attribute {}", attr); + } +} + +void EmitGetAttributeIndexed(EmitContext& ctx, IR::Inst& inst, std::string_view offset, + std::string_view vertex) { + const bool is_array{ctx.stage == Stage::Geometry}; + const auto vertex_arg{is_array ? fmt::format(",{}", vertex) : ""}; + ctx.AddF32("{}=IndexedAttrLoad(int({}){});", inst, offset, vertex_arg); +} + +void EmitSetAttributeIndexed([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view offset, + [[maybe_unused]] std::string_view value, + [[maybe_unused]] std::string_view vertex) { + NotImplemented(); +} + +void EmitGetPatch(EmitContext& ctx, IR::Inst& inst, IR::Patch patch) { + if (!IR::IsGeneric(patch)) { + throw NotImplementedException("Non-generic patch load"); + } + const u32 index{IR::GenericPatchIndex(patch)}; + const u32 element{IR::GenericPatchElement(patch)}; + const char swizzle{"xyzw"[element]}; + ctx.AddF32("{}=patch{}.{};", inst, index, swizzle); +} + +void EmitSetPatch(EmitContext& ctx, IR::Patch patch, std::string_view value) { + if (IR::IsGeneric(patch)) { + const u32 index{IR::GenericPatchIndex(patch)}; + const u32 element{IR::GenericPatchElement(patch)}; + ctx.Add("patch{}.{}={};", index, "xyzw"[element], value); + return; + } + switch (patch) { + case IR::Patch::TessellationLodLeft: + case IR::Patch::TessellationLodRight: + case IR::Patch::TessellationLodTop: + case IR::Patch::TessellationLodBottom: { + const u32 index{static_cast<u32>(patch) - u32(IR::Patch::TessellationLodLeft)}; + ctx.Add("gl_TessLevelOuter[{}]={};", index, value); + break; + } + case IR::Patch::TessellationLodInteriorU: + ctx.Add("gl_TessLevelInner[0]={};", value); + break; + case IR::Patch::TessellationLodInteriorV: + ctx.Add("gl_TessLevelInner[1]={};", value); + break; + default: + throw NotImplementedException("Patch {}", patch); + } +} + +void EmitSetFragColor(EmitContext& ctx, u32 index, u32 component, std::string_view value) { + const char swizzle{"xyzw"[component]}; + ctx.Add("frag_color{}.{}={};", index, swizzle, value); +} + +void
EmitSetSampleMask(EmitContext& ctx, std::string_view value) { + ctx.Add("gl_SampleMask[0]=int({});", value); +} + +void EmitSetFragDepth(EmitContext& ctx, std::string_view value) { + ctx.Add("gl_FragDepth={};", value); +} + +void EmitLocalInvocationId(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32x3("{}=gl_LocalInvocationID;", inst); +} + +void EmitWorkgroupId(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32x3("{}=gl_WorkGroupID;", inst); +} + +void EmitInvocationId(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32("{}=uint(gl_InvocationID);", inst); +} + +void EmitSampleId(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32("{}=uint(gl_SampleID);", inst); +} + +void EmitIsHelperInvocation(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU1("{}=gl_HelperInvocation;", inst); +} + +void EmitYDirection(EmitContext& ctx, IR::Inst& inst) { + ctx.uses_y_direction = true; + ctx.AddF32("{}=gl_FrontMaterial.ambient.a;", inst); +} + +void EmitLoadLocal(EmitContext& ctx, IR::Inst& inst, std::string_view word_offset) { + ctx.AddU32("{}=lmem[{}];", inst, word_offset); +} + +void EmitWriteLocal(EmitContext& ctx, std::string_view word_offset, std::string_view value) { + ctx.Add("lmem[{}]={};", word_offset, value); +} + +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_control_flow.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_control_flow.cpp new file mode 100755 index 000000000..53f8896be --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_control_flow.cpp @@ -0,0 +1,21 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <string_view> + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/exception.h" + +namespace Shader::Backend::GLSL { + +void EmitJoin(EmitContext&) { + throw NotImplementedException("Join shouldn't be emitted"); +} + +void EmitDemoteToHelperInvocation(EmitContext& ctx) { + ctx.Add("discard;"); +} + +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_convert.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_convert.cpp new file mode 100755 index 000000000..eeae6562c --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_convert.cpp @@ -0,0 +1,230 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
+ +#include <string_view> + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLSL { +void EmitConvertS16F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertS16F32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=(int({})&0xffff)|(bitfieldExtract(int({}),31,1)<<15);", inst, value, value); +} + +void EmitConvertS16F64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertS32F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertS32F32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=int({});", inst, value); +} + +void EmitConvertS32F64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=int({});", inst, value); +} + +void EmitConvertS64F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertS64F32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU64("{}=int64_t({});", inst, value); +} + +void EmitConvertS64F64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU64("{}=int64_t({});", inst, value); +} + +void EmitConvertU16F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertU16F32([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertU16F64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertU32F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertU32F32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=uint({});", inst, value); +} + +void EmitConvertU32F64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=uint({});", inst, value); +} + +void EmitConvertU64F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertU64F32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU64("{}=uint64_t({});", inst, value); +} + +void EmitConvertU64F64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU64("{}=uint64_t({});", inst, value); +} + +void EmitConvertU64U32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU64("{}=uint64_t({});", inst, value); +} + +void EmitConvertU32U64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=uint({});", inst, value); +} + +void EmitConvertF16F32([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF32F16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} +
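+// Reading EmitConvertS16F32 above (illustrative): GLSL has no 16-bit integer type here, so +// the emitted expression keeps the low 16 bits of int(value) and ORs the sign bit into +// bit 15 via bitfieldExtract(...,31,1)<<15, leaving the 16-bit two's-complement pattern in +// a 32-bit register. + +void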
EmitConvertF32F64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=float({});", inst, value); +} + +void EmitConvertF64F32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=double({});", inst, value); +} + +void EmitConvertF16S8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF16S16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF16S32([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF16S64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF16U8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF16U16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF16U32([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF16U64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF32S8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF32S16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF32S32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=float(int({}));", inst, value); +} + +void EmitConvertF32S64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=float(int64_t({}));", inst, value); +} + +void EmitConvertF32U8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF32U16(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=float({}&0xffff);", inst, value); +} + +void EmitConvertF32U32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=float({});", inst, value); +} + +void EmitConvertF32U64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=float({});", inst, value); +} + +void EmitConvertF64S8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF64S16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF64S32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=double(int({}));", inst, value); +} + +void EmitConvertF64S64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=double(int64_t({}));", inst, value); +} + +void EmitConvertF64U8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF64U16([[maybe_unused]] EmitContext& ctx, 
[[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitConvertF64U32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=double({});", inst, value); +} + +void EmitConvertF64U64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=double({});", inst, value); +} + +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_floating_point.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_floating_point.cpp new file mode 100755 index 000000000..b11be5bd7 --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_floating_point.cpp @@ -0,0 +1,458 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <string_view> + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::Backend::GLSL { +namespace { +void Compare(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs, + std::string_view op, bool ordered) { + ctx.AddU1("{}={}{}{}", inst, lhs, op, rhs, lhs, rhs); + if (ordered) { + ctx.Add("&&!isnan({})&&!isnan({});", lhs, rhs); + } else { + ctx.Add("||isnan({})||isnan({});", lhs, rhs); + } +} + +bool IsPrecise(const IR::Inst& inst) { + return {inst.Flags<IR::FpControl>().no_contraction}; +} +} // Anonymous namespace + +void EmitFPAbs16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPAbs32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=abs({});", inst, value); +} + +void EmitFPAbs64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=abs({});", inst, value); +} + +void EmitFPAdd16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view a, [[maybe_unused]] std::string_view b) { + NotImplemented(); +} + +void EmitFPAdd32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + if (IsPrecise(inst)) { + ctx.AddPrecF32("{}={}+{};", inst, a, b); + } else { + ctx.AddF32("{}={}+{};", inst, a, b); + } +} + +void EmitFPAdd64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + if (IsPrecise(inst)) { + ctx.AddPrecF64("{}={}+{};", inst, a, b); + } else { + ctx.AddF64("{}={}+{};", inst, a, b); + } +} + +void EmitFPFma16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view a, [[maybe_unused]] std::string_view b, + [[maybe_unused]] std::string_view c) { + NotImplemented(); +} + +void EmitFPFma32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b, + std::string_view c) { + if (IsPrecise(inst)) { + ctx.AddPrecF32("{}=fma({},{},{});", inst, a, b, c); + } else { + ctx.AddF32("{}=fma({},{},{});", inst, a, b, c); + } +} + +void EmitFPFma64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b, + std::string_view c) { + if (IsPrecise(inst)) { + ctx.AddPrecF64("{}=fma({},{},{});", inst, a, b, c); + } else { + ctx.AddF64("{}=fma({},{},{});", inst, a, b, c); + } +} + +void EmitFPMax32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + ctx.AddF32("{}=max({},{});", inst, a, b); +}
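+ +// Illustration of the Compare helper above (result register x assumed): an ordered +// comparison is emitted as one GLSL statement assembled from two Add calls, roughly +// x=a<b&&!isnan(a)&&!isnan(b); while the unordered variant ORs the isnan terms in, +// matching IEEE ordered/unordered comparison semantics. + +void EmitFPMax64(EmitContext& ctx, IR::Inst& inst,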
std::string_view a, std::string_view b) { + ctx.AddF64("{}=max({},{});", inst, a, b); +} + +void EmitFPMin32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + ctx.AddF32("{}=min({},{});", inst, a, b); +} + +void EmitFPMin64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + ctx.AddF64("{}=min({},{});", inst, a, b); +} + +void EmitFPMul16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view a, [[maybe_unused]] std::string_view b) { + NotImplemented(); +} + +void EmitFPMul32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + if (IsPrecise(inst)) { + ctx.AddPrecF32("{}={}*{};", inst, a, b); + } else { + ctx.AddF32("{}={}*{};", inst, a, b); + } +} + +void EmitFPMul64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + if (IsPrecise(inst)) { + ctx.AddPrecF64("{}={}*{};", inst, a, b); + } else { + ctx.AddF64("{}={}*{};", inst, a, b); + } +} + +void EmitFPNeg16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPNeg32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=-({});", inst, value); +} + +void EmitFPNeg64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=-({});", inst, value); +} + +void EmitFPSin(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=sin({});", inst, value); +} + +void EmitFPCos(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=cos({});", inst, value); +} + +void EmitFPExp2(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=exp2({});", inst, value); +} + +void EmitFPLog2(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=log2({});", inst, value); +} + +void EmitFPRecip32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=(1.0f)/{};", inst, value); +} + +void EmitFPRecip64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=1.0/{};", inst, value); +} + +void EmitFPRecipSqrt32([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + ctx.AddF32("{}=inversesqrt({});", inst, value); +} + +void EmitFPRecipSqrt64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPSqrt(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=sqrt({});", inst, value); +} + +void EmitFPSaturate16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPSaturate32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=min(max({},0.0),1.0);", inst, value); +} + +void EmitFPSaturate64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=min(max({},0.0),1.0);", inst, value); +} + +void EmitFPClamp16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value, + [[maybe_unused]] std::string_view min_value, + [[maybe_unused]] std::string_view max_value) { + NotImplemented(); +} + +void EmitFPClamp32(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view min_value, std::string_view max_value) { + // GLSL's clamp does not produce desirable results + 
ctx.AddF32("{}=min(max({},float({})),float({}));", inst, value, min_value, max_value); +} + +void EmitFPClamp64(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view min_value, std::string_view max_value) { + // GLSL's clamp does not produce desirable results + ctx.AddF64("{}=min(max({},double({})),double({}));", inst, value, min_value, max_value); +} + +void EmitFPRoundEven16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPRoundEven32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=roundEven({});", inst, value); +} + +void EmitFPRoundEven64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=roundEven({});", inst, value); +} + +void EmitFPFloor16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPFloor32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=floor({});", inst, value); +} + +void EmitFPFloor64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=floor({});", inst, value); +} + +void EmitFPCeil16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPCeil32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=ceil({});", inst, value); +} + +void EmitFPCeil64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=ceil({});", inst, value); +} + +void EmitFPTrunc16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst, + [[maybe_unused]] std::string_view value) { + NotImplemented(); +} + +void EmitFPTrunc32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF32("{}=trunc({});", inst, value); +} + +void EmitFPTrunc64(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddF64("{}=trunc({});", inst, value); +} + +void EmitFPOrdEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPOrdEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "==", true); +} + +void EmitFPOrdEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "==", true); +} + +void EmitFPUnordEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPUnordEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "==", false); +} + +void EmitFPUnordEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "==", false); +} + +void EmitFPOrdNotEqual16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPOrdNotEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "!=", true); +} + +void EmitFPOrdNotEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "!=", true); +} + +void EmitFPUnordNotEqual16([[maybe_unused]] EmitContext& 
ctx, [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPUnordNotEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "!=", false); +} + +void EmitFPUnordNotEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "!=", false); +} + +void EmitFPOrdLessThan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPOrdLessThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<", true); +} + +void EmitFPOrdLessThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<", true); +} + +void EmitFPUnordLessThan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPUnordLessThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<", false); +} + +void EmitFPUnordLessThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<", false); +} + +void EmitFPOrdGreaterThan16([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPOrdGreaterThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, ">", true); +} + +void EmitFPOrdGreaterThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, ">", true); +} + +void EmitFPUnordGreaterThan16([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPUnordGreaterThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, ">", false); +} + +void EmitFPUnordGreaterThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, ">", false); +} + +void EmitFPOrdLessThanEqual16([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPOrdLessThanEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<=", true); +} + +void EmitFPOrdLessThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<=", true); +} + +void EmitFPUnordLessThanEqual16([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + NotImplemented(); +} + +void EmitFPUnordLessThanEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<=", false); +} + +void EmitFPUnordLessThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs) { + Compare(ctx, inst, lhs, rhs, "<=", false); +} + +void EmitFPOrdGreaterThanEqual16([[maybe_unused]] EmitContext& ctx, + [[maybe_unused]] std::string_view lhs, + [[maybe_unused]] std::string_view rhs) { + 
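+    // The 32/64-bit comparisons below go through the Compare() helper, which
+    // appends an explicit NaN check to the operator; e.g. for ">=" it emits:
+    //     ordered:   x=a>=b&&!isnan(a)&&!isnan(b);
+    //     unordered: x=a>=b||isnan(a)||isnan(b);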
NotImplemented();
+}
+
+void EmitFPOrdGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                                 std::string_view rhs) {
+    Compare(ctx, inst, lhs, rhs, ">=", true);
+}
+
+void EmitFPOrdGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                                 std::string_view rhs) {
+    Compare(ctx, inst, lhs, rhs, ">=", true);
+}
+
+void EmitFPUnordGreaterThanEqual16([[maybe_unused]] EmitContext& ctx,
+                                   [[maybe_unused]] std::string_view lhs,
+                                   [[maybe_unused]] std::string_view rhs) {
+    NotImplemented();
+}
+
+void EmitFPUnordGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                                   std::string_view rhs) {
+    Compare(ctx, inst, lhs, rhs, ">=", false);
+}
+
+void EmitFPUnordGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                                   std::string_view rhs) {
+    Compare(ctx, inst, lhs, rhs, ">=", false);
+}
+
+void EmitFPIsNan16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                   [[maybe_unused]] std::string_view value) {
+    NotImplemented();
+}
+
+void EmitFPIsNan32(EmitContext& ctx, IR::Inst& inst, std::string_view value) {
+    ctx.AddU1("{}=isnan({});", inst, value);
+}
+
+void EmitFPIsNan64(EmitContext& ctx, IR::Inst& inst, std::string_view value) {
+    ctx.AddU1("{}=isnan({});", inst, value);
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
new file mode 100755
index 000000000..447eb8e0a
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
@@ -0,0 +1,799 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/profile.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+std::string Texture(EmitContext& ctx, const IR::TextureInstInfo& info, const IR::Value& index) {
+    const auto def{info.type == TextureType::Buffer ? ctx.texture_buffers.at(info.descriptor_index)
+                                                    : ctx.textures.at(info.descriptor_index)};
+    const auto index_offset{def.count > 1 ? fmt::format("[{}]", ctx.var_alloc.Consume(index)) : ""};
+    return fmt::format("tex{}{}", def.binding, index_offset);
+}
+
+std::string Image(EmitContext& ctx, const IR::TextureInstInfo& info, const IR::Value& index) {
+    const auto def{info.type == TextureType::Buffer ? ctx.image_buffers.at(info.descriptor_index)
+                                                    : ctx.images.at(info.descriptor_index)};
+    const auto index_offset{def.count > 1 ?
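+                            // Descriptor arrays (count > 1) get the runtime
+                            // index appended, so a binding resolves to e.g.
+                            // "img3" or "img3[2]"; Texture() above does the
+                            // same with its "tex" prefix.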
fmt::format("[{}]", ctx.var_alloc.Consume(index)) : ""}; + return fmt::format("img{}{}", def.binding, index_offset); +} + +std::string CastToIntVec(std::string_view value, const IR::TextureInstInfo& info) { + switch (info.type) { + case TextureType::Color1D: + case TextureType::Buffer: + return fmt::format("int({})", value); + case TextureType::ColorArray1D: + case TextureType::Color2D: + case TextureType::ColorArray2D: + return fmt::format("ivec2({})", value); + case TextureType::Color3D: + case TextureType::ColorCube: + return fmt::format("ivec3({})", value); + case TextureType::ColorArrayCube: + return fmt::format("ivec4({})", value); + default: + throw NotImplementedException("Integer cast for TextureType {}", info.type.Value()); + } +} + +std::string CoordsCastToInt(std::string_view value, const IR::TextureInstInfo& info) { + switch (info.type) { + case TextureType::Color1D: + case TextureType::Buffer: + return fmt::format("int({})", value); + case TextureType::ColorArray1D: + case TextureType::Color2D: + return fmt::format("ivec2({})", value); + case TextureType::ColorArray2D: + case TextureType::Color3D: + case TextureType::ColorCube: + return fmt::format("ivec3({})", value); + case TextureType::ColorArrayCube: + return fmt::format("ivec4({})", value); + default: + throw NotImplementedException("TexelFetchCast type {}", info.type.Value()); + } +} + +bool NeedsShadowLodExt(TextureType type) { + switch (type) { + case TextureType::ColorArray2D: + case TextureType::ColorCube: + case TextureType::ColorArrayCube: + return true; + default: + return false; + } +} + +std::string GetOffsetVec(EmitContext& ctx, const IR::Value& offset) { + if (offset.IsImmediate()) { + return fmt::format("int({})", offset.U32()); + } + IR::Inst* const inst{offset.InstRecursive()}; + if (inst->AreAllArgsImmediates()) { + switch (inst->GetOpcode()) { + case IR::Opcode::CompositeConstructU32x2: + return fmt::format("ivec2({},{})", inst->Arg(0).U32(), inst->Arg(1).U32()); + case IR::Opcode::CompositeConstructU32x3: + return fmt::format("ivec3({},{},{})", inst->Arg(0).U32(), inst->Arg(1).U32(), + inst->Arg(2).U32()); + case IR::Opcode::CompositeConstructU32x4: + return fmt::format("ivec4({},{},{},{})", inst->Arg(0).U32(), inst->Arg(1).U32(), + inst->Arg(2).U32(), inst->Arg(3).U32()); + default: + break; + } + } + const bool has_var_aoffi{ctx.profile.support_gl_variable_aoffi}; + if (!has_var_aoffi) { + LOG_WARNING(Shader_GLSL, "Device does not support variable texture offsets, STUBBING"); + } + const auto offset_str{has_var_aoffi ? 
ctx.var_alloc.Consume(offset) : "0"}; + switch (offset.Type()) { + case IR::Type::U32: + return fmt::format("int({})", offset_str); + case IR::Type::U32x2: + return fmt::format("ivec2({})", offset_str); + case IR::Type::U32x3: + return fmt::format("ivec3({})", offset_str); + case IR::Type::U32x4: + return fmt::format("ivec4({})", offset_str); + default: + throw NotImplementedException("Offset type {}", offset.Type()); + } +} + +std::string PtpOffsets(const IR::Value& offset, const IR::Value& offset2) { + const std::array values{offset.InstRecursive(), offset2.InstRecursive()}; + if (!values[0]->AreAllArgsImmediates() || !values[1]->AreAllArgsImmediates()) { + LOG_WARNING(Shader_GLSL, "Not all arguments in PTP are immediate, STUBBING"); + return "ivec2[](ivec2(0), ivec2(1), ivec2(2), ivec2(3))"; + } + const IR::Opcode opcode{values[0]->GetOpcode()}; + if (opcode != values[1]->GetOpcode() || opcode != IR::Opcode::CompositeConstructU32x4) { + throw LogicError("Invalid PTP arguments"); + } + auto read{[&](unsigned int a, unsigned int b) { return values[a]->Arg(b).U32(); }}; + + return fmt::format("ivec2[](ivec2({},{}),ivec2({},{}),ivec2({},{}),ivec2({},{}))", read(0, 0), + read(0, 1), read(0, 2), read(0, 3), read(1, 0), read(1, 1), read(1, 2), + read(1, 3)); +} + +IR::Inst* PrepareSparse(IR::Inst& inst) { + const auto sparse_inst{inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp)}; + if (sparse_inst) { + sparse_inst->Invalidate(); + } + return sparse_inst; +} +} // Anonymous namespace + +void EmitImageSampleImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view bias_lc, + const IR::Value& offset) { + const auto info{inst.Flags()}; + if (info.has_lod_clamp) { + throw NotImplementedException("EmitImageSampleImplicitLod Lod clamp samples"); + } + const auto texture{Texture(ctx, info, index)}; + const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""}; + const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; + const auto sparse_inst{PrepareSparse(inst)}; + const bool supports_sparse{ctx.profile.support_gl_sparse_textures}; + if (sparse_inst && !supports_sparse) { + LOG_WARNING(Shader_GLSL, "Device does not support sparse texture queries. 
STUBBING"); + ctx.AddU1("{}=true;", *sparse_inst); + } + if (!sparse_inst || !supports_sparse) { + if (!offset.IsEmpty()) { + const auto offset_str{GetOffsetVec(ctx, offset)}; + if (ctx.stage == Stage::Fragment) { + ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords, offset_str, bias); + } else { + ctx.Add("{}=textureLodOffset({},{},0.0,{});", texel, texture, coords, offset_str); + } + } else { + if (ctx.stage == Stage::Fragment) { + ctx.Add("{}=texture({},{}{});", texel, texture, coords, bias); + } else { + ctx.Add("{}=textureLod({},{},0.0);", texel, texture, coords); + } + } + return; + } + if (!offset.IsEmpty()) { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureOffsetARB({},{},{},{}{}));", + *sparse_inst, texture, coords, GetOffsetVec(ctx, offset), texel, bias); + } else { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureARB({},{},{}{}));", *sparse_inst, + texture, coords, texel, bias); + } +} + +void EmitImageSampleExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view lod_lc, + const IR::Value& offset) { + const auto info{inst.Flags()}; + if (info.has_bias) { + throw NotImplementedException("EmitImageSampleExplicitLod Bias texture samples"); + } + if (info.has_lod_clamp) { + throw NotImplementedException("EmitImageSampleExplicitLod Lod clamp samples"); + } + const auto texture{Texture(ctx, info, index)}; + const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; + const auto sparse_inst{PrepareSparse(inst)}; + const bool supports_sparse{ctx.profile.support_gl_sparse_textures}; + if (sparse_inst && !supports_sparse) { + LOG_WARNING(Shader_GLSL, "Device does not support sparse texture queries. STUBBING"); + ctx.AddU1("{}=true;", *sparse_inst); + } + if (!sparse_inst || !supports_sparse) { + if (!offset.IsEmpty()) { + ctx.Add("{}=textureLodOffset({},{},{},{});", texel, texture, coords, lod_lc, + GetOffsetVec(ctx, offset)); + } else { + ctx.Add("{}=textureLod({},{},{});", texel, texture, coords, lod_lc); + } + return; + } + if (!offset.IsEmpty()) { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));", + *sparse_inst, texture, CastToIntVec(coords, info), lod_lc, + GetOffsetVec(ctx, offset), texel); + } else { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureLodARB({},{},{},{}));", *sparse_inst, + texture, coords, lod_lc, texel); + } +} + +void EmitImageSampleDrefImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view dref, + std::string_view bias_lc, const IR::Value& offset) { + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + if (sparse_inst) { + throw NotImplementedException("EmitImageSampleDrefImplicitLod Sparse texture samples"); + } + if (info.has_bias) { + throw NotImplementedException("EmitImageSampleDrefImplicitLod Bias texture samples"); + } + if (info.has_lod_clamp) { + throw NotImplementedException("EmitImageSampleDrefImplicitLod Lod clamp samples"); + } + const auto texture{Texture(ctx, info, index)}; + const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""}; + const bool needs_shadow_ext{NeedsShadowLodExt(info.type)}; + const auto cast{needs_shadow_ext ? "vec4" : "vec3"}; + const bool use_grad{!ctx.profile.support_gl_texture_shadow_lod && + ctx.stage != Stage::Fragment && needs_shadow_ext}; + if (use_grad) { + LOG_WARNING(Shader_GLSL, + "Device lacks GL_EXT_texture_shadow_lod. 
Using textureGrad fallback"); + if (info.type == TextureType::ColorArrayCube) { + LOG_WARNING(Shader_GLSL, "textureGrad does not support ColorArrayCube. Stubbing"); + ctx.AddF32("{}=0.0f;", inst); + return; + } + const auto d_cast{info.type == TextureType::ColorArray2D ? "vec2" : "vec3"}; + ctx.AddF32("{}=textureGrad({},{}({},{}),{}(0),{}(0));", inst, texture, cast, coords, dref, + d_cast, d_cast); + return; + } + if (!offset.IsEmpty()) { + const auto offset_str{GetOffsetVec(ctx, offset)}; + if (ctx.stage == Stage::Fragment) { + ctx.AddF32("{}=textureOffset({},{}({},{}),{}{});", inst, texture, cast, coords, dref, + offset_str, bias); + } else { + ctx.AddF32("{}=textureLodOffset({},{}({},{}),0.0,{});", inst, texture, cast, coords, + dref, offset_str); + } + } else { + if (ctx.stage == Stage::Fragment) { + if (info.type == TextureType::ColorArrayCube) { + ctx.AddF32("{}=texture({},vec4({}),{});", inst, texture, coords, dref); + } else { + ctx.AddF32("{}=texture({},{}({},{}){});", inst, texture, cast, coords, dref, bias); + } + } else { + ctx.AddF32("{}=textureLod({},{}({},{}),0.0);", inst, texture, cast, coords, dref); + } + } +} + +void EmitImageSampleDrefExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view dref, + std::string_view lod_lc, const IR::Value& offset) { + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + if (sparse_inst) { + throw NotImplementedException("EmitImageSampleDrefExplicitLod Sparse texture samples"); + } + if (info.has_bias) { + throw NotImplementedException("EmitImageSampleDrefExplicitLod Bias texture samples"); + } + if (info.has_lod_clamp) { + throw NotImplementedException("EmitImageSampleDrefExplicitLod Lod clamp samples"); + } + const auto texture{Texture(ctx, info, index)}; + const bool needs_shadow_ext{NeedsShadowLodExt(info.type)}; + const bool use_grad{!ctx.profile.support_gl_texture_shadow_lod && needs_shadow_ext}; + const auto cast{needs_shadow_ext ? "vec4" : "vec3"}; + if (use_grad) { + LOG_WARNING(Shader_GLSL, + "Device lacks GL_EXT_texture_shadow_lod. Using textureGrad fallback"); + if (info.type == TextureType::ColorArrayCube) { + LOG_WARNING(Shader_GLSL, "textureGrad does not support ColorArrayCube. Stubbing"); + ctx.AddF32("{}=0.0f;", inst); + return; + } + const auto d_cast{info.type == TextureType::ColorArray2D ? 
"vec2" : "vec3"}; + ctx.AddF32("{}=textureGrad({},{}({},{}),{}(0),{}(0));", inst, texture, cast, coords, dref, + d_cast, d_cast); + return; + } + if (!offset.IsEmpty()) { + const auto offset_str{GetOffsetVec(ctx, offset)}; + if (info.type == TextureType::ColorArrayCube) { + ctx.AddF32("{}=textureLodOffset({},{},{},{},{});", inst, texture, coords, dref, lod_lc, + offset_str); + } else { + ctx.AddF32("{}=textureLodOffset({},{}({},{}),{},{});", inst, texture, cast, coords, + dref, lod_lc, offset_str); + } + } else { + if (info.type == TextureType::ColorArrayCube) { + ctx.AddF32("{}=textureLod({},{},{},{});", inst, texture, coords, dref, lod_lc); + } else { + ctx.AddF32("{}=textureLod({},{}({},{}),{});", inst, texture, cast, coords, dref, + lod_lc); + } + } +} + +void EmitImageGather(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, const IR::Value& offset, const IR::Value& offset2) { + const auto info{inst.Flags()}; + const auto texture{Texture(ctx, info, index)}; + const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; + const auto sparse_inst{PrepareSparse(inst)}; + const bool supports_sparse{ctx.profile.support_gl_sparse_textures}; + if (sparse_inst && !supports_sparse) { + LOG_WARNING(Shader_GLSL, "Device does not support sparse texture queries. STUBBING"); + ctx.AddU1("{}=true;", *sparse_inst); + } + if (!sparse_inst || !supports_sparse) { + if (offset.IsEmpty()) { + ctx.Add("{}=textureGather({},{},int({}));", texel, texture, coords, + info.gather_component); + return; + } + if (offset2.IsEmpty()) { + ctx.Add("{}=textureGatherOffset({},{},{},int({}));", texel, texture, coords, + GetOffsetVec(ctx, offset), info.gather_component); + return; + } + // PTP + const auto offsets{PtpOffsets(offset, offset2)}; + ctx.Add("{}=textureGatherOffsets({},{},{},int({}));", texel, texture, coords, offsets, + info.gather_component); + return; + } + if (offset.IsEmpty()) { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},int({})));", + *sparse_inst, texture, coords, texel, info.gather_component); + return; + } + if (offset2.IsEmpty()) { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));", + *sparse_inst, texture, CastToIntVec(coords, info), GetOffsetVec(ctx, offset), + texel, info.gather_component); + return; + } + // PTP + const auto offsets{PtpOffsets(offset, offset2)}; + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));", + *sparse_inst, texture, CastToIntVec(coords, info), offsets, texel, + info.gather_component); +} + +void EmitImageGatherDref(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, const IR::Value& offset, const IR::Value& offset2, + std::string_view dref) { + const auto info{inst.Flags()}; + const auto texture{Texture(ctx, info, index)}; + const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; + const auto sparse_inst{PrepareSparse(inst)}; + const bool supports_sparse{ctx.profile.support_gl_sparse_textures}; + if (sparse_inst && !supports_sparse) { + LOG_WARNING(Shader_GLSL, "Device does not support sparse texture queries. 
STUBBING"); + ctx.AddU1("{}=true;", *sparse_inst); + } + if (!sparse_inst || !supports_sparse) { + if (offset.IsEmpty()) { + ctx.Add("{}=textureGather({},{},{});", texel, texture, coords, dref); + return; + } + if (offset2.IsEmpty()) { + ctx.Add("{}=textureGatherOffset({},{},{},{});", texel, texture, coords, dref, + GetOffsetVec(ctx, offset)); + return; + } + // PTP + const auto offsets{PtpOffsets(offset, offset2)}; + ctx.Add("{}=textureGatherOffsets({},{},{},{});", texel, texture, coords, dref, offsets); + return; + } + if (offset.IsEmpty()) { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},{}));", *sparse_inst, + texture, coords, dref, texel); + return; + } + if (offset2.IsEmpty()) { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},,{},{}));", + *sparse_inst, texture, CastToIntVec(coords, info), dref, + GetOffsetVec(ctx, offset), texel); + return; + } + // PTP + const auto offsets{PtpOffsets(offset, offset2)}; + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},,{},{}));", + *sparse_inst, texture, CastToIntVec(coords, info), dref, offsets, texel); +} + +void EmitImageFetch(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view offset, std::string_view lod, + [[maybe_unused]] std::string_view ms) { + const auto info{inst.Flags()}; + if (info.has_bias) { + throw NotImplementedException("EmitImageFetch Bias texture samples"); + } + if (info.has_lod_clamp) { + throw NotImplementedException("EmitImageFetch Lod clamp samples"); + } + const auto texture{Texture(ctx, info, index)}; + const auto sparse_inst{PrepareSparse(inst)}; + const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; + const bool supports_sparse{ctx.profile.support_gl_sparse_textures}; + if (sparse_inst && !supports_sparse) { + LOG_WARNING(Shader_GLSL, "Device does not support sparse texture queries. 
STUBBING"); + ctx.AddU1("{}=true;", *sparse_inst); + } + if (!sparse_inst || !supports_sparse) { + if (!offset.empty()) { + ctx.Add("{}=texelFetchOffset({},{},int({}),{});", texel, texture, + CoordsCastToInt(coords, info), lod, CoordsCastToInt(offset, info)); + } else { + if (info.type == TextureType::Buffer) { + ctx.Add("{}=texelFetch({},int({}));", texel, texture, coords); + } else { + ctx.Add("{}=texelFetch({},{},int({}));", texel, texture, + CoordsCastToInt(coords, info), lod); + } + } + return; + } + if (!offset.empty()) { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));", + *sparse_inst, texture, CastToIntVec(coords, info), lod, + CastToIntVec(offset, info), texel); + } else { + ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchARB({},{},int({}),{}));", + *sparse_inst, texture, CastToIntVec(coords, info), lod, texel); + } +} + +void EmitImageQueryDimensions(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view lod) { + const auto info{inst.Flags()}; + const auto texture{Texture(ctx, info, index)}; + switch (info.type) { + case TextureType::Color1D: + return ctx.AddU32x4( + "{}=uvec4(uint(textureSize({},int({}))),0u,0u,uint(textureQueryLevels({})));", inst, + texture, lod, texture); + case TextureType::ColorArray1D: + case TextureType::Color2D: + case TextureType::ColorCube: + return ctx.AddU32x4( + "{}=uvec4(uvec2(textureSize({},int({}))),0u,uint(textureQueryLevels({})));", inst, + texture, lod, texture); + case TextureType::ColorArray2D: + case TextureType::Color3D: + case TextureType::ColorArrayCube: + return ctx.AddU32x4( + "{}=uvec4(uvec3(textureSize({},int({}))),uint(textureQueryLevels({})));", inst, texture, + lod, texture); + case TextureType::Buffer: + throw NotImplementedException("EmitImageQueryDimensions Texture buffers"); + } + throw LogicError("Unspecified image type {}", info.type.Value()); +} + +void EmitImageQueryLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords) { + const auto info{inst.Flags()}; + const auto texture{Texture(ctx, info, index)}; + return ctx.AddF32x4("{}=vec4(textureQueryLod({},{}),0.0,0.0);", inst, texture, coords); +} + +void EmitImageGradient(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, const IR::Value& derivatives, + const IR::Value& offset, [[maybe_unused]] const IR::Value& lod_clamp) { + const auto info{inst.Flags()}; + if (info.has_lod_clamp) { + throw NotImplementedException("EmitImageGradient Lod clamp samples"); + } + const auto sparse_inst{PrepareSparse(inst)}; + if (sparse_inst) { + throw NotImplementedException("EmitImageGradient Sparse"); + } + if (!offset.IsEmpty()) { + throw NotImplementedException("EmitImageGradient offset"); + } + const auto texture{Texture(ctx, info, index)}; + const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; + const bool multi_component{info.num_derivates > 1 || info.has_lod_clamp}; + const auto derivatives_vec{ctx.var_alloc.Consume(derivatives)}; + if (multi_component) { + ctx.Add("{}=textureGrad({},{},vec2({}.xz),vec2({}.yz));", texel, texture, coords, + derivatives_vec, derivatives_vec); + } else { + ctx.Add("{}=textureGrad({},{},float({}.x),float({}.y));", texel, texture, coords, + derivatives_vec, derivatives_vec); + } +} + +void EmitImageRead(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords) { + const auto info{inst.Flags()}; + const auto sparse_inst{PrepareSparse(inst)}; + if (sparse_inst) { + throw 
NotImplementedException("EmitImageRead Sparse"); + } + const auto image{Image(ctx, info, index)}; + ctx.AddU32x4("{}=uvec4(imageLoad({},{}));", inst, image, CoordsCastToInt(coords, info)); +} + +void EmitImageWrite(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view color) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.Add("imageStore({},{},{});", image, CoordsCastToInt(coords, info), color); +} + +void EmitImageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicAdd({},{},{});", inst, image, CoordsCastToInt(coords, info), value); +} + +void EmitImageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicMin({},{},int({}));", inst, image, CoordsCastToInt(coords, info), + value); +} + +void EmitImageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicMin({},{},uint({}));", inst, image, CoordsCastToInt(coords, info), + value); +} + +void EmitImageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicMax({},{},int({}));", inst, image, CoordsCastToInt(coords, info), + value); +} + +void EmitImageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicMax({},{},uint({}));", inst, image, CoordsCastToInt(coords, info), + value); +} + +void EmitImageAtomicInc32(EmitContext&, IR::Inst&, const IR::Value&, std::string_view, + std::string_view) { + NotImplemented(); +} + +void EmitImageAtomicDec32(EmitContext&, IR::Inst&, const IR::Value&, std::string_view, + std::string_view) { + NotImplemented(); +} + +void EmitImageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicAnd({},{},{});", inst, image, CoordsCastToInt(coords, info), value); +} + +void EmitImageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicOr({},{},{});", inst, image, CoordsCastToInt(coords, info), value); +} + +void EmitImageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, index)}; + ctx.AddU32("{}=imageAtomicXor({},{},{});", inst, image, CoordsCastToInt(coords, info), value); +} + +void EmitImageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value) { + const auto info{inst.Flags()}; + const auto image{Image(ctx, info, 
index)}; + ctx.AddU32("{}=imageAtomicExchange({},{},{});", inst, image, CoordsCastToInt(coords, info), + value); +} + +void EmitBindlessImageSampleImplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageSampleExplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageSampleDrefImplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageSampleDrefExplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageGather(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageGatherDref(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageFetch(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageQueryDimensions(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageQueryLod(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageGradient(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageRead(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageWrite(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageSampleImplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageSampleExplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageSampleDrefImplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageSampleDrefExplicitLod(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageGather(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageGatherDref(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageFetch(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageQueryDimensions(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageQueryLod(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageGradient(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageRead(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageWrite(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicIAdd32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicSMin32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicUMin32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicSMax32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicUMax32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicInc32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicDec32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicAnd32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicOr32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicXor32(EmitContext&) { + NotImplemented(); +} + +void EmitBindlessImageAtomicExchange32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicIAdd32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicSMin32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicUMin32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicSMax32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicUMax32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicInc32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicDec32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicAnd32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicOr32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicXor32(EmitContext&) { + NotImplemented(); +} + +void EmitBoundImageAtomicExchange32(EmitContext&) { + 
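+    // As with all of the bindless/bound image stubs above, this throws through
+    // the NotImplemented() macro declared in emit_glsl_instructions.h below,
+    // which raises NotImplementedException tagged with __func__.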
NotImplemented(); +} + +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h new file mode 100755 index 000000000..5936d086f --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h @@ -0,0 +1,702 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include + +#include "common/common_types.h" + +namespace Shader::IR { +enum class Attribute : u64; +enum class Patch : u64; +class Inst; +class Value; +} // namespace Shader::IR + +namespace Shader::Backend::GLSL { +class EmitContext; + +#define NotImplemented() throw NotImplementedException("GLSL instruction {}", __func__) + +// Microinstruction emitters +void EmitPhi(EmitContext& ctx, IR::Inst& inst); +void EmitVoid(EmitContext& ctx); +void EmitIdentity(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitConditionRef(EmitContext& ctx, IR::Inst& inst, const IR::Value& value); +void EmitReference(EmitContext& ctx, const IR::Value& value); +void EmitPhiMove(EmitContext& ctx, const IR::Value& phi, const IR::Value& value); +void EmitJoin(EmitContext& ctx); +void EmitDemoteToHelperInvocation(EmitContext& ctx); +void EmitBarrier(EmitContext& ctx); +void EmitWorkgroupMemoryBarrier(EmitContext& ctx); +void EmitDeviceMemoryBarrier(EmitContext& ctx); +void EmitPrologue(EmitContext& ctx); +void EmitEpilogue(EmitContext& ctx); +void EmitEmitVertex(EmitContext& ctx, const IR::Value& stream); +void EmitEndPrimitive(EmitContext& ctx, const IR::Value& stream); +void EmitGetRegister(EmitContext& ctx); +void EmitSetRegister(EmitContext& ctx); +void EmitGetPred(EmitContext& ctx); +void EmitSetPred(EmitContext& ctx); +void EmitSetGotoVariable(EmitContext& ctx); +void EmitGetGotoVariable(EmitContext& ctx); +void EmitSetIndirectBranchVariable(EmitContext& ctx); +void EmitGetIndirectBranchVariable(EmitContext& ctx); +void EmitGetCbufU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitGetCbufS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitGetCbufU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitGetCbufS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitGetCbufU32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitGetCbufF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitGetCbufU32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitGetAttribute(EmitContext& ctx, IR::Inst& inst, IR::Attribute attr, + std::string_view vertex); +void EmitSetAttribute(EmitContext& ctx, IR::Attribute attr, std::string_view value, + std::string_view vertex); +void EmitGetAttributeIndexed(EmitContext& ctx, IR::Inst& inst, std::string_view offset, + std::string_view vertex); +void EmitSetAttributeIndexed(EmitContext& ctx, std::string_view offset, std::string_view value, + std::string_view vertex); +void EmitGetPatch(EmitContext& ctx, IR::Inst& inst, IR::Patch patch); +void EmitSetPatch(EmitContext& ctx, IR::Patch patch, std::string_view value); +void EmitSetFragColor(EmitContext& ctx, u32 index, u32 component, std::string_view value); +void EmitSetSampleMask(EmitContext& ctx, std::string_view 
value); +void EmitSetFragDepth(EmitContext& ctx, std::string_view value); +void EmitGetZFlag(EmitContext& ctx); +void EmitGetSFlag(EmitContext& ctx); +void EmitGetCFlag(EmitContext& ctx); +void EmitGetOFlag(EmitContext& ctx); +void EmitSetZFlag(EmitContext& ctx); +void EmitSetSFlag(EmitContext& ctx); +void EmitSetCFlag(EmitContext& ctx); +void EmitSetOFlag(EmitContext& ctx); +void EmitWorkgroupId(EmitContext& ctx, IR::Inst& inst); +void EmitLocalInvocationId(EmitContext& ctx, IR::Inst& inst); +void EmitInvocationId(EmitContext& ctx, IR::Inst& inst); +void EmitSampleId(EmitContext& ctx, IR::Inst& inst); +void EmitIsHelperInvocation(EmitContext& ctx, IR::Inst& inst); +void EmitYDirection(EmitContext& ctx, IR::Inst& inst); +void EmitLoadLocal(EmitContext& ctx, IR::Inst& inst, std::string_view word_offset); +void EmitWriteLocal(EmitContext& ctx, std::string_view word_offset, std::string_view value); +void EmitUndefU1(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU8(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU16(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU32(EmitContext& ctx, IR::Inst& inst); +void EmitUndefU64(EmitContext& ctx, IR::Inst& inst); +void EmitLoadGlobalU8(EmitContext& ctx); +void EmitLoadGlobalS8(EmitContext& ctx); +void EmitLoadGlobalU16(EmitContext& ctx); +void EmitLoadGlobalS16(EmitContext& ctx); +void EmitLoadGlobal32(EmitContext& ctx, IR::Inst& inst, std::string_view address); +void EmitLoadGlobal64(EmitContext& ctx, IR::Inst& inst, std::string_view address); +void EmitLoadGlobal128(EmitContext& ctx, IR::Inst& inst, std::string_view address); +void EmitWriteGlobalU8(EmitContext& ctx); +void EmitWriteGlobalS8(EmitContext& ctx); +void EmitWriteGlobalU16(EmitContext& ctx); +void EmitWriteGlobalS16(EmitContext& ctx); +void EmitWriteGlobal32(EmitContext& ctx, std::string_view address, std::string_view value); +void EmitWriteGlobal64(EmitContext& ctx, std::string_view address, std::string_view value); +void EmitWriteGlobal128(EmitContext& ctx, std::string_view address, std::string_view value); +void EmitLoadStorageU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitLoadStorageS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitLoadStorageU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitLoadStorageS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitLoadStorage64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitLoadStorage128(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset); +void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value); +void EmitWriteStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value); +void EmitWriteStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value); +void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value); +void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value); +void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + 
std::string_view value); +void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value); +void EmitLoadSharedU8(EmitContext& ctx, IR::Inst& inst, std::string_view offset); +void EmitLoadSharedS8(EmitContext& ctx, IR::Inst& inst, std::string_view offset); +void EmitLoadSharedU16(EmitContext& ctx, IR::Inst& inst, std::string_view offset); +void EmitLoadSharedS16(EmitContext& ctx, IR::Inst& inst, std::string_view offset); +void EmitLoadSharedU32(EmitContext& ctx, IR::Inst& inst, std::string_view offset); +void EmitLoadSharedU64(EmitContext& ctx, IR::Inst& inst, std::string_view offset); +void EmitLoadSharedU128(EmitContext& ctx, IR::Inst& inst, std::string_view offset); +void EmitWriteSharedU8(EmitContext& ctx, std::string_view offset, std::string_view value); +void EmitWriteSharedU16(EmitContext& ctx, std::string_view offset, std::string_view value); +void EmitWriteSharedU32(EmitContext& ctx, std::string_view offset, std::string_view value); +void EmitWriteSharedU64(EmitContext& ctx, std::string_view offset, std::string_view value); +void EmitWriteSharedU128(EmitContext& ctx, std::string_view offset, std::string_view value); +void EmitCompositeConstructU32x2(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2); +void EmitCompositeConstructU32x3(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3); +void EmitCompositeConstructU32x4(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3, std::string_view e4); +void EmitCompositeExtractU32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index); +void EmitCompositeExtractU32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index); +void EmitCompositeExtractU32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index); +void EmitCompositeInsertU32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index); +void EmitCompositeInsertU32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index); +void EmitCompositeInsertU32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index); +void EmitCompositeConstructF16x2(EmitContext& ctx, std::string_view e1, std::string_view e2); +void EmitCompositeConstructF16x3(EmitContext& ctx, std::string_view e1, std::string_view e2, + std::string_view e3); +void EmitCompositeConstructF16x4(EmitContext& ctx, std::string_view e1, std::string_view e2, + std::string_view e3, std::string_view e4); +void EmitCompositeExtractF16x2(EmitContext& ctx, std::string_view composite, u32 index); +void EmitCompositeExtractF16x3(EmitContext& ctx, std::string_view composite, u32 index); +void EmitCompositeExtractF16x4(EmitContext& ctx, std::string_view composite, u32 index); +void EmitCompositeInsertF16x2(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index); +void EmitCompositeInsertF16x3(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index); +void EmitCompositeInsertF16x4(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index); +void EmitCompositeConstructF32x2(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2); +void EmitCompositeConstructF32x3(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3); +void 
EmitCompositeConstructF32x4(EmitContext& ctx, IR::Inst& inst, std::string_view e1, + std::string_view e2, std::string_view e3, std::string_view e4); +void EmitCompositeExtractF32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index); +void EmitCompositeExtractF32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index); +void EmitCompositeExtractF32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + u32 index); +void EmitCompositeInsertF32x2(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index); +void EmitCompositeInsertF32x3(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index); +void EmitCompositeInsertF32x4(EmitContext& ctx, IR::Inst& inst, std::string_view composite, + std::string_view object, u32 index); +void EmitCompositeConstructF64x2(EmitContext& ctx); +void EmitCompositeConstructF64x3(EmitContext& ctx); +void EmitCompositeConstructF64x4(EmitContext& ctx); +void EmitCompositeExtractF64x2(EmitContext& ctx); +void EmitCompositeExtractF64x3(EmitContext& ctx); +void EmitCompositeExtractF64x4(EmitContext& ctx); +void EmitCompositeInsertF64x2(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index); +void EmitCompositeInsertF64x3(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index); +void EmitCompositeInsertF64x4(EmitContext& ctx, std::string_view composite, std::string_view object, + u32 index); +void EmitSelectU1(EmitContext& ctx, IR::Inst& inst, std::string_view cond, + std::string_view true_value, std::string_view false_value); +void EmitSelectU8(EmitContext& ctx, std::string_view cond, std::string_view true_value, + std::string_view false_value); +void EmitSelectU16(EmitContext& ctx, std::string_view cond, std::string_view true_value, + std::string_view false_value); +void EmitSelectU32(EmitContext& ctx, IR::Inst& inst, std::string_view cond, + std::string_view true_value, std::string_view false_value); +void EmitSelectU64(EmitContext& ctx, IR::Inst& inst, std::string_view cond, + std::string_view true_value, std::string_view false_value); +void EmitSelectF16(EmitContext& ctx, std::string_view cond, std::string_view true_value, + std::string_view false_value); +void EmitSelectF32(EmitContext& ctx, IR::Inst& inst, std::string_view cond, + std::string_view true_value, std::string_view false_value); +void EmitSelectF64(EmitContext& ctx, IR::Inst& inst, std::string_view cond, + std::string_view true_value, std::string_view false_value); +void EmitBitCastU16F16(EmitContext& ctx, IR::Inst& inst); +void EmitBitCastU32F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitBitCastU64F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitBitCastF16U16(EmitContext& ctx, IR::Inst& inst); +void EmitBitCastF32U32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitBitCastF64U64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitPackUint2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitUnpackUint2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitPackFloat2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitUnpackFloat2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitPackHalf2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitUnpackHalf2x16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void 
EmitPackDouble2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitUnpackDouble2x32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitGetZeroFromOp(EmitContext& ctx); +void EmitGetSignFromOp(EmitContext& ctx); +void EmitGetCarryFromOp(EmitContext& ctx); +void EmitGetOverflowFromOp(EmitContext& ctx); +void EmitGetSparseFromOp(EmitContext& ctx); +void EmitGetInBoundsFromOp(EmitContext& ctx); +void EmitFPAbs16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPAbs32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPAbs64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPAdd16(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPAdd32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPAdd64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPFma16(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b, + std::string_view c); +void EmitFPFma32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b, + std::string_view c); +void EmitFPFma64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b, + std::string_view c); +void EmitFPMax32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPMax64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPMin32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPMin64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPMul16(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPMul32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPMul64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitFPNeg16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPNeg32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPNeg64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPSin(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPCos(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPExp2(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPLog2(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPRecip32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPRecip64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPRecipSqrt32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPRecipSqrt64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPSqrt(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPSaturate16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPSaturate32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPSaturate64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPClamp16(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view min_value, std::string_view max_value); +void EmitFPClamp32(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view min_value, std::string_view max_value); +void EmitFPClamp64(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view min_value, std::string_view max_value); +void 
EmitFPRoundEven16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPRoundEven32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPRoundEven64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPFloor16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPFloor32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPFloor64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPCeil16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPCeil32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPCeil64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPTrunc16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPTrunc32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPTrunc64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPOrdEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPOrdEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitFPOrdEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitFPUnordEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPUnordEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdNotEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPOrdNotEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdNotEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordNotEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPUnordNotEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordNotEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdLessThan16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPOrdLessThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdLessThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordLessThan16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPUnordLessThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordLessThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdGreaterThan16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPOrdGreaterThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdGreaterThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordGreaterThan16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPUnordGreaterThan32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordGreaterThan64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdLessThanEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPOrdLessThanEqual32(EmitContext& ctx, IR::Inst& inst, 
std::string_view lhs, + std::string_view rhs); +void EmitFPOrdLessThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordLessThanEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPUnordLessThanEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordLessThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdGreaterThanEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPOrdGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPOrdGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordGreaterThanEqual16(EmitContext& ctx, std::string_view lhs, std::string_view rhs); +void EmitFPUnordGreaterThanEqual32(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPUnordGreaterThanEqual64(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitFPIsNan16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPIsNan32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFPIsNan64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitIAdd32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitIAdd64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitISub32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitISub64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitIMul32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitINeg32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitINeg64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitIAbs32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitShiftLeftLogical32(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift); +void EmitShiftLeftLogical64(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift); +void EmitShiftRightLogical32(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift); +void EmitShiftRightLogical64(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift); +void EmitShiftRightArithmetic32(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift); +void EmitShiftRightArithmetic64(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift); +void EmitBitwiseAnd32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitBitwiseOr32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitBitwiseXor32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitBitFieldInsert(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view insert, std::string_view offset, std::string_view count); +void EmitBitFieldSExtract(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view offset, std::string_view count); +void EmitBitFieldUExtract(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view offset, std::string_view count); +void EmitBitReverse32(EmitContext& ctx, IR::Inst& inst, std::string_view 
value); +void EmitBitCount32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitBitwiseNot32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFindSMsb32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitFindUMsb32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitSMin32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitUMin32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitSMax32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitUMax32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitSClamp32(EmitContext& ctx, IR::Inst& inst, std::string_view value, std::string_view min, + std::string_view max); +void EmitUClamp32(EmitContext& ctx, IR::Inst& inst, std::string_view value, std::string_view min, + std::string_view max); +void EmitSLessThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitULessThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitIEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitSLessThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitULessThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitSGreaterThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitUGreaterThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitINotEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs); +void EmitSGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitUGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, + std::string_view rhs); +void EmitSharedAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicSMin32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicUMin32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicSMax32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicUMax32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicInc32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicDec32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicAnd32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicOr32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicXor32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset, + std::string_view value); +void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const 
IR::Value& offset, std::string_view value); +void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicAddF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicMinF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + 
const IR::Value& offset, std::string_view value); +void EmitStorageAtomicMaxF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset, std::string_view value); +void EmitGlobalAtomicIAdd32(EmitContext& ctx); +void EmitGlobalAtomicSMin32(EmitContext& ctx); +void EmitGlobalAtomicUMin32(EmitContext& ctx); +void EmitGlobalAtomicSMax32(EmitContext& ctx); +void EmitGlobalAtomicUMax32(EmitContext& ctx); +void EmitGlobalAtomicInc32(EmitContext& ctx); +void EmitGlobalAtomicDec32(EmitContext& ctx); +void EmitGlobalAtomicAnd32(EmitContext& ctx); +void EmitGlobalAtomicOr32(EmitContext& ctx); +void EmitGlobalAtomicXor32(EmitContext& ctx); +void EmitGlobalAtomicExchange32(EmitContext& ctx); +void EmitGlobalAtomicIAdd64(EmitContext& ctx); +void EmitGlobalAtomicSMin64(EmitContext& ctx); +void EmitGlobalAtomicUMin64(EmitContext& ctx); +void EmitGlobalAtomicSMax64(EmitContext& ctx); +void EmitGlobalAtomicUMax64(EmitContext& ctx); +void EmitGlobalAtomicInc64(EmitContext& ctx); +void EmitGlobalAtomicDec64(EmitContext& ctx); +void EmitGlobalAtomicAnd64(EmitContext& ctx); +void EmitGlobalAtomicOr64(EmitContext& ctx); +void EmitGlobalAtomicXor64(EmitContext& ctx); +void EmitGlobalAtomicExchange64(EmitContext& ctx); +void EmitGlobalAtomicAddF32(EmitContext& ctx); +void EmitGlobalAtomicAddF16x2(EmitContext& ctx); +void EmitGlobalAtomicAddF32x2(EmitContext& ctx); +void EmitGlobalAtomicMinF16x2(EmitContext& ctx); +void EmitGlobalAtomicMinF32x2(EmitContext& ctx); +void EmitGlobalAtomicMaxF16x2(EmitContext& ctx); +void EmitGlobalAtomicMaxF32x2(EmitContext& ctx); +void EmitLogicalOr(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitLogicalAnd(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitLogicalXor(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b); +void EmitLogicalNot(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS16F16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS16F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS16F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS32F16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS32F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS32F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS64F16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS64F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertS64F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU16F16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU16F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU16F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU32F16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU32F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU32F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU64F16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU64F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU64F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertU64U32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void 
EmitConvertU32U64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32F16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32F64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64F32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16S8(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16S16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16S32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16S64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16U8(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16U16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16U32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF16U64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32S8(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32S16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32S32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32S64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32U8(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32U16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32U32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF32U64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64S8(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64S16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64S32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64S64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64U8(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64U16(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64U32(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitConvertF64U64(EmitContext& ctx, IR::Inst& inst, std::string_view value); +void EmitBindlessImageSampleImplicitLod(EmitContext&); +void EmitBindlessImageSampleExplicitLod(EmitContext&); +void EmitBindlessImageSampleDrefImplicitLod(EmitContext&); +void EmitBindlessImageSampleDrefExplicitLod(EmitContext&); +void EmitBindlessImageGather(EmitContext&); +void EmitBindlessImageGatherDref(EmitContext&); +void EmitBindlessImageFetch(EmitContext&); +void EmitBindlessImageQueryDimensions(EmitContext&); +void EmitBindlessImageQueryLod(EmitContext&); +void EmitBindlessImageGradient(EmitContext&); +void EmitBindlessImageRead(EmitContext&); +void EmitBindlessImageWrite(EmitContext&); +void EmitBoundImageSampleImplicitLod(EmitContext&); +void EmitBoundImageSampleExplicitLod(EmitContext&); +void EmitBoundImageSampleDrefImplicitLod(EmitContext&); +void EmitBoundImageSampleDrefExplicitLod(EmitContext&); +void EmitBoundImageGather(EmitContext&); +void EmitBoundImageGatherDref(EmitContext&); +void EmitBoundImageFetch(EmitContext&); +void EmitBoundImageQueryDimensions(EmitContext&); +void EmitBoundImageQueryLod(EmitContext&); +void EmitBoundImageGradient(EmitContext&); +void EmitBoundImageRead(EmitContext&); +void 
EmitBoundImageWrite(EmitContext&); +void EmitImageSampleImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view bias_lc, + const IR::Value& offset); +void EmitImageSampleExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view lod_lc, + const IR::Value& offset); +void EmitImageSampleDrefImplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view dref, + std::string_view bias_lc, const IR::Value& offset); +void EmitImageSampleDrefExplicitLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view dref, + std::string_view lod_lc, const IR::Value& offset); +void EmitImageGather(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, const IR::Value& offset, const IR::Value& offset2); +void EmitImageGatherDref(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, const IR::Value& offset, const IR::Value& offset2, + std::string_view dref); +void EmitImageFetch(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view offset, std::string_view lod, + std::string_view ms); +void EmitImageQueryDimensions(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view lod); +void EmitImageQueryLod(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords); +void EmitImageGradient(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, const IR::Value& derivatives, + const IR::Value& offset, const IR::Value& lod_clamp); +void EmitImageRead(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords); +void EmitImageWrite(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view color); +void EmitBindlessImageAtomicIAdd32(EmitContext&); +void EmitBindlessImageAtomicSMin32(EmitContext&); +void EmitBindlessImageAtomicUMin32(EmitContext&); +void EmitBindlessImageAtomicSMax32(EmitContext&); +void EmitBindlessImageAtomicUMax32(EmitContext&); +void EmitBindlessImageAtomicInc32(EmitContext&); +void EmitBindlessImageAtomicDec32(EmitContext&); +void EmitBindlessImageAtomicAnd32(EmitContext&); +void EmitBindlessImageAtomicOr32(EmitContext&); +void EmitBindlessImageAtomicXor32(EmitContext&); +void EmitBindlessImageAtomicExchange32(EmitContext&); +void EmitBoundImageAtomicIAdd32(EmitContext&); +void EmitBoundImageAtomicSMin32(EmitContext&); +void EmitBoundImageAtomicUMin32(EmitContext&); +void EmitBoundImageAtomicSMax32(EmitContext&); +void EmitBoundImageAtomicUMax32(EmitContext&); +void EmitBoundImageAtomicInc32(EmitContext&); +void EmitBoundImageAtomicDec32(EmitContext&); +void EmitBoundImageAtomicAnd32(EmitContext&); +void EmitBoundImageAtomicOr32(EmitContext&); +void EmitBoundImageAtomicXor32(EmitContext&); +void EmitBoundImageAtomicExchange32(EmitContext&); +void EmitImageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + 
std::string_view coords, std::string_view value); +void EmitImageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitImageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& index, + std::string_view coords, std::string_view value); +void EmitLaneId(EmitContext& ctx, IR::Inst& inst); +void EmitVoteAll(EmitContext& ctx, IR::Inst& inst, std::string_view pred); +void EmitVoteAny(EmitContext& ctx, IR::Inst& inst, std::string_view pred); +void EmitVoteEqual(EmitContext& ctx, IR::Inst& inst, std::string_view pred); +void EmitSubgroupBallot(EmitContext& ctx, IR::Inst& inst, std::string_view pred); +void EmitSubgroupEqMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupLtMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupLeMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupGtMask(EmitContext& ctx, IR::Inst& inst); +void EmitSubgroupGeMask(EmitContext& ctx, IR::Inst& inst); +void EmitShuffleIndex(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view index, std::string_view clamp, + std::string_view segmentation_mask); +void EmitShuffleUp(EmitContext& ctx, IR::Inst& inst, std::string_view value, std::string_view index, + std::string_view clamp, std::string_view segmentation_mask); +void EmitShuffleDown(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view index, std::string_view clamp, + std::string_view segmentation_mask); +void EmitShuffleButterfly(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view index, std::string_view clamp, + std::string_view segmentation_mask); +void EmitFSwizzleAdd(EmitContext& ctx, IR::Inst& inst, std::string_view op_a, std::string_view op_b, + std::string_view swizzle); +void EmitDPdxFine(EmitContext& ctx, IR::Inst& inst, std::string_view op_a); +void EmitDPdyFine(EmitContext& ctx, IR::Inst& inst, std::string_view op_a); +void EmitDPdxCoarse(EmitContext& ctx, IR::Inst& inst, std::string_view op_a); +void EmitDPdyCoarse(EmitContext& ctx, IR::Inst& inst, std::string_view op_a); + +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_integer.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_integer.cpp new file mode 100755 index 000000000..38419f88f --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_integer.cpp @@ -0,0 +1,253 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
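A note on the EmitIAdd32 implementation that appears below in this file: the emitted GLSL detects signed overflow by comparing against INT_MAX in unsigned arithmetic instead of performing a wrapping signed addition. A host-side C++ sketch of the same predicate, for illustration only (the helper name is hypothetical; the backend emits this logic as GLSL text via fmt::format):

    #include <cstdint>
    #include <limits>

    // Mirrors the predicate EmitIAdd32 emits as GLSL source: for a >= 0 the
    // sum can only overflow upward (b > INT_MAX - a); for a < 0 the emitted
    // check compares b against the same unsigned difference downward.
    bool EmittedOverflowCheck(uint32_t a, uint32_t b) {
        constexpr uint32_t s32_max = static_cast<uint32_t>(std::numeric_limits<int32_t>::max());
        const uint32_t sub_a = s32_max - a; // emitted as "{s32_max}u-{a}"
        return static_cast<int32_t>(a) >= 0
                   ? static_cast<int32_t>(b) > static_cast<int32_t>(sub_a)
                   : static_cast<int32_t>(b) < static_cast<int32_t>(sub_a);
    }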
+
+#include <limits>
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+void SetZeroFlag(EmitContext& ctx, IR::Inst& inst, std::string_view result) {
+    IR::Inst* const zero{inst.GetAssociatedPseudoOperation(IR::Opcode::GetZeroFromOp)};
+    if (!zero) {
+        return;
+    }
+    ctx.AddU1("{}={}==0;", *zero, result);
+    zero->Invalidate();
+}
+
+void SetSignFlag(EmitContext& ctx, IR::Inst& inst, std::string_view result) {
+    IR::Inst* const sign{inst.GetAssociatedPseudoOperation(IR::Opcode::GetSignFromOp)};
+    if (!sign) {
+        return;
+    }
+    ctx.AddU1("{}=int({})<0;", *sign, result);
+    sign->Invalidate();
+}
+
+void BitwiseLogicalOp(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b,
+                      char lop) {
+    const auto result{ctx.var_alloc.Define(inst, GlslVarType::U32)};
+    ctx.Add("{}={}{}{};", result, a, lop, b);
+    SetZeroFlag(ctx, inst, result);
+    SetSignFlag(ctx, inst, result);
+}
+} // Anonymous namespace
+
+void EmitIAdd32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    // Compute the overflow CC first as it requires the original operand values,
+    // which may be overwritten by the result of the addition
+    if (IR::Inst* const overflow{inst.GetAssociatedPseudoOperation(IR::Opcode::GetOverflowFromOp)}) {
+        // https://stackoverflow.com/questions/55468823/how-to-detect-integer-overflow-in-c
+        constexpr u32 s32_max{static_cast<u32>(std::numeric_limits<s32>::max())};
+        const auto sub_a{fmt::format("{}u-{}", s32_max, a)};
+        const auto positive_result{fmt::format("int({})>int({})", b, sub_a)};
+        const auto negative_result{fmt::format("int({})<int({})", b, sub_a)};
+        ctx.AddU1("{}=int({})>=0?{}:{};", *overflow, a, positive_result, negative_result);
+        overflow->Invalidate();
+    }
+    const auto result{ctx.var_alloc.Define(inst, GlslVarType::U32)};
+    if (IR::Inst* const carry{inst.GetAssociatedPseudoOperation(IR::Opcode::GetCarryFromOp)}) {
+        ctx.uses_cc_carry = true;
+        ctx.Add("{}=uaddCarry({},{},carry);", result, a, b);
+        ctx.AddU1("{}=carry!=0;", *carry);
+        carry->Invalidate();
+    } else {
+        ctx.Add("{}={}+{};", result, a, b);
+    }
+    SetZeroFlag(ctx, inst, result);
+    SetSignFlag(ctx, inst, result);
+}
+
+void EmitIAdd64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    ctx.AddU64("{}={}+{};", inst, a, b);
+}
+
+void EmitISub32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    ctx.AddU32("{}={}-{};", inst, a, b);
+}
+
+void EmitISub64(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    ctx.AddU64("{}={}-{};", inst, a, b);
+}
+
+void EmitIMul32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    ctx.AddU32("{}=uint({}*{});", inst, a, b);
+}
+
+void EmitINeg32(EmitContext& ctx, IR::Inst& inst, std::string_view value) {
+    ctx.AddU32("{}=uint(-({}));", inst, value);
+}
+
+void EmitINeg64(EmitContext& ctx, IR::Inst& inst, std::string_view value) {
+    ctx.AddU64("{}=-({});", inst, value);
+}
+
+void EmitIAbs32(EmitContext& ctx, IR::Inst& inst, std::string_view value) {
+    ctx.AddU32("{}=abs(int({}));", inst, value);
+}
+
+void EmitShiftLeftLogical32(EmitContext& ctx, IR::Inst& inst, std::string_view base,
+                            std::string_view shift) {
+    ctx.AddU32("{}={}<<{};", inst, base, shift);
+}
+
+void EmitShiftLeftLogical64(EmitContext& ctx, IR::Inst& inst, std::string_view base,
+                            std::string_view shift) {
+    ctx.AddU64("{}={}<<{};", inst, base, shift);
+}
+
+void
EmitShiftRightLogical32(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift) { + ctx.AddU32("{}={}>>{};", inst, base, shift); +} + +void EmitShiftRightLogical64(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift) { + ctx.AddU64("{}={}>>{};", inst, base, shift); +} + +void EmitShiftRightArithmetic32(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift) { + ctx.AddU32("{}=int({})>>{};", inst, base, shift); +} + +void EmitShiftRightArithmetic64(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view shift) { + ctx.AddU64("{}=int64_t({})>>{};", inst, base, shift); +} + +void EmitBitwiseAnd32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + BitwiseLogicalOp(ctx, inst, a, b, '&'); +} + +void EmitBitwiseOr32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + BitwiseLogicalOp(ctx, inst, a, b, '|'); +} + +void EmitBitwiseXor32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + BitwiseLogicalOp(ctx, inst, a, b, '^'); +} + +void EmitBitFieldInsert(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view insert, std::string_view offset, std::string_view count) { + ctx.AddU32("{}=bitfieldInsert({},{},int({}),int({}));", inst, base, insert, offset, count); +} + +void EmitBitFieldSExtract(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view offset, std::string_view count) { + const auto result{ctx.var_alloc.Define(inst, GlslVarType::U32)}; + ctx.Add("{}=uint(bitfieldExtract(int({}),int({}),int({})));", result, base, offset, count); + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); +} + +void EmitBitFieldUExtract(EmitContext& ctx, IR::Inst& inst, std::string_view base, + std::string_view offset, std::string_view count) { + const auto result{ctx.var_alloc.Define(inst, GlslVarType::U32)}; + ctx.Add("{}=uint(bitfieldExtract(uint({}),int({}),int({})));", result, base, offset, count); + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); +} + +void EmitBitReverse32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=bitfieldReverse({});", inst, value); +} + +void EmitBitCount32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=bitCount({});", inst, value); +} + +void EmitBitwiseNot32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=~{};", inst, value); +} + +void EmitFindSMsb32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=findMSB(int({}));", inst, value); +} + +void EmitFindUMsb32(EmitContext& ctx, IR::Inst& inst, std::string_view value) { + ctx.AddU32("{}=findMSB(uint({}));", inst, value); +} + +void EmitSMin32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + ctx.AddU32("{}=min(int({}),int({}));", inst, a, b); +} + +void EmitUMin32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + ctx.AddU32("{}=min(uint({}),uint({}));", inst, a, b); +} + +void EmitSMax32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + ctx.AddU32("{}=max(int({}),int({}));", inst, a, b); +} + +void EmitUMax32(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) { + ctx.AddU32("{}=max(uint({}),uint({}));", inst, a, b); +} + +void EmitSClamp32(EmitContext& ctx, IR::Inst& inst, std::string_view value, std::string_view min, + std::string_view max) { + const auto 
result{ctx.var_alloc.Define(inst, GlslVarType::U32)};
+    ctx.Add("{}=clamp(int({}),int({}),int({}));", result, value, min, max);
+    SetZeroFlag(ctx, inst, result);
+    SetSignFlag(ctx, inst, result);
+}
+
+void EmitUClamp32(EmitContext& ctx, IR::Inst& inst, std::string_view value, std::string_view min,
+                  std::string_view max) {
+    const auto result{ctx.var_alloc.Define(inst, GlslVarType::U32)};
+    ctx.Add("{}=clamp(uint({}),uint({}),uint({}));", result, value, min, max);
+    SetZeroFlag(ctx, inst, result);
+    SetSignFlag(ctx, inst, result);
+}
+
+void EmitSLessThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs) {
+    ctx.AddU1("{}=int({})<int({});", inst, lhs, rhs);
+}
+
+void EmitULessThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs) {
+    ctx.AddU1("{}=uint({})<uint({});", inst, lhs, rhs);
+}
+
+void EmitIEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs) {
+    ctx.AddU1("{}={}=={};", inst, lhs, rhs);
+}
+
+void EmitSLessThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                        std::string_view rhs) {
+    ctx.AddU1("{}=int({})<=int({});", inst, lhs, rhs);
+}
+
+void EmitULessThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                        std::string_view rhs) {
+    ctx.AddU1("{}=uint({})<=uint({});", inst, lhs, rhs);
+}
+
+void EmitSGreaterThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                      std::string_view rhs) {
+    ctx.AddU1("{}=int({})>int({});", inst, lhs, rhs);
+}
+
+void EmitUGreaterThan(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                      std::string_view rhs) {
+    ctx.AddU1("{}=uint({})>uint({});", inst, lhs, rhs);
+}
+
+void EmitINotEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs, std::string_view rhs) {
+    ctx.AddU1("{}={}!={};", inst, lhs, rhs);
+}
+
+void EmitSGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                           std::string_view rhs) {
+    ctx.AddU1("{}=int({})>=int({});", inst, lhs, rhs);
+}
+
+void EmitUGreaterThanEqual(EmitContext& ctx, IR::Inst& inst, std::string_view lhs,
+                           std::string_view rhs) {
+    ctx.AddU1("{}=uint({})>=uint({});", inst, lhs, rhs);
+}
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_logical.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_logical.cpp
new file mode 100755
index 000000000..338ff4bd6
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_logical.cpp
@@ -0,0 +1,28 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLSL {
+
+void EmitLogicalOr(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    ctx.AddU1("{}={}||{};", inst, a, b);
+}
+
+void EmitLogicalAnd(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    ctx.AddU1("{}={}&&{};", inst, a, b);
+}
+
+void EmitLogicalXor(EmitContext& ctx, IR::Inst& inst, std::string_view a, std::string_view b) {
+    ctx.AddU1("{}={}^^{};", inst, a, b);
+}
+
+void EmitLogicalNot(EmitContext& ctx, IR::Inst& inst, std::string_view value) {
+    ctx.AddU1("{}=!{};", inst, value);
+}
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
new file mode 100755
index 000000000..e3957491f
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
@@ -0,0 +1,202 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
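The cas_loop string defined just below expands to a GLSL compare-and-swap loop that emulates byte and halfword SSBO stores on top of a 32-bit atomicCompSwap. A self-contained C++ analogue using std::atomic, purely illustrative (helper name and signature are hypothetical):

    #include <atomic>
    #include <cstdint>

    // Read the containing 32-bit word, splice `value` into `num_bits` bits at
    // `bit_offset` (what GLSL bitfieldInsert does), and retry until no other
    // thread modified the word in between, matching atomicCompSwap semantics.
    void StoreBitsCas(std::atomic<uint32_t>& word, uint32_t value, uint32_t bit_offset,
                      uint32_t num_bits) {
        uint32_t old_value = word.load();
        for (;;) {
            const uint32_t mask = ((num_bits < 32 ? (1u << num_bits) : 0u) - 1u) << bit_offset;
            const uint32_t inserted = (old_value & ~mask) | ((value << bit_offset) & mask);
            if (word.compare_exchange_weak(old_value, inserted)) {
                break; // cas_result == old_value in the emitted GLSL
            }
        }
    }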
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/profile.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+constexpr char cas_loop[]{"for(;;){{uint old_value={};uint "
+                          "cas_result=atomicCompSwap({},old_value,bitfieldInsert({},{},{},{}));"
+                          "if(cas_result==old_value){{break;}}}}"};
+
+void SsboWriteCas(EmitContext& ctx, const IR::Value& binding, std::string_view offset_var,
+                  std::string_view value, std::string_view bit_offset, u32 num_bits) {
+    const auto ssbo{fmt::format("{}_ssbo{}[{}>>2]", ctx.stage_name, binding.U32(), offset_var)};
+    ctx.Add(cas_loop, ssbo, ssbo, ssbo, value, bit_offset, num_bits);
+}
+} // Anonymous namespace
+
+void EmitLoadGlobalU8(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitLoadGlobalS8(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitLoadGlobalU16(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitLoadGlobalS16(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitLoadGlobal32(EmitContext& ctx, IR::Inst& inst, std::string_view address) {
+    if (ctx.profile.support_int64) {
+        return ctx.AddU32("{}=LoadGlobal32({});", inst, address);
+    }
+    LOG_WARNING(Shader_GLSL, "Int64 not supported, ignoring memory operation");
+    ctx.AddU32("{}=0u;", inst);
+}
+
+void EmitLoadGlobal64(EmitContext& ctx, IR::Inst& inst, std::string_view address) {
+    if (ctx.profile.support_int64) {
+        return ctx.AddU32x2("{}=LoadGlobal64({});", inst, address);
+    }
+    LOG_WARNING(Shader_GLSL, "Int64 not supported, ignoring memory operation");
+    ctx.AddU32x2("{}=uvec2(0);", inst);
+}
+
+void EmitLoadGlobal128(EmitContext& ctx, IR::Inst& inst, std::string_view address) {
+    if (ctx.profile.support_int64) {
+        return ctx.AddU32x4("{}=LoadGlobal128({});", inst, address);
+    }
+    LOG_WARNING(Shader_GLSL, "Int64 not supported, ignoring memory operation");
+    ctx.AddU32x4("{}=uvec4(0);", inst);
+}
+
+void EmitWriteGlobalU8(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitWriteGlobalS8(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitWriteGlobalU16(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitWriteGlobalS16(EmitContext&) {
+    NotImplemented();
+}
+
+void EmitWriteGlobal32(EmitContext& ctx, std::string_view address, std::string_view value) {
+    if (ctx.profile.support_int64) {
+        return ctx.Add("WriteGlobal32({},{});", address, value);
+    }
+    LOG_WARNING(Shader_GLSL, "Int64 not supported, ignoring memory operation");
+}
+
+void EmitWriteGlobal64(EmitContext& ctx, std::string_view address, std::string_view value) {
+    if (ctx.profile.support_int64) {
+        return ctx.Add("WriteGlobal64({},{});", address, value);
+    }
+    LOG_WARNING(Shader_GLSL, "Int64 not supported, ignoring memory operation");
+}
+
+void EmitWriteGlobal128(EmitContext& ctx, std::string_view address, std::string_view value) {
+    if (ctx.profile.support_int64) {
+        return ctx.Add("WriteGlobal128({},{});", address, value);
+    }
+    LOG_WARNING(Shader_GLSL, "Int64 not supported, ignoring memory operation");
+}
+
+void EmitLoadStorageU8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                       const IR::Value& offset) {
+    const auto offset_var{ctx.var_alloc.Consume(offset)};
+    ctx.AddU32("{}=bitfieldExtract({}_ssbo{}[{}>>2],int({}%4)*8,8);", inst, ctx.stage_name,
+               binding.U32(), offset_var, offset_var);
+}
+
+void EmitLoadStorageS8(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                       const IR::Value& offset) {
+    const auto offset_var{ctx.var_alloc.Consume(offset)};
+
ctx.AddU32("{}=bitfieldExtract(int({}_ssbo{}[{}>>2]),int({}%4)*8,8);", inst, ctx.stage_name, + binding.U32(), offset_var, offset_var); +} + +void EmitLoadStorageU16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + ctx.AddU32("{}=bitfieldExtract({}_ssbo{}[{}>>2],int(({}>>1)%2)*16,16);", inst, ctx.stage_name, + binding.U32(), offset_var, offset_var); +} + +void EmitLoadStorageS16(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + ctx.AddU32("{}=bitfieldExtract(int({}_ssbo{}[{}>>2]),int(({}>>1)%2)*16,16);", inst, + ctx.stage_name, binding.U32(), offset_var, offset_var); +} + +void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + ctx.AddU32("{}={}_ssbo{}[{}>>2];", inst, ctx.stage_name, binding.U32(), offset_var); +} + +void EmitLoadStorage64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + ctx.AddU32x2("{}=uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}+4)>>2]);", inst, ctx.stage_name, + binding.U32(), offset_var, ctx.stage_name, binding.U32(), offset_var); +} + +void EmitLoadStorage128(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, + const IR::Value& offset) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + ctx.AddU32x4("{}=uvec4({}_ssbo{}[{}>>2],{}_ssbo{}[({}+4)>>2],{}_ssbo{}[({}+8)>>2],{}_ssbo{}[({}" + "+12)>>2]);", + inst, ctx.stage_name, binding.U32(), offset_var, ctx.stage_name, binding.U32(), + offset_var, ctx.stage_name, binding.U32(), offset_var, ctx.stage_name, + binding.U32(), offset_var); +} + +void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + const auto bit_offset{fmt::format("int({}%4)*8", offset_var)}; + SsboWriteCas(ctx, binding, offset_var, value, bit_offset, 8); +} + +void EmitWriteStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + const auto bit_offset{fmt::format("int({}%4)*8", offset_var)}; + SsboWriteCas(ctx, binding, offset_var, value, bit_offset, 8); +} + +void EmitWriteStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + const auto bit_offset{fmt::format("int(({}>>1)%2)*16", offset_var)}; + SsboWriteCas(ctx, binding, offset_var, value, bit_offset, 16); +} + +void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + const auto bit_offset{fmt::format("int(({}>>1)%2)*16", offset_var)}; + SsboWriteCas(ctx, binding, offset_var, value, bit_offset, 16); +} + +void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + ctx.Add("{}_ssbo{}[{}>>2]={};", ctx.stage_name, binding.U32(), offset_var, value); +} + +void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; 
+ ctx.Add("{}_ssbo{}[{}>>2]={}.x;", ctx.stage_name, binding.U32(), offset_var, value); + ctx.Add("{}_ssbo{}[({}+4)>>2]={}.y;", ctx.stage_name, binding.U32(), offset_var, value); +} + +void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + std::string_view value) { + const auto offset_var{ctx.var_alloc.Consume(offset)}; + ctx.Add("{}_ssbo{}[{}>>2]={}.x;", ctx.stage_name, binding.U32(), offset_var, value); + ctx.Add("{}_ssbo{}[({}+4)>>2]={}.y;", ctx.stage_name, binding.U32(), offset_var, value); + ctx.Add("{}_ssbo{}[({}+8)>>2]={}.z;", ctx.stage_name, binding.U32(), offset_var, value); + ctx.Add("{}_ssbo{}[({}+12)>>2]={}.w;", ctx.stage_name, binding.U32(), offset_var, value); +} +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp new file mode 100755 index 000000000..f420fe388 --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp @@ -0,0 +1,105 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include + +#include "shader_recompiler/backend/glsl/emit_context.h" +#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h" +#include "shader_recompiler/frontend/ir/value.h" + +#ifdef _MSC_VER +#pragma warning(disable : 4100) +#endif + +namespace Shader::Backend::GLSL { + +void EmitGetRegister(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetRegister(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetPred(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetPred(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetGotoVariable(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetGotoVariable(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetIndirectBranchVariable(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetIndirectBranchVariable(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetZFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetSFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetCFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetOFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetZFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetSFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetCFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitSetOFlag(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetZeroFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetSignFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetCarryFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetOverflowFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetSparseFromOp(EmitContext& ctx) { + NotImplemented(); +} + +void EmitGetInBoundsFromOp(EmitContext& ctx) { + NotImplemented(); +} + +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_select.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_select.cpp new file mode 100755 index 000000000..49fba9073 --- /dev/null +++ b/src/shader_recompiler/backend/glsl/emit_glsl_select.cpp @@ -0,0 +1,55 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLSL {
+void EmitSelectU1(EmitContext& ctx, IR::Inst& inst, std::string_view cond,
+                  std::string_view true_value, std::string_view false_value) {
+    ctx.AddU1("{}={}?{}:{};", inst, cond, true_value, false_value);
+}
+
+void EmitSelectU8([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view cond,
+                  [[maybe_unused]] std::string_view true_value,
+                  [[maybe_unused]] std::string_view false_value) {
+    NotImplemented();
+}
+
+void EmitSelectU16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view cond,
+                   [[maybe_unused]] std::string_view true_value,
+                   [[maybe_unused]] std::string_view false_value) {
+    NotImplemented();
+}
+
+void EmitSelectU32(EmitContext& ctx, IR::Inst& inst, std::string_view cond,
+                   std::string_view true_value, std::string_view false_value) {
+    ctx.AddU32("{}={}?{}:{};", inst, cond, true_value, false_value);
+}
+
+void EmitSelectU64(EmitContext& ctx, IR::Inst& inst, std::string_view cond,
+                   std::string_view true_value, std::string_view false_value) {
+    ctx.AddU64("{}={}?{}:{};", inst, cond, true_value, false_value);
+}
+
+void EmitSelectF16([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] std::string_view cond,
+                   [[maybe_unused]] std::string_view true_value,
+                   [[maybe_unused]] std::string_view false_value) {
+    NotImplemented();
+}
+
+void EmitSelectF32(EmitContext& ctx, IR::Inst& inst, std::string_view cond,
+                   std::string_view true_value, std::string_view false_value) {
+    ctx.AddF32("{}={}?{}:{};", inst, cond, true_value, false_value);
+}
+
+void EmitSelectF64(EmitContext& ctx, IR::Inst& inst, std::string_view cond,
+                   std::string_view true_value, std::string_view false_value) {
+    ctx.AddF64("{}={}?{}:{};", inst, cond, true_value, false_value);
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_shared_memory.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_shared_memory.cpp
new file mode 100755
index 000000000..518b78f06
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_shared_memory.cpp
@@ -0,0 +1,79 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
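The shared-memory helpers that follow address a word-organized smem array, deriving the bit position of a byte or halfword inside its 32-bit word from the byte offset. The same arithmetic in host C++, for illustration (helper names hypothetical):

    #include <cstdint>

    // byte:     (offset % 4) * 8         -> 0, 8, 16 or 24
    // halfword: ((offset >> 1) % 2) * 16 -> 0 or 16
    int ByteBitOffset(uint32_t byte_offset) {
        return static_cast<int>(byte_offset % 4) * 8;
    }

    int HalfwordBitOffset(uint32_t byte_offset) {
        return static_cast<int>((byte_offset >> 1) % 2) * 16;
    }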
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+constexpr char cas_loop[]{"for(;;){{uint old_value={};uint "
+                          "cas_result=atomicCompSwap({},old_value,bitfieldInsert({},{},{},{}));"
+                          "if(cas_result==old_value){{break;}}}}"};
+
+void SharedWriteCas(EmitContext& ctx, std::string_view offset, std::string_view value,
+                    std::string_view bit_offset, u32 num_bits) {
+    const auto smem{fmt::format("smem[{}>>2]", offset)};
+    ctx.Add(cas_loop, smem, smem, smem, value, bit_offset, num_bits);
+}
+} // Anonymous namespace
+
+void EmitLoadSharedU8(EmitContext& ctx, IR::Inst& inst, std::string_view offset) {
+    ctx.AddU32("{}=bitfieldExtract(smem[{}>>2],int({}%4)*8,8);", inst, offset, offset);
+}
+
+void EmitLoadSharedS8(EmitContext& ctx, IR::Inst& inst, std::string_view offset) {
+    ctx.AddU32("{}=bitfieldExtract(int(smem[{}>>2]),int({}%4)*8,8);", inst, offset, offset);
+}
+
+void EmitLoadSharedU16(EmitContext& ctx, IR::Inst& inst, std::string_view offset) {
+    ctx.AddU32("{}=bitfieldExtract(smem[{}>>2],int(({}>>1)%2)*16,16);", inst, offset, offset);
+}
+
+void EmitLoadSharedS16(EmitContext& ctx, IR::Inst& inst, std::string_view offset) {
+    ctx.AddU32("{}=bitfieldExtract(int(smem[{}>>2]),int(({}>>1)%2)*16,16);", inst, offset, offset);
+}
+
+void EmitLoadSharedU32(EmitContext& ctx, IR::Inst& inst, std::string_view offset) {
+    ctx.AddU32("{}=smem[{}>>2];", inst, offset);
+}
+
+void EmitLoadSharedU64(EmitContext& ctx, IR::Inst& inst, std::string_view offset) {
+    ctx.AddU32x2("{}=uvec2(smem[{}>>2],smem[({}+4)>>2]);", inst, offset, offset);
+}
+
+void EmitLoadSharedU128(EmitContext& ctx, IR::Inst& inst, std::string_view offset) {
+    ctx.AddU32x4("{}=uvec4(smem[{}>>2],smem[({}+4)>>2],smem[({}+8)>>2],smem[({}+12)>>2]);", inst,
+                 offset, offset, offset, offset);
+}
+
+void EmitWriteSharedU8(EmitContext& ctx, std::string_view offset, std::string_view value) {
+    const auto bit_offset{fmt::format("int({}%4)*8", offset)};
+    SharedWriteCas(ctx, offset, value, bit_offset, 8);
+}
+
+void EmitWriteSharedU16(EmitContext& ctx, std::string_view offset, std::string_view value) {
+    const auto bit_offset{fmt::format("int(({}>>1)%2)*16", offset)};
+    SharedWriteCas(ctx, offset, value, bit_offset, 16);
+}
+
+void EmitWriteSharedU32(EmitContext& ctx, std::string_view offset, std::string_view value) {
+    ctx.Add("smem[{}>>2]={};", offset, value);
+}
+
+void EmitWriteSharedU64(EmitContext& ctx, std::string_view offset, std::string_view value) {
+    ctx.Add("smem[{}>>2]={}.x;", offset, value);
+    ctx.Add("smem[({}+4)>>2]={}.y;", offset, value);
+}
+
+void EmitWriteSharedU128(EmitContext& ctx, std::string_view offset, std::string_view value) {
+    ctx.Add("smem[{}>>2]={}.x;", offset, value);
+    ctx.Add("smem[({}+4)>>2]={}.y;", offset, value);
+    ctx.Add("smem[({}+8)>>2]={}.z;", offset, value);
+    ctx.Add("smem[({}+12)>>2]={}.w;", offset, value);
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_special.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_special.cpp
new file mode 100755
index 000000000..9b866f889
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_special.cpp
@@ -0,0 +1,111 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
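The U8/S8 and U16/S16 loads above lean on the two GLSL bitfieldExtract overloads: the unsigned form zero-extends the extracted field, the signed form sign-extends it. A C++ rendering of both, assuming 1 <= bits and offset + bits <= 32 (helper names hypothetical, illustration only):

    #include <cstdint>

    // Unsigned extract: zero-extends `bits` bits starting at `offset`.
    uint32_t BitfieldExtractU(uint32_t word, int offset, int bits) {
        return (word >> offset) & (bits < 32 ? (1u << bits) - 1u : ~0u);
    }

    // Signed extract: shift the field's top bit up to bit 31, then
    // arithmetic-shift back down so the sign bit is replicated.
    int32_t BitfieldExtractS(uint32_t word, int offset, int bits) {
        return static_cast<int32_t>(word << (32 - offset - bits)) >> (32 - bits);
    }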
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/program.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/profile.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+std::string_view OutputVertexIndex(EmitContext& ctx) {
+    return ctx.stage == Stage::TessellationControl ? "[gl_InvocationID]" : "";
+}
+
+void InitializeOutputVaryings(EmitContext& ctx) {
+    if (ctx.uses_geometry_passthrough) {
+        return;
+    }
+    if (ctx.stage == Stage::VertexB || ctx.stage == Stage::Geometry) {
+        ctx.Add("gl_Position=vec4(0,0,0,1);");
+    }
+    for (size_t index = 0; index < IR::NUM_GENERICS; ++index) {
+        if (!ctx.info.stores.Generic(index)) {
+            continue;
+        }
+        const auto& info_array{ctx.output_generics.at(index)};
+        const auto output_decorator{OutputVertexIndex(ctx)};
+        size_t element{};
+        while (element < info_array.size()) {
+            const auto& info{info_array.at(element)};
+            const auto varying_name{fmt::format("{}{}", info.name, output_decorator)};
+            switch (info.num_components) {
+            case 1: {
+                const char value{element == 3 ? '1' : '0'};
+                ctx.Add("{}={}.f;", varying_name, value);
+                break;
+            }
+            case 2:
+            case 3:
+                if (element + info.num_components < 4) {
+                    ctx.Add("{}=vec{}(0);", varying_name, info.num_components);
+                } else {
+                    // last element is the w component, must be initialized to 1
+                    const auto zeros{info.num_components == 3 ? "0,0," : "0,"};
+                    ctx.Add("{}=vec{}({}1);", varying_name, info.num_components, zeros);
+                }
+                break;
+            case 4:
+                ctx.Add("{}=vec4(0,0,0,1);", varying_name);
+                break;
+            default:
+                break;
+            }
+            element += info.num_components;
+        }
+    }
+}
+} // Anonymous namespace
+
+void EmitPhi(EmitContext& ctx, IR::Inst& phi) {
+    const size_t num_args{phi.NumArgs()};
+    for (size_t i = 0; i < num_args; ++i) {
+        ctx.var_alloc.Consume(phi.Arg(i));
+    }
+    if (!phi.Definition<Id>().is_valid) {
+        // The phi node wasn't forward defined
+        ctx.var_alloc.PhiDefine(phi, phi.Arg(0).Type());
+    }
+}
+
+void EmitVoid(EmitContext&) {}
+
+void EmitReference(EmitContext& ctx, const IR::Value& value) {
+    ctx.var_alloc.Consume(value);
+}
+
+void EmitPhiMove(EmitContext& ctx, const IR::Value& phi_value, const IR::Value& value) {
+    IR::Inst& phi{*phi_value.InstRecursive()};
+    const auto phi_type{phi.Arg(0).Type()};
+    if (!phi.Definition<Id>().is_valid) {
+        // The phi node wasn't forward defined
+        ctx.var_alloc.PhiDefine(phi, phi_type);
+    }
+    const auto phi_reg{ctx.var_alloc.Consume(IR::Value{&phi})};
+    const auto val_reg{ctx.var_alloc.Consume(value)};
+    if (phi_reg == val_reg) {
+        return;
+    }
+    ctx.Add("{}={};", phi_reg, val_reg);
+}
+
+void EmitPrologue(EmitContext& ctx) {
+    InitializeOutputVaryings(ctx);
+}
+
+void EmitEpilogue(EmitContext&) {}
+
+void EmitEmitVertex(EmitContext& ctx, const IR::Value& stream) {
+    ctx.Add("EmitStreamVertex(int({}));", ctx.var_alloc.Consume(stream));
+    InitializeOutputVaryings(ctx);
+}
+
+void EmitEndPrimitive(EmitContext& ctx, const IR::Value& stream) {
+    ctx.Add("EndStreamPrimitive(int({}));", ctx.var_alloc.Consume(stream));
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_undefined.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_undefined.cpp
new file mode 100755
index 000000000..15bf02dd6
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_undefined.cpp
@@ -0,0 +1,32 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+
+namespace Shader::Backend::GLSL {
+
+void EmitUndefU1(EmitContext& ctx, IR::Inst& inst) {
+    ctx.AddU1("{}=false;", inst);
+}
+
+void EmitUndefU8(EmitContext& ctx, IR::Inst& inst) {
+    ctx.AddU32("{}=0u;", inst);
+}
+
+void EmitUndefU16(EmitContext& ctx, IR::Inst& inst) {
+    ctx.AddU32("{}=0u;", inst);
+}
+
+void EmitUndefU32(EmitContext& ctx, IR::Inst& inst) {
+    ctx.AddU32("{}=0u;", inst);
+}
+
+void EmitUndefU64(EmitContext& ctx, IR::Inst& inst) {
+    ctx.AddU64("{}=0u;", inst);
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp
new file mode 100755
index 000000000..a982dd8a2
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp
@@ -0,0 +1,217 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/profile.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+void SetInBoundsFlag(EmitContext& ctx, IR::Inst& inst) {
+    IR::Inst* const in_bounds{inst.GetAssociatedPseudoOperation(IR::Opcode::GetInBoundsFromOp)};
+    if (!in_bounds) {
+        return;
+    }
+    ctx.AddU1("{}=shfl_in_bounds;", *in_bounds);
+    in_bounds->Invalidate();
+}
+
+std::string ComputeMinThreadId(std::string_view thread_id, std::string_view segmentation_mask) {
+    return fmt::format("({}&{})", thread_id, segmentation_mask);
+}
+
+std::string ComputeMaxThreadId(std::string_view min_thread_id, std::string_view clamp,
+                               std::string_view not_seg_mask) {
+    return fmt::format("({})|({}&{})", min_thread_id, clamp, not_seg_mask);
+}
+
+std::string GetMaxThreadId(std::string_view thread_id, std::string_view clamp,
+                           std::string_view segmentation_mask) {
+    const auto not_seg_mask{fmt::format("(~{})", segmentation_mask)};
+    const auto min_thread_id{ComputeMinThreadId(thread_id, segmentation_mask)};
+    return ComputeMaxThreadId(min_thread_id, clamp, not_seg_mask);
+}
+
+void UseShuffleNv(EmitContext& ctx, IR::Inst& inst, std::string_view shfl_op,
+                  std::string_view value, std::string_view index,
+                  [[maybe_unused]] std::string_view clamp, std::string_view segmentation_mask) {
+    const auto width{fmt::format("32u>>(bitCount({}&31u))", segmentation_mask)};
+    ctx.AddU32("{}={}({},{},{},shfl_in_bounds);", inst, shfl_op, value, index, width);
+    SetInBoundsFlag(ctx, inst);
+}
+} // Anonymous namespace
+
+void EmitLaneId(EmitContext& ctx, IR::Inst& inst) {
+    ctx.AddU32("{}=gl_SubGroupInvocationARB&31u;", inst);
+}
+
+void EmitVoteAll(EmitContext& ctx, IR::Inst& inst, std::string_view pred) {
+    if (!ctx.profile.warp_size_potentially_larger_than_guest) {
+        ctx.AddU1("{}=allInvocationsEqualARB({});", inst, pred);
+    } else {
+        const auto active_mask{fmt::format("uvec2(ballotARB(true))[gl_SubGroupInvocationARB]")};
+        const auto ballot{fmt::format("uvec2(ballotARB({}))[gl_SubGroupInvocationARB]", pred)};
+        ctx.AddU1("{}=({}&{})=={};", inst, ballot, active_mask, active_mask);
+    }
+}
+
+void EmitVoteAny(EmitContext& ctx, IR::Inst& inst, std::string_view pred) {
+    if (!ctx.profile.warp_size_potentially_larger_than_guest) {
+        ctx.AddU1("{}=anyInvocationARB({});", inst, pred);
+ } else { + const auto active_mask{fmt::format("uvec2(ballotARB(true))[gl_SubGroupInvocationARB]")}; + const auto ballot{fmt::format("uvec2(ballotARB({}))[gl_SubGroupInvocationARB]", pred)}; + ctx.AddU1("{}=({}&{})!=0u;", inst, ballot, active_mask, active_mask); + } +} + +void EmitVoteEqual(EmitContext& ctx, IR::Inst& inst, std::string_view pred) { + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + ctx.AddU1("{}=allInvocationsEqualARB({});", inst, pred); + } else { + const auto active_mask{fmt::format("uvec2(ballotARB(true))[gl_SubGroupInvocationARB]")}; + const auto ballot{fmt::format("uvec2(ballotARB({}))[gl_SubGroupInvocationARB]", pred)}; + const auto value{fmt::format("({}^{})", ballot, active_mask)}; + ctx.AddU1("{}=({}==0)||({}=={});", inst, value, value, active_mask); + } +} + +void EmitSubgroupBallot(EmitContext& ctx, IR::Inst& inst, std::string_view pred) { + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + ctx.AddU32("{}=uvec2(ballotARB({})).x;", inst, pred); + } else { + ctx.AddU32("{}=uvec2(ballotARB({}))[gl_SubGroupInvocationARB];", inst, pred); + } +} + +void EmitSubgroupEqMask(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32("{}=uint(gl_SubGroupEqMaskARB.x);", inst); +} + +void EmitSubgroupLtMask(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32("{}=uint(gl_SubGroupLtMaskARB.x);", inst); +} + +void EmitSubgroupLeMask(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32("{}=uint(gl_SubGroupLeMaskARB.x);", inst); +} + +void EmitSubgroupGtMask(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32("{}=uint(gl_SubGroupGtMaskARB.x);", inst); +} + +void EmitSubgroupGeMask(EmitContext& ctx, IR::Inst& inst) { + ctx.AddU32("{}=uint(gl_SubGroupGeMaskARB.x);", inst); +} + +void EmitShuffleIndex(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view index, std::string_view clamp, + std::string_view segmentation_mask) { + if (ctx.profile.support_gl_warp_intrinsics) { + UseShuffleNv(ctx, inst, "shuffleNV", value, index, clamp, segmentation_mask); + return; + } + const auto not_seg_mask{fmt::format("(~{})", segmentation_mask)}; + const auto thread_id{"gl_SubGroupInvocationARB"}; + const auto min_thread_id{ComputeMinThreadId(thread_id, segmentation_mask)}; + const auto max_thread_id{ComputeMaxThreadId(min_thread_id, clamp, not_seg_mask)}; + + const auto lhs{fmt::format("({}&{})", index, not_seg_mask)}; + const auto src_thread_id{fmt::format("({})|({})", lhs, min_thread_id)}; + ctx.Add("shfl_in_bounds=int({})<=int({});", src_thread_id, max_thread_id); + SetInBoundsFlag(ctx, inst); + ctx.AddU32("{}=shfl_in_bounds?readInvocationARB({},{}):{};", inst, value, src_thread_id, value); +} + +void EmitShuffleUp(EmitContext& ctx, IR::Inst& inst, std::string_view value, std::string_view index, + std::string_view clamp, std::string_view segmentation_mask) { + if (ctx.profile.support_gl_warp_intrinsics) { + UseShuffleNv(ctx, inst, "shuffleUpNV", value, index, clamp, segmentation_mask); + return; + } + const auto thread_id{"gl_SubGroupInvocationARB"}; + const auto max_thread_id{GetMaxThreadId(thread_id, clamp, segmentation_mask)}; + const auto src_thread_id{fmt::format("({}-{})", thread_id, index)}; + ctx.Add("shfl_in_bounds=int({})>=int({});", src_thread_id, max_thread_id); + SetInBoundsFlag(ctx, inst); + ctx.AddU32("{}=shfl_in_bounds?readInvocationARB({},{}):{};", inst, value, src_thread_id, value); +} + +void EmitShuffleDown(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view index, std::string_view clamp, + std::string_view 
segmentation_mask) { + if (ctx.profile.support_gl_warp_intrinsics) { + UseShuffleNv(ctx, inst, "shuffleDownNV", value, index, clamp, segmentation_mask); + return; + } + const auto thread_id{"gl_SubGroupInvocationARB"}; + const auto max_thread_id{GetMaxThreadId(thread_id, clamp, segmentation_mask)}; + const auto src_thread_id{fmt::format("({}+{})", thread_id, index)}; + ctx.Add("shfl_in_bounds=int({})<=int({});", src_thread_id, max_thread_id); + SetInBoundsFlag(ctx, inst); + ctx.AddU32("{}=shfl_in_bounds?readInvocationARB({},{}):{};", inst, value, src_thread_id, value); +} + +void EmitShuffleButterfly(EmitContext& ctx, IR::Inst& inst, std::string_view value, + std::string_view index, std::string_view clamp, + std::string_view segmentation_mask) { + if (ctx.profile.support_gl_warp_intrinsics) { + UseShuffleNv(ctx, inst, "shuffleXorNV", value, index, clamp, segmentation_mask); + return; + } + const auto thread_id{"gl_SubGroupInvocationARB"}; + const auto max_thread_id{GetMaxThreadId(thread_id, clamp, segmentation_mask)}; + const auto src_thread_id{fmt::format("({}^{})", thread_id, index)}; + ctx.Add("shfl_in_bounds=int({})<=int({});", src_thread_id, max_thread_id); + SetInBoundsFlag(ctx, inst); + ctx.AddU32("{}=shfl_in_bounds?readInvocationARB({},{}):{};", inst, value, src_thread_id, value); +} + +void EmitFSwizzleAdd(EmitContext& ctx, IR::Inst& inst, std::string_view op_a, std::string_view op_b, + std::string_view swizzle) { + const auto mask{fmt::format("({}>>((gl_SubGroupInvocationARB&3)<<1))&3", swizzle)}; + const std::string modifier_a = fmt::format("FSWZ_A[{}]", mask); + const std::string modifier_b = fmt::format("FSWZ_B[{}]", mask); + ctx.AddF32("{}=({}*{})+({}*{});", inst, op_a, modifier_a, op_b, modifier_b); +} + +void EmitDPdxFine(EmitContext& ctx, IR::Inst& inst, std::string_view op_a) { + if (ctx.profile.support_gl_derivative_control) { + ctx.AddF32("{}=dFdxFine({});", inst, op_a); + } else { + LOG_WARNING(Shader_GLSL, "Device does not support dFdxFine, fallback to dFdx"); + ctx.AddF32("{}=dFdx({});", inst, op_a); + } +} + +void EmitDPdyFine(EmitContext& ctx, IR::Inst& inst, std::string_view op_a) { + if (ctx.profile.support_gl_derivative_control) { + ctx.AddF32("{}=dFdyFine({});", inst, op_a); + } else { + LOG_WARNING(Shader_GLSL, "Device does not support dFdyFine, fallback to dFdy"); + ctx.AddF32("{}=dFdy({});", inst, op_a); + } +} + +void EmitDPdxCoarse(EmitContext& ctx, IR::Inst& inst, std::string_view op_a) { + if (ctx.profile.support_gl_derivative_control) { + ctx.AddF32("{}=dFdxCoarse({});", inst, op_a); + } else { + LOG_WARNING(Shader_GLSL, "Device does not support dFdxCoarse, fallback to dFdx"); + ctx.AddF32("{}=dFdx({});", inst, op_a); + } +} + +void EmitDPdyCoarse(EmitContext& ctx, IR::Inst& inst, std::string_view op_a) { + if (ctx.profile.support_gl_derivative_control) { + ctx.AddF32("{}=dFdyCoarse({});", inst, op_a); + } else { + LOG_WARNING(Shader_GLSL, "Device does not support dFdyCoarse, fallback to dFdy"); + ctx.AddF32("{}=dFdy({});", inst, op_a); + } +} +} // namespace Shader::Backend::GLSL diff --git a/src/shader_recompiler/backend/glsl/var_alloc.cpp b/src/shader_recompiler/backend/glsl/var_alloc.cpp new file mode 100755 index 000000000..194f926ca --- /dev/null +++ b/src/shader_recompiler/backend/glsl/var_alloc.cpp @@ -0,0 +1,308 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
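+
+// VarAlloc hands out one pool of numbered GLSL variables per GlslVarType
+// (u_0, f_1, ...). Pool slots are recycled once an instruction's last use is
+// consumed, and results that start out with no uses are routed to a
+// 't'-prefixed temporary instead of occupying a slot.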
+
+#include <string>
+#include <string_view>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/backend/glsl/var_alloc.h"
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+std::string TypePrefix(GlslVarType type) {
+    switch (type) {
+    case GlslVarType::U1:
+        return "b_";
+    case GlslVarType::F16x2:
+        return "f16x2_";
+    case GlslVarType::U32:
+        return "u_";
+    case GlslVarType::F32:
+        return "f_";
+    case GlslVarType::U64:
+        return "u64_";
+    case GlslVarType::F64:
+        return "d_";
+    case GlslVarType::U32x2:
+        return "u2_";
+    case GlslVarType::F32x2:
+        return "f2_";
+    case GlslVarType::U32x3:
+        return "u3_";
+    case GlslVarType::F32x3:
+        return "f3_";
+    case GlslVarType::U32x4:
+        return "u4_";
+    case GlslVarType::F32x4:
+        return "f4_";
+    case GlslVarType::PrecF32:
+        return "pf_";
+    case GlslVarType::PrecF64:
+        return "pd_";
+    case GlslVarType::Void:
+        return "";
+    default:
+        throw NotImplementedException("Type {}", type);
+    }
+}
+
+std::string FormatFloat(std::string_view value, IR::Type type) {
+    // TODO: Confirm FP64 nan/inf
+    if (type == IR::Type::F32) {
+        if (value == "nan") {
+            return "utof(0x7fc00000)";
+        }
+        if (value == "inf") {
+            return "utof(0x7f800000)";
+        }
+        if (value == "-inf") {
+            return "utof(0xff800000)";
+        }
+    }
+    if (value.find_first_of('e') != std::string_view::npos) {
+        // scientific notation
+        const auto cast{type == IR::Type::F32 ? "float" : "double"};
+        return fmt::format("{}({})", cast, value);
+    }
+    const bool needs_dot{value.find_first_of('.') == std::string_view::npos};
+    const bool needs_suffix{!value.ends_with('f')};
+    const auto suffix{type == IR::Type::F32 ? "f" : "lf"};
+    return fmt::format("{}{}{}", value, needs_dot ? "." : "", needs_suffix ? suffix : "");
+}
+
+std::string MakeImm(const IR::Value& value) {
+    switch (value.Type()) {
+    case IR::Type::U1:
+        return fmt::format("{}", value.U1() ? "true" : "false");
+    case IR::Type::U32:
+        return fmt::format("{}u", value.U32());
+    case IR::Type::F32:
+        return FormatFloat(fmt::format("{}", value.F32()), IR::Type::F32);
+    case IR::Type::U64:
+        return fmt::format("{}ul", value.U64());
+    case IR::Type::F64:
+        return FormatFloat(fmt::format("{}", value.F64()), IR::Type::F64);
+    case IR::Type::Void:
+        return "";
+    default:
+        throw NotImplementedException("Immediate type {}", value.Type());
+    }
+}
+} // Anonymous namespace
+
+std::string VarAlloc::Representation(u32 index, GlslVarType type) const {
+    const auto prefix{TypePrefix(type)};
+    return fmt::format("{}{}", prefix, index);
+}
+
+std::string VarAlloc::Representation(Id id) const {
+    return Representation(id.index, id.type);
+}
+
+std::string VarAlloc::Define(IR::Inst& inst, GlslVarType type) {
+    if (inst.HasUses()) {
+        inst.SetDefinition(Alloc(type));
+        return Representation(inst.Definition<Id>());
+    } else {
+        Id id{};
+        id.type.Assign(type);
+        GetUseTracker(type).uses_temp = true;
+        inst.SetDefinition(id);
+        return 't' + Representation(inst.Definition<Id>());
+    }
+}
+
+std::string VarAlloc::Define(IR::Inst& inst, IR::Type type) {
+    return Define(inst, RegType(type));
+}
+
+std::string VarAlloc::PhiDefine(IR::Inst& inst, IR::Type type) {
+    return AddDefine(inst, RegType(type));
+}
+
+std::string VarAlloc::AddDefine(IR::Inst& inst, GlslVarType type) {
+    if (!inst.HasUses()) {
+        return "";
+    }
+    inst.SetDefinition(Alloc(type));
+    return Representation(inst.Definition<Id>());
+}
+
+std::string VarAlloc::Consume(const IR::Value& value) {
+    return value.IsImmediate() ? MakeImm(value) : ConsumeInst(*value.InstRecursive());
+}
+
+std::string VarAlloc::ConsumeInst(IR::Inst& inst) {
+    inst.DestructiveRemoveUsage();
+    if (!inst.HasUses()) {
+        Free(inst.Definition<Id>());
+    }
+    return Representation(inst.Definition<Id>());
+}
+
+std::string VarAlloc::GetGlslType(IR::Type type) const {
+    return GetGlslType(RegType(type));
+}
+
+Id VarAlloc::Alloc(GlslVarType type) {
+    auto& use_tracker{GetUseTracker(type)};
+    const auto num_vars{use_tracker.var_use.size()};
+    for (size_t var = 0; var < num_vars; ++var) {
+        if (use_tracker.var_use[var]) {
+            continue;
+        }
+        use_tracker.num_used = std::max(use_tracker.num_used, var + 1);
+        use_tracker.var_use[var] = true;
+        Id ret{};
+        ret.is_valid.Assign(1);
+        ret.type.Assign(type);
+        ret.index.Assign(static_cast<u32>(var));
+        return ret;
+    }
+    // Allocate a new variable
+    use_tracker.var_use.push_back(true);
+    Id ret{};
+    ret.is_valid.Assign(1);
+    ret.type.Assign(type);
+    ret.index.Assign(static_cast<u32>(use_tracker.num_used));
+    ++use_tracker.num_used;
+    return ret;
+}
+
+void VarAlloc::Free(Id id) {
+    if (id.is_valid == 0) {
+        throw LogicError("Freeing invalid variable");
+    }
+    auto& use_tracker{GetUseTracker(id.type)};
+    use_tracker.var_use[id.index] = false;
+}
+
+GlslVarType VarAlloc::RegType(IR::Type type) const {
+    switch (type) {
+    case IR::Type::U1:
+        return GlslVarType::U1;
+    case IR::Type::U32:
+        return GlslVarType::U32;
+    case IR::Type::F32:
+        return GlslVarType::F32;
+    case IR::Type::U64:
+        return GlslVarType::U64;
+    case IR::Type::F64:
+        return GlslVarType::F64;
+    default:
+        throw NotImplementedException("IR type {}", type);
+    }
+}
+
+std::string VarAlloc::GetGlslType(GlslVarType type) const {
+    switch (type) {
+    case GlslVarType::U1:
+        return "bool";
+    case GlslVarType::F16x2:
+        return "f16vec2";
+    case GlslVarType::U32:
+        return "uint";
+    case GlslVarType::F32:
+    case GlslVarType::PrecF32:
+        return "float";
+    case GlslVarType::U64:
+        return "uint64_t";
+    case GlslVarType::F64:
+    case GlslVarType::PrecF64:
+        return "double";
+    case GlslVarType::U32x2:
+        return "uvec2";
+    case GlslVarType::F32x2:
+        return "vec2";
+    case GlslVarType::U32x3:
+        return "uvec3";
+    case GlslVarType::F32x3:
+        return "vec3";
+    case GlslVarType::U32x4:
+        return "uvec4";
+    case GlslVarType::F32x4:
+        return "vec4";
+    case GlslVarType::Void:
+        return "";
+    default:
+        throw NotImplementedException("Type {}", type);
+    }
+}
+
+VarAlloc::UseTracker& VarAlloc::GetUseTracker(GlslVarType type) {
+    switch (type) {
+    case GlslVarType::U1:
+        return var_bool;
+    case GlslVarType::F16x2:
+        return var_f16x2;
+    case GlslVarType::U32:
+        return var_u32;
+    case GlslVarType::F32:
+        return var_f32;
+    case GlslVarType::U64:
+        return var_u64;
+    case GlslVarType::F64:
+        return var_f64;
+    case GlslVarType::U32x2:
+        return var_u32x2;
+    case GlslVarType::F32x2:
+        return var_f32x2;
+    case GlslVarType::U32x3:
+        return var_u32x3;
+    case GlslVarType::F32x3:
+        return var_f32x3;
+    case GlslVarType::U32x4:
+        return var_u32x4;
+    case GlslVarType::F32x4:
+        return var_f32x4;
+    case GlslVarType::PrecF32:
+        return var_precf32;
+    case GlslVarType::PrecF64:
+        return var_precf64;
+    default:
+        throw NotImplementedException("Type {}", type);
+    }
+}
+
+const VarAlloc::UseTracker& VarAlloc::GetUseTracker(GlslVarType type) const {
+    switch (type) {
+    case GlslVarType::U1:
+        return var_bool;
+    case GlslVarType::F16x2:
+        return var_f16x2;
+    case GlslVarType::U32:
+        return var_u32;
+    case GlslVarType::F32:
+        return var_f32;
+    case GlslVarType::U64:
+        return var_u64;
+    case GlslVarType::F64:
+        return var_f64;
+    case GlslVarType::U32x2:
+        return var_u32x2;
+    case GlslVarType::F32x2:
+        return var_f32x2;
+    case GlslVarType::U32x3:
+        return var_u32x3;
+    case GlslVarType::F32x3:
+        return var_f32x3;
+    case GlslVarType::U32x4:
+        return var_u32x4;
+    case GlslVarType::F32x4:
+        return var_f32x4;
+    case GlslVarType::PrecF32:
+        return var_precf32;
+    case GlslVarType::PrecF64:
+        return var_precf64;
+    default:
+        throw NotImplementedException("Type {}", type);
+    }
+}
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/var_alloc.h b/src/shader_recompiler/backend/glsl/var_alloc.h
new file mode 100755
index 000000000..8b49f32a6
--- /dev/null
+++ b/src/shader_recompiler/backend/glsl/var_alloc.h
@@ -0,0 +1,105 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <string>
+#include <string_view>
+#include <vector>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+
+namespace Shader::IR {
+class Inst;
+class Value;
+enum class Type;
+} // namespace Shader::IR
+
+namespace Shader::Backend::GLSL {
+enum class GlslVarType : u32 {
+    U1,
+    F16x2,
+    U32,
+    F32,
+    U64,
+    F64,
+    U32x2,
+    F32x2,
+    U32x3,
+    F32x3,
+    U32x4,
+    F32x4,
+    PrecF32,
+    PrecF64,
+    Void,
+};
+
+struct Id {
+    union {
+        u32 raw;
+        BitField<0, 1, u32> is_valid;
+        BitField<1, 4, GlslVarType> type;
+        BitField<6, 26, u32> index;
+    };
+
+    bool operator==(Id rhs) const noexcept {
+        return raw == rhs.raw;
+    }
+    bool operator!=(Id rhs) const noexcept {
+        return !operator==(rhs);
+    }
+};
+static_assert(sizeof(Id) == sizeof(u32));
+
+class VarAlloc {
+public:
+    struct UseTracker {
+        bool uses_temp{};
+        size_t num_used{};
+        std::vector<bool> var_use;
+    };
+
+    /// Used for explicit usages of variables, may revert to temporaries
+    std::string Define(IR::Inst& inst, GlslVarType type);
+    std::string Define(IR::Inst& inst, IR::Type type);
+
+    /// Used to assign variables used by the IR. May return a blank string if
+    /// the instruction's result is unused in the IR.
+    std::string AddDefine(IR::Inst& inst, GlslVarType type);
+    std::string PhiDefine(IR::Inst& inst, IR::Type type);
+
+    std::string Consume(const IR::Value& value);
+    std::string ConsumeInst(IR::Inst& inst);
+
+    std::string GetGlslType(GlslVarType type) const;
+    std::string GetGlslType(IR::Type type) const;
+
+    const UseTracker& GetUseTracker(GlslVarType type) const;
+    std::string Representation(u32 index, GlslVarType type) const;
+
+private:
+    GlslVarType RegType(IR::Type type) const;
+    Id Alloc(GlslVarType type);
+    void Free(Id id);
+    UseTracker& GetUseTracker(GlslVarType type);
+    std::string Representation(Id id) const;
+
+    UseTracker var_bool{};
+    UseTracker var_f16x2{};
+    UseTracker var_u32{};
+    UseTracker var_u32x2{};
+    UseTracker var_u32x3{};
+    UseTracker var_u32x4{};
+    UseTracker var_f32{};
+    UseTracker var_f32x2{};
+    UseTracker var_f32x3{};
+    UseTracker var_f32x4{};
+    UseTracker var_u64{};
+    UseTracker var_f64{};
+    UseTracker var_precf32{};
+    UseTracker var_precf64{};
+};
+
+} // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/spirv/emit_context.cpp b/src/shader_recompiler/backend/spirv/emit_context.cpp
new file mode 100755
index 000000000..2d29d8c14
--- /dev/null
+++ b/src/shader_recompiler/backend/spirv/emit_context.cpp
@@ -0,0 +1,1368 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
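+
+// EmitContext assembles the SPIR-V module for one shader program: common
+// types and constants first, then the I/O interface, local and shared
+// memory, and finally the constant-buffer, storage-buffer, texture and
+// image bindings (see the constructor's call order below).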
+
+#include <array>
+#include <bit>
+#include <climits>
+#include <string_view>
+
+#include <fmt/format.h>
+
+#include "common/common_types.h"
+#include "common/div_ceil.h"
+#include "shader_recompiler/backend/spirv/emit_context.h"
+
+namespace Shader::Backend::SPIRV {
+namespace {
+enum class Operation {
+    Increment,
+    Decrement,
+    FPAdd,
+    FPMin,
+    FPMax,
+};
+
+struct AttrInfo {
+    Id pointer;
+    Id id;
+    bool needs_cast;
+};
+
+Id ImageType(EmitContext& ctx, const TextureDescriptor& desc) {
+    const spv::ImageFormat format{spv::ImageFormat::Unknown};
+    const Id type{ctx.F32[1]};
+    const bool depth{desc.is_depth};
+    switch (desc.type) {
+    case TextureType::Color1D:
+        return ctx.TypeImage(type, spv::Dim::Dim1D, depth, false, false, 1, format);
+    case TextureType::ColorArray1D:
+        return ctx.TypeImage(type, spv::Dim::Dim1D, depth, true, false, 1, format);
+    case TextureType::Color2D:
+        return ctx.TypeImage(type, spv::Dim::Dim2D, depth, false, false, 1, format);
+    case TextureType::ColorArray2D:
+        return ctx.TypeImage(type, spv::Dim::Dim2D, depth, true, false, 1, format);
+    case TextureType::Color3D:
+        return ctx.TypeImage(type, spv::Dim::Dim3D, depth, false, false, 1, format);
+    case TextureType::ColorCube:
+        return ctx.TypeImage(type, spv::Dim::Cube, depth, false, false, 1, format);
+    case TextureType::ColorArrayCube:
+        return ctx.TypeImage(type, spv::Dim::Cube, depth, true, false, 1, format);
+    case TextureType::Buffer:
+        break;
+    }
+    throw InvalidArgument("Invalid texture type {}", desc.type);
+}
+
+spv::ImageFormat GetImageFormat(ImageFormat format) {
+    switch (format) {
+    case ImageFormat::Typeless:
+        return spv::ImageFormat::Unknown;
+    case ImageFormat::R8_UINT:
+        return spv::ImageFormat::R8ui;
+    case ImageFormat::R8_SINT:
+        return spv::ImageFormat::R8i;
+    case ImageFormat::R16_UINT:
+        return spv::ImageFormat::R16ui;
+    case ImageFormat::R16_SINT:
+        return spv::ImageFormat::R16i;
+    case ImageFormat::R32_UINT:
+        return spv::ImageFormat::R32ui;
+    case ImageFormat::R32G32_UINT:
+        return spv::ImageFormat::Rg32ui;
+    case ImageFormat::R32G32B32A32_UINT:
+        return spv::ImageFormat::Rgba32ui;
+    }
+    throw InvalidArgument("Invalid image format {}", format);
+}
+
+Id ImageType(EmitContext& ctx, const ImageDescriptor& desc) {
+    const spv::ImageFormat format{GetImageFormat(desc.format)};
+    const Id type{ctx.U32[1]};
+    switch (desc.type) {
+    case TextureType::Color1D:
+        return ctx.TypeImage(type, spv::Dim::Dim1D, false, false, false, 2, format);
+    case TextureType::ColorArray1D:
+        return ctx.TypeImage(type, spv::Dim::Dim1D, false, true, false, 2, format);
+    case TextureType::Color2D:
+        return ctx.TypeImage(type, spv::Dim::Dim2D, false, false, false, 2, format);
+    case TextureType::ColorArray2D:
+        return ctx.TypeImage(type, spv::Dim::Dim2D, false, true, false, 2, format);
+    case TextureType::Color3D:
+        return ctx.TypeImage(type, spv::Dim::Dim3D, false, false, false, 2, format);
+    case TextureType::Buffer:
+        throw NotImplementedException("Image buffer");
+    default:
+        break;
+    }
+    throw InvalidArgument("Invalid texture type {}", desc.type);
+}
+
+Id DefineVariable(EmitContext& ctx, Id type, std::optional<spv::BuiltIn> builtin,
+                  spv::StorageClass storage_class) {
+    const Id pointer_type{ctx.TypePointer(storage_class, type)};
+    const Id id{ctx.AddGlobalVariable(pointer_type, storage_class)};
+    if (builtin) {
+        ctx.Decorate(id, spv::Decoration::BuiltIn, *builtin);
+    }
+    ctx.interfaces.push_back(id);
+    return id;
+}
+
+u32 NumVertices(InputTopology input_topology) {
+    switch (input_topology) {
+    case InputTopology::Points:
+        return 1;
+    case InputTopology::Lines:
+        return 2;
+    case InputTopology::LinesAdjacency:
+        return 4;
+    case InputTopology::Triangles:
+        return 3;
+    case InputTopology::TrianglesAdjacency:
+        return 6;
+    }
+    throw InvalidArgument("Invalid input topology {}", input_topology);
+}
+
+Id DefineInput(EmitContext& ctx, Id type, bool per_invocation,
+               std::optional<spv::BuiltIn> builtin = std::nullopt) {
+    switch (ctx.stage) {
+    case Stage::TessellationControl:
+    case Stage::TessellationEval:
+        if (per_invocation) {
+            type = ctx.TypeArray(type, ctx.Const(32u));
+        }
+        break;
+    case Stage::Geometry:
+        if (per_invocation) {
+            const u32 num_vertices{NumVertices(ctx.runtime_info.input_topology)};
+            type = ctx.TypeArray(type, ctx.Const(num_vertices));
+        }
+        break;
+    default:
+        break;
+    }
+    return DefineVariable(ctx, type, builtin, spv::StorageClass::Input);
+}
+
+Id DefineOutput(EmitContext& ctx, Id type, std::optional<u32> invocations,
+                std::optional<spv::BuiltIn> builtin = std::nullopt) {
+    if (invocations && ctx.stage == Stage::TessellationControl) {
+        type = ctx.TypeArray(type, ctx.Const(*invocations));
+    }
+    return DefineVariable(ctx, type, builtin, spv::StorageClass::Output);
+}
+
+void DefineGenericOutput(EmitContext& ctx, size_t index, std::optional<u32> invocations) {
+    static constexpr std::string_view swizzle{"xyzw"};
+    const size_t base_attr_index{static_cast<size_t>(IR::Attribute::Generic0X) + index * 4};
+    u32 element{0};
+    while (element < 4) {
+        const u32 remainder{4 - element};
+        const TransformFeedbackVarying* xfb_varying{};
+        if (!ctx.runtime_info.xfb_varyings.empty()) {
+            xfb_varying = &ctx.runtime_info.xfb_varyings[base_attr_index + element];
+            xfb_varying = xfb_varying && xfb_varying->components > 0 ? xfb_varying : nullptr;
+        }
+        const u32 num_components{xfb_varying ? xfb_varying->components : remainder};
+
+        const Id id{DefineOutput(ctx, ctx.F32[num_components], invocations)};
+        ctx.Decorate(id, spv::Decoration::Location, static_cast<u32>(index));
+        if (element > 0) {
+            ctx.Decorate(id, spv::Decoration::Component, element);
+        }
+        if (xfb_varying) {
+            ctx.Decorate(id, spv::Decoration::XfbBuffer, xfb_varying->buffer);
+            ctx.Decorate(id, spv::Decoration::XfbStride, xfb_varying->stride);
+            ctx.Decorate(id, spv::Decoration::Offset, xfb_varying->offset);
+        }
+        if (num_components < 4 || element > 0) {
+            const std::string_view subswizzle{swizzle.substr(element, num_components)};
+            ctx.Name(id, fmt::format("out_attr{}_{}", index, subswizzle));
+        } else {
+            ctx.Name(id, fmt::format("out_attr{}", index));
+        }
+        const GenericElementInfo info{
+            .id = id,
+            .first_element = element,
+            .num_components = num_components,
+        };
+        std::fill_n(ctx.output_generics[index].begin() + element, num_components, info);
+        element += num_components;
+    }
+}
+
+Id GetAttributeType(EmitContext& ctx, AttributeType type) {
+    switch (type) {
+    case AttributeType::Float:
+        return ctx.F32[4];
+    case AttributeType::SignedInt:
+        return ctx.TypeVector(ctx.TypeInt(32, true), 4);
+    case AttributeType::UnsignedInt:
+        return ctx.U32[4];
+    case AttributeType::Disabled:
+        break;
+    }
+    throw InvalidArgument("Invalid attribute type {}", type);
+}
+
+std::optional<AttrInfo> AttrTypes(EmitContext& ctx, u32 index) {
+    const AttributeType type{ctx.runtime_info.generic_input_types.at(index)};
+    switch (type) {
+    case AttributeType::Float:
+        return AttrInfo{ctx.input_f32, ctx.F32[1], false};
+    case AttributeType::UnsignedInt:
+        return AttrInfo{ctx.input_u32, ctx.U32[1], true};
+    case AttributeType::SignedInt:
+        return AttrInfo{ctx.input_s32, ctx.TypeInt(32, true), true};
+    case AttributeType::Disabled:
+        return std::nullopt;
+    }
+    throw InvalidArgument("Invalid attribute type {}", type);
+}
+
+std::string_view StageName(Stage stage) {
+    switch (stage) {
+    case Stage::VertexA:
+        return "vs_a";
+    case Stage::VertexB:
+        return "vs";
+    case Stage::TessellationControl:
+        return "tcs";
+    case Stage::TessellationEval:
+        return "tes";
+    case Stage::Geometry:
+        return "gs";
+    case Stage::Fragment:
+        return "fs";
+    case Stage::Compute:
+        return "cs";
+    }
+    throw InvalidArgument("Invalid stage {}", stage);
+}
+
+template <typename... Args>
+void Name(EmitContext& ctx, Id object, std::string_view format_str, Args&&... args) {
+    ctx.Name(object, fmt::format(fmt::runtime(format_str), StageName(ctx.stage),
+                                 std::forward<Args>(args)...)
+                         .c_str());
+}
+
+void DefineConstBuffers(EmitContext& ctx, const Info& info, Id UniformDefinitions::*member_type,
+                        u32 binding, Id type, char type_char, u32 element_size) {
+    const Id array_type{ctx.TypeArray(type, ctx.Const(65536U / element_size))};
+    ctx.Decorate(array_type, spv::Decoration::ArrayStride, element_size);
+
+    const Id struct_type{ctx.TypeStruct(array_type)};
+    Name(ctx, struct_type, "{}_cbuf_block_{}{}", type_char, element_size * CHAR_BIT);
+    ctx.Decorate(struct_type, spv::Decoration::Block);
+    ctx.MemberName(struct_type, 0, "data");
+    ctx.MemberDecorate(struct_type, 0, spv::Decoration::Offset, 0U);
+
+    const Id struct_pointer_type{ctx.TypePointer(spv::StorageClass::Uniform, struct_type)};
+    const Id uniform_type{ctx.TypePointer(spv::StorageClass::Uniform, type)};
+    ctx.uniform_types.*member_type = uniform_type;
+
+    for (const ConstantBufferDescriptor& desc : info.constant_buffer_descriptors) {
+        const Id id{ctx.AddGlobalVariable(struct_pointer_type, spv::StorageClass::Uniform)};
+        ctx.Decorate(id, spv::Decoration::Binding, binding);
+        ctx.Decorate(id, spv::Decoration::DescriptorSet, 0U);
+        ctx.Name(id, fmt::format("c{}", desc.index));
+        for (size_t i = 0; i < desc.count; ++i) {
+            ctx.cbufs[desc.index + i].*member_type = id;
+        }
+        if (ctx.profile.supported_spirv >= 0x00010400) {
+            ctx.interfaces.push_back(id);
+        }
+        binding += desc.count;
+    }
+}
+
+void DefineSsbos(EmitContext& ctx, StorageTypeDefinition& type_def,
+                 Id StorageDefinitions::*member_type, const Info& info, u32 binding, Id type,
+                 u32 stride) {
+    const Id array_type{ctx.TypeRuntimeArray(type)};
+    ctx.Decorate(array_type, spv::Decoration::ArrayStride, stride);
+
+    const Id struct_type{ctx.TypeStruct(array_type)};
+    ctx.Decorate(struct_type, spv::Decoration::Block);
+    ctx.MemberDecorate(struct_type, 0, spv::Decoration::Offset, 0U);
+
+    const Id struct_pointer{ctx.TypePointer(spv::StorageClass::StorageBuffer, struct_type)};
+    type_def.array = struct_pointer;
+    type_def.element = ctx.TypePointer(spv::StorageClass::StorageBuffer, type);
+
+    u32 index{};
+    for (const StorageBufferDescriptor& desc : info.storage_buffers_descriptors) {
+        const Id id{ctx.AddGlobalVariable(struct_pointer, spv::StorageClass::StorageBuffer)};
+        ctx.Decorate(id, spv::Decoration::Binding, binding);
+        ctx.Decorate(id, spv::Decoration::DescriptorSet, 0U);
+        ctx.Name(id, fmt::format("ssbo{}", index));
+        if (ctx.profile.supported_spirv >= 0x00010400) {
+            ctx.interfaces.push_back(id);
+        }
+        for (size_t i = 0; i < desc.count; ++i) {
+            ctx.ssbos[index + i].*member_type = id;
+        }
+        index += desc.count;
+        binding += desc.count;
+    }
+}
+
+Id CasFunction(EmitContext& ctx, Operation operation, Id value_type) {
+    const Id func_type{ctx.TypeFunction(value_type, value_type, value_type)};
+    const Id func{ctx.OpFunction(value_type, spv::FunctionControlMask::MaskNone, func_type)};
+    const Id op_a{ctx.OpFunctionParameter(value_type)};
+    const Id op_b{ctx.OpFunctionParameter(value_type)};
+    ctx.AddLabel();
+    Id result{};
+    switch (operation) {
+    case Operation::Increment: {
+        const Id pred{ctx.OpUGreaterThanEqual(ctx.U1, op_a, op_b)};
+        const Id incr{ctx.OpIAdd(value_type, op_a, ctx.Constant(value_type, 1))};
+        result = ctx.OpSelect(value_type, pred, ctx.u32_zero_value, incr);
+        break;
+    }
+    case Operation::Decrement: {
+        const Id lhs{ctx.OpIEqual(ctx.U1, op_a, ctx.Constant(value_type, 0u))};
+        const Id rhs{ctx.OpUGreaterThan(ctx.U1, op_a, op_b)};
+        const Id pred{ctx.OpLogicalOr(ctx.U1, lhs, rhs)};
+        const Id decr{ctx.OpISub(value_type, op_a, ctx.Constant(value_type, 1))};
+        result = ctx.OpSelect(value_type, pred, op_b, decr);
+        break;
+    }
+    case Operation::FPAdd:
+        result = ctx.OpFAdd(value_type, op_a, op_b);
+        break;
+    case Operation::FPMin:
+        result = ctx.OpFMin(value_type, op_a, op_b);
+        break;
+    case Operation::FPMax:
+        result = ctx.OpFMax(value_type, op_a, op_b);
+        break;
+    default:
+        break;
+    }
+    ctx.OpReturnValue(result);
+    ctx.OpFunctionEnd();
+    return func;
+}
+
+Id CasLoop(EmitContext& ctx, Operation operation, Id array_pointer, Id element_pointer,
+           Id value_type, Id memory_type, spv::Scope scope) {
+    const bool is_shared{scope == spv::Scope::Workgroup};
+    const bool is_struct{!is_shared || ctx.profile.support_explicit_workgroup_layout};
+    const Id cas_func{CasFunction(ctx, operation, value_type)};
+    const Id zero{ctx.u32_zero_value};
+    const Id scope_id{ctx.Const(static_cast<u32>(scope))};
+
+    const Id loop_header{ctx.OpLabel()};
+    const Id continue_block{ctx.OpLabel()};
+    const Id merge_block{ctx.OpLabel()};
+    const Id func_type{is_shared
+                           ? ctx.TypeFunction(value_type, ctx.U32[1], value_type)
+                           : ctx.TypeFunction(value_type, ctx.U32[1], value_type, array_pointer)};
+
+    const Id func{ctx.OpFunction(value_type, spv::FunctionControlMask::MaskNone, func_type)};
+    const Id index{ctx.OpFunctionParameter(ctx.U32[1])};
+    const Id op_b{ctx.OpFunctionParameter(value_type)};
+    const Id base{is_shared ? ctx.shared_memory_u32 : ctx.OpFunctionParameter(array_pointer)};
+    ctx.AddLabel();
+    ctx.OpBranch(loop_header);
+    ctx.AddLabel(loop_header);
+
+    ctx.OpLoopMerge(merge_block, continue_block, spv::LoopControlMask::MaskNone);
+    ctx.OpBranch(continue_block);
+
+    ctx.AddLabel(continue_block);
+    const Id word_pointer{is_struct ? ctx.OpAccessChain(element_pointer, base, zero, index)
                                    : ctx.OpAccessChain(element_pointer, base, index)};
+    if (value_type.value == ctx.F32[2].value) {
+        const Id u32_value{ctx.OpLoad(ctx.U32[1], word_pointer)};
+        const Id value{ctx.OpUnpackHalf2x16(ctx.F32[2], u32_value)};
+        const Id new_value{ctx.OpFunctionCall(value_type, cas_func, value, op_b)};
+        const Id u32_new_value{ctx.OpPackHalf2x16(ctx.U32[1], new_value)};
+        const Id atomic_res{ctx.OpAtomicCompareExchange(ctx.U32[1], word_pointer, scope_id, zero,
+                                                        zero, u32_new_value, u32_value)};
+        const Id success{ctx.OpIEqual(ctx.U1, atomic_res, u32_value)};
+        ctx.OpBranchConditional(success, merge_block, loop_header);
+
+        ctx.AddLabel(merge_block);
+        ctx.OpReturnValue(ctx.OpUnpackHalf2x16(ctx.F32[2], atomic_res));
+    } else {
+        const Id value{ctx.OpLoad(memory_type, word_pointer)};
+        const bool matching_type{value_type.value == memory_type.value};
+        const Id bitcast_value{matching_type ? value : ctx.OpBitcast(value_type, value)};
+        const Id cal_res{ctx.OpFunctionCall(value_type, cas_func, bitcast_value, op_b)};
+        const Id new_value{matching_type ? cal_res : ctx.OpBitcast(memory_type, cal_res)};
+        const Id atomic_res{ctx.OpAtomicCompareExchange(ctx.U32[1], word_pointer, scope_id, zero,
+                                                        zero, new_value, value)};
+        const Id success{ctx.OpIEqual(ctx.U1, atomic_res, value)};
+        ctx.OpBranchConditional(success, merge_block, loop_header);
+
+        ctx.AddLabel(merge_block);
+        ctx.OpReturnValue(ctx.OpBitcast(value_type, atomic_res));
+    }
+    ctx.OpFunctionEnd();
+    return func;
+}
+
+template <typename Desc>
+std::string NameOf(Stage stage, const Desc& desc, std::string_view prefix) {
+    if (desc.count > 1) {
+        return fmt::format("{}_{}{}_{:02x}x{}", StageName(stage), prefix, desc.cbuf_index,
+                           desc.cbuf_offset, desc.count);
+    } else {
+        return fmt::format("{}_{}{}_{:02x}", StageName(stage), prefix, desc.cbuf_index,
+                           desc.cbuf_offset);
+    }
+}
+
+Id DescType(EmitContext& ctx, Id sampled_type, Id pointer_type, u32 count) {
+    if (count > 1) {
+        const Id array_type{ctx.TypeArray(sampled_type, ctx.Const(count))};
+        return ctx.TypePointer(spv::StorageClass::UniformConstant, array_type);
+    } else {
+        return pointer_type;
+    }
+}
+} // Anonymous namespace
+
+void VectorTypes::Define(Sirit::Module& sirit_ctx, Id base_type, std::string_view name) {
+    defs[0] = sirit_ctx.Name(base_type, name);
+
+    std::array<char, 6> def_name;
+    for (int i = 1; i < 4; ++i) {
+        const std::string_view def_name_view(
+            def_name.data(),
+            fmt::format_to_n(def_name.data(), def_name.size(), "{}x{}", name, i + 1).size);
+        defs[static_cast<size_t>(i)] =
+            sirit_ctx.Name(sirit_ctx.TypeVector(base_type, i + 1), def_name_view);
+    }
+}
+
+EmitContext::EmitContext(const Profile& profile_, const RuntimeInfo& runtime_info_,
+                         IR::Program& program, Bindings& bindings)
+    : Sirit::Module(profile_.supported_spirv), profile{profile_},
+      runtime_info{runtime_info_}, stage{program.stage} {
+    const bool is_unified{profile.unified_descriptor_binding};
+    u32& uniform_binding{is_unified ? bindings.unified : bindings.uniform_buffer};
+    u32& storage_binding{is_unified ? bindings.unified : bindings.storage_buffer};
+    u32& texture_binding{is_unified ? bindings.unified : bindings.texture};
+    u32& image_binding{is_unified ? bindings.unified : bindings.image};
+    AddCapability(spv::Capability::Shader);
+    DefineCommonTypes(program.info);
+    DefineCommonConstants();
+    DefineInterfaces(program);
+    DefineLocalMemory(program);
+    DefineSharedMemory(program);
+    DefineSharedMemoryFunctions(program);
+    DefineConstantBuffers(program.info, uniform_binding);
+    DefineStorageBuffers(program.info, storage_binding);
+    DefineTextureBuffers(program.info, texture_binding);
+    DefineImageBuffers(program.info, image_binding);
+    DefineTextures(program.info, texture_binding);
+    DefineImages(program.info, image_binding);
+    DefineAttributeMemAccess(program.info);
+    DefineGlobalMemoryFunctions(program.info);
+}
+
+EmitContext::~EmitContext() = default;
+
+Id EmitContext::Def(const IR::Value& value) {
+    if (!value.IsImmediate()) {
+        return value.InstRecursive()->Definition<Id>();
+    }
+    switch (value.Type()) {
+    case IR::Type::Void:
+        // Void instructions are used for optional arguments (e.g. texture offsets)
+        // They are not meant to be used in the SPIR-V module
+        return Id{};
+    case IR::Type::U1:
+        return value.U1() ?
true_value : false_value; + case IR::Type::U32: + return Const(value.U32()); + case IR::Type::U64: + return Constant(U64, value.U64()); + case IR::Type::F32: + return Const(value.F32()); + case IR::Type::F64: + return Constant(F64[1], value.F64()); + default: + throw NotImplementedException("Immediate type {}", value.Type()); + } +} + +Id EmitContext::BitOffset8(const IR::Value& offset) { + if (offset.IsImmediate()) { + return Const((offset.U32() % 4) * 8); + } + return OpBitwiseAnd(U32[1], OpShiftLeftLogical(U32[1], Def(offset), Const(3u)), Const(24u)); +} + +Id EmitContext::BitOffset16(const IR::Value& offset) { + if (offset.IsImmediate()) { + return Const(((offset.U32() / 2) % 2) * 16); + } + return OpBitwiseAnd(U32[1], OpShiftLeftLogical(U32[1], Def(offset), Const(3u)), Const(16u)); +} + +void EmitContext::DefineCommonTypes(const Info& info) { + void_id = TypeVoid(); + + U1 = Name(TypeBool(), "u1"); + + F32.Define(*this, TypeFloat(32), "f32"); + U32.Define(*this, TypeInt(32, false), "u32"); + S32.Define(*this, TypeInt(32, true), "s32"); + + private_u32 = Name(TypePointer(spv::StorageClass::Private, U32[1]), "private_u32"); + + input_f32 = Name(TypePointer(spv::StorageClass::Input, F32[1]), "input_f32"); + input_u32 = Name(TypePointer(spv::StorageClass::Input, U32[1]), "input_u32"); + input_s32 = Name(TypePointer(spv::StorageClass::Input, TypeInt(32, true)), "input_s32"); + + output_f32 = Name(TypePointer(spv::StorageClass::Output, F32[1]), "output_f32"); + output_u32 = Name(TypePointer(spv::StorageClass::Output, U32[1]), "output_u32"); + + if (info.uses_int8 && profile.support_int8) { + AddCapability(spv::Capability::Int8); + U8 = Name(TypeInt(8, false), "u8"); + S8 = Name(TypeInt(8, true), "s8"); + } + if (info.uses_int16 && profile.support_int16) { + AddCapability(spv::Capability::Int16); + U16 = Name(TypeInt(16, false), "u16"); + S16 = Name(TypeInt(16, true), "s16"); + } + if (info.uses_int64) { + AddCapability(spv::Capability::Int64); + U64 = Name(TypeInt(64, false), "u64"); + } + if (info.uses_fp16) { + AddCapability(spv::Capability::Float16); + F16.Define(*this, TypeFloat(16), "f16"); + } + if (info.uses_fp64) { + AddCapability(spv::Capability::Float64); + F64.Define(*this, TypeFloat(64), "f64"); + } +} + +void EmitContext::DefineCommonConstants() { + true_value = ConstantTrue(U1); + false_value = ConstantFalse(U1); + u32_zero_value = Const(0U); + f32_zero_value = Const(0.0f); +} + +void EmitContext::DefineInterfaces(const IR::Program& program) { + DefineInputs(program); + DefineOutputs(program); +} + +void EmitContext::DefineLocalMemory(const IR::Program& program) { + if (program.local_memory_size == 0) { + return; + } + const u32 num_elements{Common::DivCeil(program.local_memory_size, 4U)}; + const Id type{TypeArray(U32[1], Const(num_elements))}; + const Id pointer{TypePointer(spv::StorageClass::Private, type)}; + local_memory = AddGlobalVariable(pointer, spv::StorageClass::Private); + if (profile.supported_spirv >= 0x00010400) { + interfaces.push_back(local_memory); + } +} + +void EmitContext::DefineSharedMemory(const IR::Program& program) { + if (program.shared_memory_size == 0) { + return; + } + const auto make{[&](Id element_type, u32 element_size) { + const u32 num_elements{Common::DivCeil(program.shared_memory_size, element_size)}; + const Id array_type{TypeArray(element_type, Const(num_elements))}; + Decorate(array_type, spv::Decoration::ArrayStride, element_size); + + const Id struct_type{TypeStruct(array_type)}; + MemberDecorate(struct_type, 0U, 
spv::Decoration::Offset, 0U); + Decorate(struct_type, spv::Decoration::Block); + + const Id pointer{TypePointer(spv::StorageClass::Workgroup, struct_type)}; + const Id element_pointer{TypePointer(spv::StorageClass::Workgroup, element_type)}; + const Id variable{AddGlobalVariable(pointer, spv::StorageClass::Workgroup)}; + Decorate(variable, spv::Decoration::Aliased); + interfaces.push_back(variable); + + return std::make_tuple(variable, element_pointer, pointer); + }}; + if (profile.support_explicit_workgroup_layout) { + AddExtension("SPV_KHR_workgroup_memory_explicit_layout"); + AddCapability(spv::Capability::WorkgroupMemoryExplicitLayoutKHR); + if (program.info.uses_int8) { + AddCapability(spv::Capability::WorkgroupMemoryExplicitLayout8BitAccessKHR); + std::tie(shared_memory_u8, shared_u8, std::ignore) = make(U8, 1); + } + if (program.info.uses_int16) { + AddCapability(spv::Capability::WorkgroupMemoryExplicitLayout16BitAccessKHR); + std::tie(shared_memory_u16, shared_u16, std::ignore) = make(U16, 2); + } + if (program.info.uses_int64) { + std::tie(shared_memory_u64, shared_u64, std::ignore) = make(U64, 8); + } + std::tie(shared_memory_u32, shared_u32, shared_memory_u32_type) = make(U32[1], 4); + std::tie(shared_memory_u32x2, shared_u32x2, std::ignore) = make(U32[2], 8); + std::tie(shared_memory_u32x4, shared_u32x4, std::ignore) = make(U32[4], 16); + return; + } + const u32 num_elements{Common::DivCeil(program.shared_memory_size, 4U)}; + const Id type{TypeArray(U32[1], Const(num_elements))}; + shared_memory_u32_type = TypePointer(spv::StorageClass::Workgroup, type); + + shared_u32 = TypePointer(spv::StorageClass::Workgroup, U32[1]); + shared_memory_u32 = AddGlobalVariable(shared_memory_u32_type, spv::StorageClass::Workgroup); + interfaces.push_back(shared_memory_u32); + + const Id func_type{TypeFunction(void_id, U32[1], U32[1])}; + const auto make_function{[&](u32 mask, u32 size) { + const Id loop_header{OpLabel()}; + const Id continue_block{OpLabel()}; + const Id merge_block{OpLabel()}; + + const Id func{OpFunction(void_id, spv::FunctionControlMask::MaskNone, func_type)}; + const Id offset{OpFunctionParameter(U32[1])}; + const Id insert_value{OpFunctionParameter(U32[1])}; + AddLabel(); + OpBranch(loop_header); + + AddLabel(loop_header); + const Id word_offset{OpShiftRightArithmetic(U32[1], offset, Const(2U))}; + const Id shift_offset{OpShiftLeftLogical(U32[1], offset, Const(3U))}; + const Id bit_offset{OpBitwiseAnd(U32[1], shift_offset, Const(mask))}; + const Id count{Const(size)}; + OpLoopMerge(merge_block, continue_block, spv::LoopControlMask::MaskNone); + OpBranch(continue_block); + + AddLabel(continue_block); + const Id word_pointer{OpAccessChain(shared_u32, shared_memory_u32, word_offset)}; + const Id old_value{OpLoad(U32[1], word_pointer)}; + const Id new_value{OpBitFieldInsert(U32[1], old_value, insert_value, bit_offset, count)}; + const Id atomic_res{OpAtomicCompareExchange(U32[1], word_pointer, Const(1U), u32_zero_value, + u32_zero_value, new_value, old_value)}; + const Id success{OpIEqual(U1, atomic_res, old_value)}; + OpBranchConditional(success, merge_block, loop_header); + + AddLabel(merge_block); + OpReturn(); + OpFunctionEnd(); + return func; + }}; + if (program.info.uses_int8) { + shared_store_u8_func = make_function(24, 8); + } + if (program.info.uses_int16) { + shared_store_u16_func = make_function(16, 16); + } +} + +void EmitContext::DefineSharedMemoryFunctions(const IR::Program& program) { + if (program.info.uses_shared_increment) { + increment_cas_shared = 
CasLoop(*this, Operation::Increment, shared_memory_u32_type,
+                                         shared_u32, U32[1], U32[1], spv::Scope::Workgroup);
+    }
+    if (program.info.uses_shared_decrement) {
+        decrement_cas_shared = CasLoop(*this, Operation::Decrement, shared_memory_u32_type,
+                                       shared_u32, U32[1], U32[1], spv::Scope::Workgroup);
+    }
+}
+
+void EmitContext::DefineAttributeMemAccess(const Info& info) {
+    const auto make_load{[&] {
+        const bool is_array{stage == Stage::Geometry};
+        const Id end_block{OpLabel()};
+        const Id default_label{OpLabel()};
+
+        const Id func_type_load{is_array ? TypeFunction(F32[1], U32[1], U32[1])
+                                         : TypeFunction(F32[1], U32[1])};
+        const Id func{OpFunction(F32[1], spv::FunctionControlMask::MaskNone, func_type_load)};
+        const Id offset{OpFunctionParameter(U32[1])};
+        const Id vertex{is_array ? OpFunctionParameter(U32[1]) : Id{}};
+
+        AddLabel();
+        const Id base_index{OpShiftRightArithmetic(U32[1], offset, Const(2U))};
+        const Id masked_index{OpBitwiseAnd(U32[1], base_index, Const(3U))};
+        const Id compare_index{OpShiftRightArithmetic(U32[1], base_index, Const(2U))};
+        std::vector<Sirit::Literal> literals;
+        std::vector<Id> labels;
+        if (info.loads.AnyComponent(IR::Attribute::PositionX)) {
+            literals.push_back(static_cast<u32>(IR::Attribute::PositionX) >> 2);
+            labels.push_back(OpLabel());
+        }
+        const u32 base_attribute_value = static_cast<u32>(IR::Attribute::Generic0X) >> 2;
+        for (u32 index = 0; index < static_cast<u32>(IR::NUM_GENERICS); ++index) {
+            if (!info.loads.Generic(index)) {
+                continue;
+            }
+            literals.push_back(base_attribute_value + index);
+            labels.push_back(OpLabel());
+        }
+        OpSelectionMerge(end_block, spv::SelectionControlMask::MaskNone);
+        OpSwitch(compare_index, default_label, literals, labels);
+        AddLabel(default_label);
+        OpReturnValue(Const(0.0f));
+        size_t label_index{0};
+        if (info.loads.AnyComponent(IR::Attribute::PositionX)) {
+            AddLabel(labels[label_index]);
+            const Id pointer{is_array
+                                 ? OpAccessChain(input_f32, input_position, vertex, masked_index)
+                                 : OpAccessChain(input_f32, input_position, masked_index)};
+            const Id result{OpLoad(F32[1], pointer)};
+            OpReturnValue(result);
+            ++label_index;
+        }
+        for (size_t index = 0; index < IR::NUM_GENERICS; ++index) {
+            if (!info.loads.Generic(index)) {
+                continue;
+            }
+            AddLabel(labels[label_index]);
+            const auto type{AttrTypes(*this, static_cast<u32>(index))};
+            if (!type) {
+                OpReturnValue(Const(0.0f));
+                ++label_index;
+                continue;
+            }
+            const Id generic_id{input_generics.at(index)};
+            const Id pointer{is_array
+                                 ? OpAccessChain(type->pointer, generic_id, vertex, masked_index)
+                                 : OpAccessChain(type->pointer, generic_id, masked_index)};
+            const Id value{OpLoad(type->id, pointer)};
+            const Id result{type->needs_cast ? OpBitcast(F32[1], value) : value};
+            OpReturnValue(result);
+            ++label_index;
+        }
+        AddLabel(end_block);
+        OpUnreachable();
+        OpFunctionEnd();
+        return func;
+    }};
+    const auto make_store{[&] {
+        const Id end_block{OpLabel()};
+        const Id default_label{OpLabel()};
+
+        const Id func_type_store{TypeFunction(void_id, U32[1], F32[1])};
+        const Id func{OpFunction(void_id, spv::FunctionControlMask::MaskNone, func_type_store)};
+        const Id offset{OpFunctionParameter(U32[1])};
+        const Id store_value{OpFunctionParameter(F32[1])};
+        AddLabel();
+        const Id base_index{OpShiftRightArithmetic(U32[1], offset, Const(2U))};
+        const Id masked_index{OpBitwiseAnd(U32[1], base_index, Const(3U))};
+        const Id compare_index{OpShiftRightArithmetic(U32[1], base_index, Const(2U))};
+        std::vector<Sirit::Literal> literals;
+        std::vector<Id> labels;
+        if (info.stores.AnyComponent(IR::Attribute::PositionX)) {
+            literals.push_back(static_cast<u32>(IR::Attribute::PositionX) >> 2);
+            labels.push_back(OpLabel());
+        }
+        const u32 base_attribute_value = static_cast<u32>(IR::Attribute::Generic0X) >> 2;
+        for (size_t index = 0; index < IR::NUM_GENERICS; ++index) {
+            if (!info.stores.Generic(index)) {
+                continue;
+            }
+            literals.push_back(base_attribute_value + static_cast<u32>(index));
+            labels.push_back(OpLabel());
+        }
+        if (info.stores.ClipDistances()) {
+            literals.push_back(static_cast<u32>(IR::Attribute::ClipDistance0) >> 2);
+            labels.push_back(OpLabel());
+            literals.push_back(static_cast<u32>(IR::Attribute::ClipDistance4) >> 2);
+            labels.push_back(OpLabel());
+        }
+        OpSelectionMerge(end_block, spv::SelectionControlMask::MaskNone);
+        OpSwitch(compare_index, default_label, literals, labels);
+        AddLabel(default_label);
+        OpReturn();
+        size_t label_index{0};
+        if (info.stores.AnyComponent(IR::Attribute::PositionX)) {
+            AddLabel(labels[label_index]);
+            const Id pointer{OpAccessChain(output_f32, output_position, masked_index)};
+            OpStore(pointer, store_value);
+            OpReturn();
+            ++label_index;
+        }
+        for (size_t index = 0; index < IR::NUM_GENERICS; ++index) {
+            if (!info.stores.Generic(index)) {
+                continue;
+            }
+            if (output_generics[index][0].num_components != 4) {
+                throw NotImplementedException("Physical stores and transform feedbacks");
+            }
+            AddLabel(labels[label_index]);
+            const Id generic_id{output_generics[index][0].id};
+            const Id pointer{OpAccessChain(output_f32, generic_id, masked_index)};
+            OpStore(pointer, store_value);
+            OpReturn();
+            ++label_index;
+        }
+        if (info.stores.ClipDistances()) {
+            AddLabel(labels[label_index]);
+            const Id pointer{OpAccessChain(output_f32, clip_distances, masked_index)};
+            OpStore(pointer, store_value);
+            OpReturn();
+            ++label_index;
+            AddLabel(labels[label_index]);
+            const Id fixed_index{OpIAdd(U32[1], masked_index, Const(4U))};
+            const Id pointer2{OpAccessChain(output_f32, clip_distances, fixed_index)};
+            OpStore(pointer2, store_value);
+            OpReturn();
+            ++label_index;
+        }
+        AddLabel(end_block);
+        OpUnreachable();
+        OpFunctionEnd();
+        return func;
+    }};
+    if (info.loads_indexed_attributes) {
+        indexed_load_func = make_load();
+    }
+    if (info.stores_indexed_attributes) {
+        indexed_store_func = make_store();
+    }
+}
+
+void EmitContext::DefineGlobalMemoryFunctions(const Info& info) {
+    if (!info.uses_global_memory || !profile.support_int64) {
+        return;
+    }
+    using DefPtr = Id StorageDefinitions::*;
+    const Id zero{u32_zero_value};
+    const auto define_body{[&](DefPtr ssbo_member, Id addr, Id element_pointer, u32 shift,
+                               auto&& callback) {
+        AddLabel();
+        const size_t num_buffers{info.storage_buffers_descriptors.size()};
+        for (size_t index = 0; index < num_buffers; ++index) {
+            if (!info.nvn_buffer_used[index]) {
+                continue;
+            }
+            const auto& ssbo{info.storage_buffers_descriptors[index]};
+            const Id ssbo_addr_cbuf_offset{Const(ssbo.cbuf_offset / 8)};
+            const Id ssbo_size_cbuf_offset{Const(ssbo.cbuf_offset / 4 + 2)};
+            const Id ssbo_addr_pointer{OpAccessChain(
+                uniform_types.U32x2, cbufs[ssbo.cbuf_index].U32x2, zero, ssbo_addr_cbuf_offset)};
+            const Id ssbo_size_pointer{OpAccessChain(uniform_types.U32, cbufs[ssbo.cbuf_index].U32,
+                                                     zero, ssbo_size_cbuf_offset)};
+
+            const Id ssbo_addr{OpBitcast(U64, OpLoad(U32[2], ssbo_addr_pointer))};
+            const Id ssbo_size{OpUConvert(U64, OpLoad(U32[1], ssbo_size_pointer))};
+            const Id ssbo_end{OpIAdd(U64, ssbo_addr, ssbo_size)};
+            const Id cond{OpLogicalAnd(U1, OpUGreaterThanEqual(U1, addr, ssbo_addr),
+                                       OpULessThan(U1, addr, ssbo_end))};
+            const Id then_label{OpLabel()};
+            const Id else_label{OpLabel()};
+            OpSelectionMerge(else_label, spv::SelectionControlMask::MaskNone);
+            OpBranchConditional(cond, then_label, else_label);
+            AddLabel(then_label);
+            const Id ssbo_id{ssbos[index].*ssbo_member};
+            const Id ssbo_offset{OpUConvert(U32[1], OpISub(U64, addr, ssbo_addr))};
+            const Id ssbo_index{OpShiftRightLogical(U32[1], ssbo_offset, Const(shift))};
+            const Id ssbo_pointer{OpAccessChain(element_pointer, ssbo_id, zero, ssbo_index)};
+            callback(ssbo_pointer);
+            AddLabel(else_label);
+        }
+    }};
+    const auto define_load{[&](DefPtr ssbo_member, Id element_pointer, Id type, u32 shift) {
+        const Id function_type{TypeFunction(type, U64)};
+        const Id func_id{OpFunction(type, spv::FunctionControlMask::MaskNone, function_type)};
+        const Id addr{OpFunctionParameter(U64)};
+        define_body(ssbo_member, addr, element_pointer, shift,
+                    [&](Id ssbo_pointer) { OpReturnValue(OpLoad(type, ssbo_pointer)); });
+        OpReturnValue(ConstantNull(type));
+        OpFunctionEnd();
+        return func_id;
+    }};
+    const auto define_write{[&](DefPtr ssbo_member, Id element_pointer, Id type, u32 shift) {
+        const Id function_type{TypeFunction(void_id, U64, type)};
+        const Id func_id{OpFunction(void_id, spv::FunctionControlMask::MaskNone, function_type)};
+        const Id addr{OpFunctionParameter(U64)};
+        const Id data{OpFunctionParameter(type)};
+        define_body(ssbo_member, addr, element_pointer, shift, [&](Id ssbo_pointer) {
+            OpStore(ssbo_pointer, data);
+            OpReturn();
+        });
+        OpReturn();
+        OpFunctionEnd();
+        return func_id;
+    }};
+    const auto define{
+        [&](DefPtr ssbo_member, const StorageTypeDefinition& type_def, Id type, size_t size) {
+            const Id element_type{type_def.element};
+            const u32 shift{static_cast<u32>(std::countr_zero(size))};
+            const Id load_func{define_load(ssbo_member, element_type, type, shift)};
+            const Id write_func{define_write(ssbo_member, element_type, type, shift)};
+            return std::make_pair(load_func, write_func);
+        }};
+    std::tie(load_global_func_u32, write_global_func_u32) =
+        define(&StorageDefinitions::U32, storage_types.U32, U32[1], sizeof(u32));
+    std::tie(load_global_func_u32x2, write_global_func_u32x2) =
+        define(&StorageDefinitions::U32x2, storage_types.U32x2, U32[2], sizeof(u32[2]));
+    std::tie(load_global_func_u32x4, write_global_func_u32x4) =
+        define(&StorageDefinitions::U32x4, storage_types.U32x4, U32[4], sizeof(u32[4]));
+}
+
+void EmitContext::DefineConstantBuffers(const Info& info, u32& binding) {
+    if (info.constant_buffer_descriptors.empty()) {
+        return;
+    }
+    if (!profile.support_descriptor_aliasing) {
+        DefineConstBuffers(*this, info, &UniformDefinitions::U32x4, binding, U32[4], 'u',
+                           sizeof(u32[4]));
+        for (const ConstantBufferDescriptor& desc : info.constant_buffer_descriptors) {
+            binding += desc.count;
+        }
+        return;
+    }
+    IR::Type types{info.used_constant_buffer_types};
+    if (True(types & IR::Type::U8)) {
+        if (profile.support_int8) {
+            DefineConstBuffers(*this, info, &UniformDefinitions::U8, binding, U8, 'u', sizeof(u8));
+            DefineConstBuffers(*this, info, &UniformDefinitions::S8, binding, S8, 's', sizeof(s8));
+        } else {
+            types |= IR::Type::U32;
+        }
+    }
+    if (True(types & IR::Type::U16)) {
+        if (profile.support_int16) {
+            DefineConstBuffers(*this, info, &UniformDefinitions::U16, binding, U16, 'u',
+                               sizeof(u16));
+            DefineConstBuffers(*this, info, &UniformDefinitions::S16, binding, S16, 's',
+                               sizeof(s16));
+        } else {
+            types |= IR::Type::U32;
+        }
+    }
+    if (True(types & IR::Type::U32)) {
+        DefineConstBuffers(*this, info, &UniformDefinitions::U32, binding, U32[1], 'u',
+                           sizeof(u32));
+    }
+    if (True(types & IR::Type::F32)) {
+        DefineConstBuffers(*this, info, &UniformDefinitions::F32, binding, F32[1], 'f',
+                           sizeof(f32));
+    }
+    if (True(types & IR::Type::U32x2)) {
+        DefineConstBuffers(*this, info, &UniformDefinitions::U32x2, binding, U32[2], 'u',
+                           sizeof(u32[2]));
+    }
+    binding += static_cast<u32>(info.constant_buffer_descriptors.size());
+}
+
+void EmitContext::DefineStorageBuffers(const Info& info, u32& binding) {
+    if (info.storage_buffers_descriptors.empty()) {
+        return;
+    }
+    AddExtension("SPV_KHR_storage_buffer_storage_class");
+
+    const IR::Type used_types{profile.support_descriptor_aliasing ? info.used_storage_buffer_types
+                                                                  : IR::Type::U32};
+    if (profile.support_int8 && True(used_types & IR::Type::U8)) {
+        DefineSsbos(*this, storage_types.U8, &StorageDefinitions::U8, info, binding, U8,
+                    sizeof(u8));
+        DefineSsbos(*this, storage_types.S8, &StorageDefinitions::S8, info, binding, S8,
+                    sizeof(u8));
+    }
+    if (profile.support_int16 && True(used_types & IR::Type::U16)) {
+        DefineSsbos(*this, storage_types.U16, &StorageDefinitions::U16, info, binding, U16,
+                    sizeof(u16));
+        DefineSsbos(*this, storage_types.S16, &StorageDefinitions::S16, info, binding, S16,
+                    sizeof(u16));
+    }
+    if (True(used_types & IR::Type::U32)) {
+        DefineSsbos(*this, storage_types.U32, &StorageDefinitions::U32, info, binding, U32[1],
+                    sizeof(u32));
+    }
+    if (True(used_types & IR::Type::F32)) {
+        DefineSsbos(*this, storage_types.F32, &StorageDefinitions::F32, info, binding, F32[1],
+                    sizeof(f32));
+    }
+    if (True(used_types & IR::Type::U64)) {
+        DefineSsbos(*this, storage_types.U64, &StorageDefinitions::U64, info, binding, U64,
+                    sizeof(u64));
+    }
+    if (True(used_types & IR::Type::U32x2)) {
+        DefineSsbos(*this, storage_types.U32x2, &StorageDefinitions::U32x2, info, binding, U32[2],
+                    sizeof(u32[2]));
+    }
+    if (True(used_types & IR::Type::U32x4)) {
+        DefineSsbos(*this, storage_types.U32x4, &StorageDefinitions::U32x4, info, binding, U32[4],
+                    sizeof(u32[4]));
+    }
+    for (const StorageBufferDescriptor& desc : info.storage_buffers_descriptors) {
+        binding += desc.count;
+    }
+    const bool needs_function{
+        info.uses_global_increment || info.uses_global_decrement || info.uses_atomic_f32_add ||
+        info.uses_atomic_f16x2_add || info.uses_atomic_f16x2_min || info.uses_atomic_f16x2_max ||
+        info.uses_atomic_f32x2_add || info.uses_atomic_f32x2_min || info.uses_atomic_f32x2_max};
+    if (needs_function) {
+        AddCapability(spv::Capability::VariablePointersStorageBuffer);
+    }
+    if (info.uses_global_increment) {
+        increment_cas_ssbo = CasLoop(*this, Operation::Increment, storage_types.U32.array,
+                                     storage_types.U32.element, U32[1], U32[1],
spv::Scope::Device); + } + if (info.uses_global_decrement) { + decrement_cas_ssbo = CasLoop(*this, Operation::Decrement, storage_types.U32.array, + storage_types.U32.element, U32[1], U32[1], spv::Scope::Device); + } + if (info.uses_atomic_f32_add) { + f32_add_cas = CasLoop(*this, Operation::FPAdd, storage_types.U32.array, + storage_types.U32.element, F32[1], U32[1], spv::Scope::Device); + } + if (info.uses_atomic_f16x2_add) { + f16x2_add_cas = CasLoop(*this, Operation::FPAdd, storage_types.U32.array, + storage_types.U32.element, F16[2], F16[2], spv::Scope::Device); + } + if (info.uses_atomic_f16x2_min) { + f16x2_min_cas = CasLoop(*this, Operation::FPMin, storage_types.U32.array, + storage_types.U32.element, F16[2], F16[2], spv::Scope::Device); + } + if (info.uses_atomic_f16x2_max) { + f16x2_max_cas = CasLoop(*this, Operation::FPMax, storage_types.U32.array, + storage_types.U32.element, F16[2], F16[2], spv::Scope::Device); + } + if (info.uses_atomic_f32x2_add) { + f32x2_add_cas = CasLoop(*this, Operation::FPAdd, storage_types.U32.array, + storage_types.U32.element, F32[2], F32[2], spv::Scope::Device); + } + if (info.uses_atomic_f32x2_min) { + f32x2_min_cas = CasLoop(*this, Operation::FPMin, storage_types.U32.array, + storage_types.U32.element, F32[2], F32[2], spv::Scope::Device); + } + if (info.uses_atomic_f32x2_max) { + f32x2_max_cas = CasLoop(*this, Operation::FPMax, storage_types.U32.array, + storage_types.U32.element, F32[2], F32[2], spv::Scope::Device); + } +} + +void EmitContext::DefineTextureBuffers(const Info& info, u32& binding) { + if (info.texture_buffer_descriptors.empty()) { + return; + } + const spv::ImageFormat format{spv::ImageFormat::Unknown}; + image_buffer_type = TypeImage(F32[1], spv::Dim::Buffer, 0U, false, false, 1, format); + sampled_texture_buffer_type = TypeSampledImage(image_buffer_type); + + const Id type{TypePointer(spv::StorageClass::UniformConstant, sampled_texture_buffer_type)}; + texture_buffers.reserve(info.texture_buffer_descriptors.size()); + for (const TextureBufferDescriptor& desc : info.texture_buffer_descriptors) { + if (desc.count != 1) { + throw NotImplementedException("Array of texture buffers"); + } + const Id id{AddGlobalVariable(type, spv::StorageClass::UniformConstant)}; + Decorate(id, spv::Decoration::Binding, binding); + Decorate(id, spv::Decoration::DescriptorSet, 0U); + Name(id, NameOf(stage, desc, "texbuf")); + texture_buffers.push_back({ + .id = id, + .count = desc.count, + }); + if (profile.supported_spirv >= 0x00010400) { + interfaces.push_back(id); + } + ++binding; + } +} + +void EmitContext::DefineImageBuffers(const Info& info, u32& binding) { + image_buffers.reserve(info.image_buffer_descriptors.size()); + for (const ImageBufferDescriptor& desc : info.image_buffer_descriptors) { + if (desc.count != 1) { + throw NotImplementedException("Array of image buffers"); + } + const spv::ImageFormat format{GetImageFormat(desc.format)}; + const Id image_type{TypeImage(U32[1], spv::Dim::Buffer, false, false, false, 2, format)}; + const Id pointer_type{TypePointer(spv::StorageClass::UniformConstant, image_type)}; + const Id id{AddGlobalVariable(pointer_type, spv::StorageClass::UniformConstant)}; + Decorate(id, spv::Decoration::Binding, binding); + Decorate(id, spv::Decoration::DescriptorSet, 0U); + Name(id, NameOf(stage, desc, "imgbuf")); + image_buffers.push_back({ + .id = id, + .image_type = image_type, + .count = desc.count, + }); + if (profile.supported_spirv >= 0x00010400) { + interfaces.push_back(id); + } + ++binding; + } +} + +void 
EmitContext::DefineTextures(const Info& info, u32& binding) { + textures.reserve(info.texture_descriptors.size()); + for (const TextureDescriptor& desc : info.texture_descriptors) { + const Id image_type{ImageType(*this, desc)}; + const Id sampled_type{TypeSampledImage(image_type)}; + const Id pointer_type{TypePointer(spv::StorageClass::UniformConstant, sampled_type)}; + const Id desc_type{DescType(*this, sampled_type, pointer_type, desc.count)}; + const Id id{AddGlobalVariable(desc_type, spv::StorageClass::UniformConstant)}; + Decorate(id, spv::Decoration::Binding, binding); + Decorate(id, spv::Decoration::DescriptorSet, 0U); + Name(id, NameOf(stage, desc, "tex")); + textures.push_back({ + .id = id, + .sampled_type = sampled_type, + .pointer_type = pointer_type, + .image_type = image_type, + .count = desc.count, + }); + if (profile.supported_spirv >= 0x00010400) { + interfaces.push_back(id); + } + ++binding; + } + if (info.uses_atomic_image_u32) { + image_u32 = TypePointer(spv::StorageClass::Image, U32[1]); + } +} + +void EmitContext::DefineImages(const Info& info, u32& binding) { + images.reserve(info.image_descriptors.size()); + for (const ImageDescriptor& desc : info.image_descriptors) { + if (desc.count != 1) { + throw NotImplementedException("Array of images"); + } + const Id image_type{ImageType(*this, desc)}; + const Id pointer_type{TypePointer(spv::StorageClass::UniformConstant, image_type)}; + const Id id{AddGlobalVariable(pointer_type, spv::StorageClass::UniformConstant)}; + Decorate(id, spv::Decoration::Binding, binding); + Decorate(id, spv::Decoration::DescriptorSet, 0U); + Name(id, NameOf(stage, desc, "img")); + images.push_back({ + .id = id, + .image_type = image_type, + .count = desc.count, + }); + if (profile.supported_spirv >= 0x00010400) { + interfaces.push_back(id); + } + ++binding; + } +} + +void EmitContext::DefineInputs(const IR::Program& program) { + const Info& info{program.info}; + const VaryingState loads{info.loads.mask | info.passthrough.mask}; + + if (info.uses_workgroup_id) { + workgroup_id = DefineInput(*this, U32[3], false, spv::BuiltIn::WorkgroupId); + } + if (info.uses_local_invocation_id) { + local_invocation_id = DefineInput(*this, U32[3], false, spv::BuiltIn::LocalInvocationId); + } + if (info.uses_invocation_id) { + invocation_id = DefineInput(*this, U32[1], false, spv::BuiltIn::InvocationId); + } + if (info.uses_sample_id) { + sample_id = DefineInput(*this, U32[1], false, spv::BuiltIn::SampleId); + } + if (info.uses_is_helper_invocation) { + is_helper_invocation = DefineInput(*this, U1, false, spv::BuiltIn::HelperInvocation); + } + if (info.uses_subgroup_mask) { + subgroup_mask_eq = DefineInput(*this, U32[4], false, spv::BuiltIn::SubgroupEqMaskKHR); + subgroup_mask_lt = DefineInput(*this, U32[4], false, spv::BuiltIn::SubgroupLtMaskKHR); + subgroup_mask_le = DefineInput(*this, U32[4], false, spv::BuiltIn::SubgroupLeMaskKHR); + subgroup_mask_gt = DefineInput(*this, U32[4], false, spv::BuiltIn::SubgroupGtMaskKHR); + subgroup_mask_ge = DefineInput(*this, U32[4], false, spv::BuiltIn::SubgroupGeMaskKHR); + } + if (info.uses_subgroup_invocation_id || info.uses_subgroup_shuffles || + (profile.warp_size_potentially_larger_than_guest && + (info.uses_subgroup_vote || info.uses_subgroup_mask))) { + subgroup_local_invocation_id = + DefineInput(*this, U32[1], false, spv::BuiltIn::SubgroupLocalInvocationId); + } + if (info.uses_fswzadd) { + const Id f32_one{Const(1.0f)}; + const Id f32_minus_one{Const(-1.0f)}; + const Id f32_zero{Const(0.0f)}; + fswzadd_lut_a = 
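[Editor's note] The texture buffer, image buffer, texture, and image paths above all repeat one declaration pattern. Condensed into a sketch (the helper is ours for exposition, not part of the emitter):

    // Declares one descriptor-backed global, assigns it the next binding slot
    // in descriptor set 0, and registers it with the entry-point interface
    // list, which SPIR-V 1.4+ requires to include every referenced global.
    Id DeclareDescriptor(EmitContext& ctx, Id pointer_type, u32& binding) {
        const Id id{ctx.AddGlobalVariable(pointer_type, spv::StorageClass::UniformConstant)};
        ctx.Decorate(id, spv::Decoration::Binding, binding);
        ctx.Decorate(id, spv::Decoration::DescriptorSet, 0U);
        if (ctx.profile.supported_spirv >= 0x00010400) {
            ctx.interfaces.push_back(id);
        }
        ++binding;
        return id;
    }
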
ConstantComposite(F32[4], f32_minus_one, f32_one, f32_minus_one, f32_zero);
+        fswzadd_lut_b =
+            ConstantComposite(F32[4], f32_minus_one, f32_minus_one, f32_one, f32_minus_one);
+    }
+    if (loads[IR::Attribute::PrimitiveId]) {
+        primitive_id = DefineInput(*this, U32[1], false, spv::BuiltIn::PrimitiveId);
+    }
+    if (loads.AnyComponent(IR::Attribute::PositionX)) {
+        const bool is_fragment{stage == Stage::Fragment};
+        const spv::BuiltIn built_in{is_fragment ? spv::BuiltIn::FragCoord : spv::BuiltIn::Position};
+        input_position = DefineInput(*this, F32[4], true, built_in);
+        if (profile.support_geometry_shader_passthrough) {
+            if (info.passthrough.AnyComponent(IR::Attribute::PositionX)) {
+                Decorate(input_position, spv::Decoration::PassthroughNV);
+            }
+        }
+    }
+    if (loads[IR::Attribute::InstanceId]) {
+        if (profile.support_vertex_instance_id) {
+            instance_id = DefineInput(*this, U32[1], true, spv::BuiltIn::InstanceId);
+        } else {
+            instance_index = DefineInput(*this, U32[1], true, spv::BuiltIn::InstanceIndex);
+            base_instance = DefineInput(*this, U32[1], true, spv::BuiltIn::BaseInstance);
+        }
+    }
+    if (loads[IR::Attribute::VertexId]) {
+        if (profile.support_vertex_instance_id) {
+            vertex_id = DefineInput(*this, U32[1], true, spv::BuiltIn::VertexId);
+        } else {
+            vertex_index = DefineInput(*this, U32[1], true, spv::BuiltIn::VertexIndex);
+            base_vertex = DefineInput(*this, U32[1], true, spv::BuiltIn::BaseVertex);
+        }
+    }
+    if (loads[IR::Attribute::FrontFace]) {
+        front_face = DefineInput(*this, U1, true, spv::BuiltIn::FrontFacing);
+    }
+    if (loads[IR::Attribute::PointSpriteS] || loads[IR::Attribute::PointSpriteT]) {
+        point_coord = DefineInput(*this, F32[2], true, spv::BuiltIn::PointCoord);
+    }
+    if (loads[IR::Attribute::TessellationEvaluationPointU] ||
+        loads[IR::Attribute::TessellationEvaluationPointV]) {
+        tess_coord = DefineInput(*this, F32[3], false, spv::BuiltIn::TessCoord);
+    }
+    for (size_t index = 0; index < IR::NUM_GENERICS; ++index) {
+        const AttributeType input_type{runtime_info.generic_input_types[index]};
+        if (!runtime_info.previous_stage_stores.Generic(index)) {
+            continue;
+        }
+        if (!loads.Generic(index)) {
+            continue;
+        }
+        if (input_type == AttributeType::Disabled) {
+            continue;
+        }
+        const Id type{GetAttributeType(*this, input_type)};
+        const Id id{DefineInput(*this, type, true)};
+        Decorate(id, spv::Decoration::Location, static_cast<u32>(index));
+        Name(id, fmt::format("in_attr{}", index));
+        input_generics[index] = id;
+
+        if (info.passthrough.Generic(index) && profile.support_geometry_shader_passthrough) {
+            Decorate(id, spv::Decoration::PassthroughNV);
+        }
+        if (stage != Stage::Fragment) {
+            continue;
+        }
+        switch (info.interpolation[index]) {
+        case Interpolation::Smooth:
+            // Default
+            // Decorate(id, spv::Decoration::Smooth);
+            break;
+        case Interpolation::NoPerspective:
+            Decorate(id, spv::Decoration::NoPerspective);
+            break;
+        case Interpolation::Flat:
+            Decorate(id, spv::Decoration::Flat);
+            break;
+        }
+    }
+    if (stage == Stage::TessellationEval) {
+        for (size_t index = 0; index < info.uses_patches.size(); ++index) {
+            if (!info.uses_patches[index]) {
+                continue;
+            }
+            const Id id{DefineInput(*this, F32[4], false)};
+            Decorate(id, spv::Decoration::Patch);
+            Decorate(id, spv::Decoration::Location, static_cast<u32>(index));
+            patches[index] = id;
+        }
+    }
+}
+
+void EmitContext::DefineOutputs(const IR::Program& program) {
+    const Info& info{program.info};
+    const std::optional<u32> invocations{program.invocations};
+    if (info.stores.AnyComponent(IR::Attribute::PositionX) || stage ==
Stage::VertexB) { + output_position = DefineOutput(*this, F32[4], invocations, spv::BuiltIn::Position); + } + if (info.stores[IR::Attribute::PointSize] || runtime_info.fixed_state_point_size) { + if (stage == Stage::Fragment) { + throw NotImplementedException("Storing PointSize in fragment stage"); + } + output_point_size = DefineOutput(*this, F32[1], invocations, spv::BuiltIn::PointSize); + } + if (info.stores.ClipDistances()) { + if (stage == Stage::Fragment) { + throw NotImplementedException("Storing ClipDistance in fragment stage"); + } + const Id type{TypeArray(F32[1], Const(8U))}; + clip_distances = DefineOutput(*this, type, invocations, spv::BuiltIn::ClipDistance); + } + if (info.stores[IR::Attribute::Layer] && + (profile.support_viewport_index_layer_non_geometry || stage == Stage::Geometry)) { + if (stage == Stage::Fragment) { + throw NotImplementedException("Storing Layer in fragment stage"); + } + layer = DefineOutput(*this, U32[1], invocations, spv::BuiltIn::Layer); + } + if (info.stores[IR::Attribute::ViewportIndex] && + (profile.support_viewport_index_layer_non_geometry || stage == Stage::Geometry)) { + if (stage == Stage::Fragment) { + throw NotImplementedException("Storing ViewportIndex in fragment stage"); + } + viewport_index = DefineOutput(*this, U32[1], invocations, spv::BuiltIn::ViewportIndex); + } + if (info.stores[IR::Attribute::ViewportMask] && profile.support_viewport_mask) { + viewport_mask = DefineOutput(*this, TypeArray(U32[1], Const(1u)), std::nullopt, + spv::BuiltIn::ViewportMaskNV); + } + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + if (info.stores.Generic(index)) { + DefineGenericOutput(*this, index, invocations); + } + } + switch (stage) { + case Stage::TessellationControl: + if (info.stores_tess_level_outer) { + const Id type{TypeArray(F32[1], Const(4U))}; + output_tess_level_outer = + DefineOutput(*this, type, std::nullopt, spv::BuiltIn::TessLevelOuter); + Decorate(output_tess_level_outer, spv::Decoration::Patch); + } + if (info.stores_tess_level_inner) { + const Id type{TypeArray(F32[1], Const(2U))}; + output_tess_level_inner = + DefineOutput(*this, type, std::nullopt, spv::BuiltIn::TessLevelInner); + Decorate(output_tess_level_inner, spv::Decoration::Patch); + } + for (size_t index = 0; index < info.uses_patches.size(); ++index) { + if (!info.uses_patches[index]) { + continue; + } + const Id id{DefineOutput(*this, F32[4], std::nullopt)}; + Decorate(id, spv::Decoration::Patch); + Decorate(id, spv::Decoration::Location, static_cast(index)); + patches[index] = id; + } + break; + case Stage::Fragment: + for (u32 index = 0; index < 8; ++index) { + if (!info.stores_frag_color[index] && !profile.need_declared_frag_colors) { + continue; + } + frag_color[index] = DefineOutput(*this, F32[4], std::nullopt); + Decorate(frag_color[index], spv::Decoration::Location, index); + Name(frag_color[index], fmt::format("frag_color{}", index)); + } + if (info.stores_frag_depth) { + frag_depth = DefineOutput(*this, F32[1], std::nullopt); + Decorate(frag_depth, spv::Decoration::BuiltIn, spv::BuiltIn::FragDepth); + } + if (info.stores_sample_mask) { + sample_mask = DefineOutput(*this, U32[1], std::nullopt); + Decorate(sample_mask, spv::Decoration::BuiltIn, spv::BuiltIn::SampleMask); + } + break; + default: + break; + } +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_context.h b/src/shader_recompiler/backend/spirv/emit_context.h new file mode 100755 index 000000000..e277bc358 --- /dev/null +++ 
b/src/shader_recompiler/backend/spirv/emit_context.h @@ -0,0 +1,307 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include + +#include + +#include "shader_recompiler/backend/bindings.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/profile.h" +#include "shader_recompiler/runtime_info.h" +#include "shader_recompiler/shader_info.h" + +namespace Shader::Backend::SPIRV { + +using Sirit::Id; + +class VectorTypes { +public: + void Define(Sirit::Module& sirit_ctx, Id base_type, std::string_view name); + + [[nodiscard]] Id operator[](size_t size) const noexcept { + return defs[size - 1]; + } + +private: + std::array defs{}; +}; + +struct TextureDefinition { + Id id; + Id sampled_type; + Id pointer_type; + Id image_type; + u32 count; +}; + +struct TextureBufferDefinition { + Id id; + u32 count; +}; + +struct ImageBufferDefinition { + Id id; + Id image_type; + u32 count; +}; + +struct ImageDefinition { + Id id; + Id image_type; + u32 count; +}; + +struct UniformDefinitions { + Id U8{}; + Id S8{}; + Id U16{}; + Id S16{}; + Id U32{}; + Id F32{}; + Id U32x2{}; + Id U32x4{}; +}; + +struct StorageTypeDefinition { + Id array{}; + Id element{}; +}; + +struct StorageTypeDefinitions { + StorageTypeDefinition U8{}; + StorageTypeDefinition S8{}; + StorageTypeDefinition U16{}; + StorageTypeDefinition S16{}; + StorageTypeDefinition U32{}; + StorageTypeDefinition U64{}; + StorageTypeDefinition F32{}; + StorageTypeDefinition U32x2{}; + StorageTypeDefinition U32x4{}; +}; + +struct StorageDefinitions { + Id U8{}; + Id S8{}; + Id U16{}; + Id S16{}; + Id U32{}; + Id F32{}; + Id U64{}; + Id U32x2{}; + Id U32x4{}; +}; + +struct GenericElementInfo { + Id id{}; + u32 first_element{}; + u32 num_components{}; +}; + +class EmitContext final : public Sirit::Module { +public: + explicit EmitContext(const Profile& profile, const RuntimeInfo& runtime_info, + IR::Program& program, Bindings& binding); + ~EmitContext(); + + [[nodiscard]] Id Def(const IR::Value& value); + + [[nodiscard]] Id BitOffset8(const IR::Value& offset); + [[nodiscard]] Id BitOffset16(const IR::Value& offset); + + Id Const(u32 value) { + return Constant(U32[1], value); + } + + Id Const(u32 element_1, u32 element_2) { + return ConstantComposite(U32[2], Const(element_1), Const(element_2)); + } + + Id Const(u32 element_1, u32 element_2, u32 element_3) { + return ConstantComposite(U32[3], Const(element_1), Const(element_2), Const(element_3)); + } + + Id Const(u32 element_1, u32 element_2, u32 element_3, u32 element_4) { + return ConstantComposite(U32[4], Const(element_1), Const(element_2), Const(element_3), + Const(element_4)); + } + + Id SConst(s32 value) { + return Constant(S32[1], value); + } + + Id SConst(s32 element_1, s32 element_2) { + return ConstantComposite(S32[2], SConst(element_1), SConst(element_2)); + } + + Id SConst(s32 element_1, s32 element_2, s32 element_3) { + return ConstantComposite(S32[3], SConst(element_1), SConst(element_2), SConst(element_3)); + } + + Id SConst(s32 element_1, s32 element_2, s32 element_3, s32 element_4) { + return ConstantComposite(S32[4], SConst(element_1), SConst(element_2), SConst(element_3), + SConst(element_4)); + } + + Id Const(f32 value) { + return Constant(F32[1], value); + } + + const Profile& profile; + const RuntimeInfo& runtime_info; + Stage stage{}; + + Id void_id{}; + Id U1{}; + Id U8{}; + Id S8{}; + Id U16{}; + Id S16{}; + Id U64{}; + VectorTypes F32; + 
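[Editor's note] A short usage note on the constant helpers above: VectorTypes indexes types by component count, so U32[1] is the scalar type and U32[4] a four-component vector, and the Const/SConst overloads build the matching scalar or composite constants. A hypothetical call site:

    void Example(EmitContext& ctx) {
        const Id zero{ctx.Const(0u)};           // OpConstant of type U32[1]
        const Id extent{ctx.Const(640u, 480u)}; // OpConstantComposite of type U32[2]
        const Id half{ctx.Const(0.5f)};         // OpConstant of type F32[1]
        const Id delta{ctx.SConst(-1, 1)};      // signed two-component constant
    }
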
VectorTypes U32; + VectorTypes S32; + VectorTypes F16; + VectorTypes F64; + + Id true_value{}; + Id false_value{}; + Id u32_zero_value{}; + Id f32_zero_value{}; + + UniformDefinitions uniform_types; + StorageTypeDefinitions storage_types; + + Id private_u32{}; + + Id shared_u8{}; + Id shared_u16{}; + Id shared_u32{}; + Id shared_u64{}; + Id shared_u32x2{}; + Id shared_u32x4{}; + + Id input_f32{}; + Id input_u32{}; + Id input_s32{}; + + Id output_f32{}; + Id output_u32{}; + + Id image_buffer_type{}; + Id sampled_texture_buffer_type{}; + Id image_u32{}; + + std::array cbufs{}; + std::array ssbos{}; + std::vector texture_buffers; + std::vector image_buffers; + std::vector textures; + std::vector images; + + Id workgroup_id{}; + Id local_invocation_id{}; + Id invocation_id{}; + Id sample_id{}; + Id is_helper_invocation{}; + Id subgroup_local_invocation_id{}; + Id subgroup_mask_eq{}; + Id subgroup_mask_lt{}; + Id subgroup_mask_le{}; + Id subgroup_mask_gt{}; + Id subgroup_mask_ge{}; + Id instance_id{}; + Id instance_index{}; + Id base_instance{}; + Id vertex_id{}; + Id vertex_index{}; + Id base_vertex{}; + Id front_face{}; + Id point_coord{}; + Id tess_coord{}; + Id clip_distances{}; + Id layer{}; + Id viewport_index{}; + Id viewport_mask{}; + Id primitive_id{}; + + Id fswzadd_lut_a{}; + Id fswzadd_lut_b{}; + + Id indexed_load_func{}; + Id indexed_store_func{}; + + Id local_memory{}; + + Id shared_memory_u8{}; + Id shared_memory_u16{}; + Id shared_memory_u32{}; + Id shared_memory_u64{}; + Id shared_memory_u32x2{}; + Id shared_memory_u32x4{}; + + Id shared_memory_u32_type{}; + + Id shared_store_u8_func{}; + Id shared_store_u16_func{}; + Id increment_cas_shared{}; + Id increment_cas_ssbo{}; + Id decrement_cas_shared{}; + Id decrement_cas_ssbo{}; + Id f32_add_cas{}; + Id f16x2_add_cas{}; + Id f16x2_min_cas{}; + Id f16x2_max_cas{}; + Id f32x2_add_cas{}; + Id f32x2_min_cas{}; + Id f32x2_max_cas{}; + + Id load_global_func_u32{}; + Id load_global_func_u32x2{}; + Id load_global_func_u32x4{}; + Id write_global_func_u32{}; + Id write_global_func_u32x2{}; + Id write_global_func_u32x4{}; + + Id input_position{}; + std::array input_generics{}; + + Id output_point_size{}; + Id output_position{}; + std::array, 32> output_generics{}; + + Id output_tess_level_outer{}; + Id output_tess_level_inner{}; + std::array patches{}; + + std::array frag_color{}; + Id sample_mask{}; + Id frag_depth{}; + + std::vector interfaces; + +private: + void DefineCommonTypes(const Info& info); + void DefineCommonConstants(); + void DefineInterfaces(const IR::Program& program); + void DefineLocalMemory(const IR::Program& program); + void DefineSharedMemory(const IR::Program& program); + void DefineSharedMemoryFunctions(const IR::Program& program); + void DefineConstantBuffers(const Info& info, u32& binding); + void DefineStorageBuffers(const Info& info, u32& binding); + void DefineTextureBuffers(const Info& info, u32& binding); + void DefineImageBuffers(const Info& info, u32& binding); + void DefineTextures(const Info& info, u32& binding); + void DefineImages(const Info& info, u32& binding); + void DefineAttributeMemAccess(const Info& info); + void DefineGlobalMemoryFunctions(const Info& info); + + void DefineInputs(const IR::Program& program); + void DefineOutputs(const IR::Program& program); +}; + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv.cpp b/src/shader_recompiler/backend/spirv/emit_spirv.cpp new file mode 100755 index 000000000..d7a86e270 --- /dev/null +++ 
b/src/shader_recompiler/backend/spirv/emit_spirv.cpp @@ -0,0 +1,541 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include +#include +#include +#include + +#include "common/settings.h" +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/program.h" + +namespace Shader::Backend::SPIRV { +namespace { +template +struct FuncTraits {}; + +template +struct FuncTraits { + using ReturnType = ReturnType_; + + static constexpr size_t NUM_ARGS = sizeof...(Args); + + template + using ArgType = std::tuple_element_t>; +}; + +template +void SetDefinition(EmitContext& ctx, IR::Inst* inst, Args... args) { + inst->SetDefinition(func(ctx, std::forward(args)...)); +} + +template +ArgType Arg(EmitContext& ctx, const IR::Value& arg) { + if constexpr (std::is_same_v) { + return ctx.Def(arg); + } else if constexpr (std::is_same_v) { + return arg; + } else if constexpr (std::is_same_v) { + return arg.U32(); + } else if constexpr (std::is_same_v) { + return arg.Attribute(); + } else if constexpr (std::is_same_v) { + return arg.Patch(); + } else if constexpr (std::is_same_v) { + return arg.Reg(); + } +} + +template +void Invoke(EmitContext& ctx, IR::Inst* inst, std::index_sequence) { + using Traits = FuncTraits; + if constexpr (std::is_same_v) { + if constexpr (is_first_arg_inst) { + SetDefinition( + ctx, inst, inst, + Arg>(ctx, inst->Arg(I))...); + } else { + SetDefinition( + ctx, inst, Arg>(ctx, inst->Arg(I))...); + } + } else { + if constexpr (is_first_arg_inst) { + func(ctx, inst, Arg>(ctx, inst->Arg(I))...); + } else { + func(ctx, Arg>(ctx, inst->Arg(I))...); + } + } +} + +template +void Invoke(EmitContext& ctx, IR::Inst* inst) { + using Traits = FuncTraits; + static_assert(Traits::NUM_ARGS >= 1, "Insufficient arguments"); + if constexpr (Traits::NUM_ARGS == 1) { + Invoke(ctx, inst, std::make_index_sequence<0>{}); + } else { + using FirstArgType = typename Traits::template ArgType<1>; + static constexpr bool is_first_arg_inst = std::is_same_v; + using Indices = std::make_index_sequence; + Invoke(ctx, inst, Indices{}); + } +} + +void EmitInst(EmitContext& ctx, IR::Inst* inst) { + switch (inst->GetOpcode()) { +#define OPCODE(name, result_type, ...) 
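[Editor's note] FuncTraits above is a conventional function-pointer trait: it recovers the return type, argument count, and each parameter type of an Emit* function so that Invoke can pull the right number of IR arguments and convert each one. The pattern in isolation (a simplified, self-contained sketch, not the exact declarations):

    #include <cstddef>
    #include <tuple>
    #include <type_traits>

    template <typename Func>
    struct FuncTraits;

    template <typename ReturnType_, typename... Args>
    struct FuncTraits<ReturnType_ (*)(Args...)> {
        using ReturnType = ReturnType_;
        static constexpr size_t NUM_ARGS = sizeof...(Args);
        template <size_t I>
        using ArgType = std::tuple_element_t<I, std::tuple<Args...>>;
    };

    // Hypothetical emitter, present only to exercise the trait:
    int EmitExample(char& ctx, float arg);

    static_assert(FuncTraits<decltype(&EmitExample)>::NUM_ARGS == 2);
    static_assert(std::is_same_v<FuncTraits<decltype(&EmitExample)>::ArgType<1>, float>);
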
\ + case IR::Opcode::name: \ + return Invoke<&Emit##name>(ctx, inst); +#include "shader_recompiler/frontend/ir/opcodes.inc" +#undef OPCODE + } + throw LogicError("Invalid opcode {}", inst->GetOpcode()); +} + +Id TypeId(const EmitContext& ctx, IR::Type type) { + switch (type) { + case IR::Type::U1: + return ctx.U1; + case IR::Type::U32: + return ctx.U32[1]; + default: + throw NotImplementedException("Phi node type {}", type); + } +} + +void Traverse(EmitContext& ctx, IR::Program& program) { + IR::Block* current_block{}; + for (const IR::AbstractSyntaxNode& node : program.syntax_list) { + switch (node.type) { + case IR::AbstractSyntaxNode::Type::Block: { + const Id label{node.data.block->Definition()}; + if (current_block) { + ctx.OpBranch(label); + } + current_block = node.data.block; + ctx.AddLabel(label); + for (IR::Inst& inst : node.data.block->Instructions()) { + EmitInst(ctx, &inst); + } + break; + } + case IR::AbstractSyntaxNode::Type::If: { + const Id if_label{node.data.if_node.body->Definition()}; + const Id endif_label{node.data.if_node.merge->Definition()}; + ctx.OpSelectionMerge(endif_label, spv::SelectionControlMask::MaskNone); + ctx.OpBranchConditional(ctx.Def(node.data.if_node.cond), if_label, endif_label); + break; + } + case IR::AbstractSyntaxNode::Type::Loop: { + const Id body_label{node.data.loop.body->Definition()}; + const Id continue_label{node.data.loop.continue_block->Definition()}; + const Id endloop_label{node.data.loop.merge->Definition()}; + + ctx.OpLoopMerge(endloop_label, continue_label, spv::LoopControlMask::MaskNone); + ctx.OpBranch(body_label); + break; + } + case IR::AbstractSyntaxNode::Type::Break: { + const Id break_label{node.data.break_node.merge->Definition()}; + const Id skip_label{node.data.break_node.skip->Definition()}; + ctx.OpBranchConditional(ctx.Def(node.data.break_node.cond), break_label, skip_label); + break; + } + case IR::AbstractSyntaxNode::Type::EndIf: + if (current_block) { + ctx.OpBranch(node.data.end_if.merge->Definition()); + } + break; + case IR::AbstractSyntaxNode::Type::Repeat: { + Id cond{ctx.Def(node.data.repeat.cond)}; + if (!Settings::values.disable_shader_loop_safety_checks) { + const Id pointer_type{ctx.TypePointer(spv::StorageClass::Private, ctx.U32[1])}; + const Id safety_counter{ctx.AddGlobalVariable( + pointer_type, spv::StorageClass::Private, ctx.Const(0x2000u))}; + if (ctx.profile.supported_spirv >= 0x00010400) { + ctx.interfaces.push_back(safety_counter); + } + const Id old_counter{ctx.OpLoad(ctx.U32[1], safety_counter)}; + const Id new_counter{ctx.OpISub(ctx.U32[1], old_counter, ctx.Const(1u))}; + ctx.OpStore(safety_counter, new_counter); + + const Id safety_cond{ + ctx.OpSGreaterThanEqual(ctx.U1, new_counter, ctx.u32_zero_value)}; + cond = ctx.OpLogicalAnd(ctx.U1, cond, safety_cond); + } + const Id loop_header_label{node.data.repeat.loop_header->Definition()}; + const Id merge_label{node.data.repeat.merge->Definition()}; + ctx.OpBranchConditional(cond, loop_header_label, merge_label); + break; + } + case IR::AbstractSyntaxNode::Type::Return: + ctx.OpReturn(); + break; + case IR::AbstractSyntaxNode::Type::Unreachable: + ctx.OpUnreachable(); + break; + } + if (node.type != IR::AbstractSyntaxNode::Type::Block) { + current_block = nullptr; + } + } +} + +Id DefineMain(EmitContext& ctx, IR::Program& program) { + const Id void_function{ctx.TypeFunction(ctx.void_id)}; + const Id main{ctx.OpFunction(ctx.void_id, spv::FunctionControlMask::MaskNone, void_function)}; + for (IR::Block* const block : program.blocks) { + 
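[Editor's note] The Repeat case above implements the loop-safety option: unless disabled in settings, each loop gets a Private-storage counter starting at 0x2000 that is decremented every iteration, and counter >= 0 is ANDed into the back-edge condition. A host-side analogue of the emitted logic:

    #include <functional>

    // Runs a do/while loop but forces exit once the safety budget is spent,
    // mirroring the OpISub / OpSGreaterThanEqual / OpLogicalAnd sequence above.
    void RepeatWithSafety(const std::function<bool()>& cond, const std::function<void()>& body) {
        int safety_counter = 0x2000;
        bool keep_running;
        do {
            body();
            --safety_counter;
            keep_running = cond() && safety_counter >= 0;
        } while (keep_running);
    }
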
block->SetDefinition(ctx.OpLabel()); + } + Traverse(ctx, program); + ctx.OpFunctionEnd(); + return main; +} + +spv::ExecutionMode ExecutionMode(TessPrimitive primitive) { + switch (primitive) { + case TessPrimitive::Isolines: + return spv::ExecutionMode::Isolines; + case TessPrimitive::Triangles: + return spv::ExecutionMode::Triangles; + case TessPrimitive::Quads: + return spv::ExecutionMode::Quads; + } + throw InvalidArgument("Tessellation primitive {}", primitive); +} + +spv::ExecutionMode ExecutionMode(TessSpacing spacing) { + switch (spacing) { + case TessSpacing::Equal: + return spv::ExecutionMode::SpacingEqual; + case TessSpacing::FractionalOdd: + return spv::ExecutionMode::SpacingFractionalOdd; + case TessSpacing::FractionalEven: + return spv::ExecutionMode::SpacingFractionalEven; + } + throw InvalidArgument("Tessellation spacing {}", spacing); +} + +void DefineEntryPoint(const IR::Program& program, EmitContext& ctx, Id main) { + const std::span interfaces(ctx.interfaces.data(), ctx.interfaces.size()); + spv::ExecutionModel execution_model{}; + switch (program.stage) { + case Stage::Compute: { + const std::array workgroup_size{program.workgroup_size}; + execution_model = spv::ExecutionModel::GLCompute; + ctx.AddExecutionMode(main, spv::ExecutionMode::LocalSize, workgroup_size[0], + workgroup_size[1], workgroup_size[2]); + break; + } + case Stage::VertexB: + execution_model = spv::ExecutionModel::Vertex; + break; + case Stage::TessellationControl: + execution_model = spv::ExecutionModel::TessellationControl; + ctx.AddCapability(spv::Capability::Tessellation); + ctx.AddExecutionMode(main, spv::ExecutionMode::OutputVertices, program.invocations); + break; + case Stage::TessellationEval: + execution_model = spv::ExecutionModel::TessellationEvaluation; + ctx.AddCapability(spv::Capability::Tessellation); + ctx.AddExecutionMode(main, ExecutionMode(ctx.runtime_info.tess_primitive)); + ctx.AddExecutionMode(main, ExecutionMode(ctx.runtime_info.tess_spacing)); + ctx.AddExecutionMode(main, ctx.runtime_info.tess_clockwise + ? 
spv::ExecutionMode::VertexOrderCw + : spv::ExecutionMode::VertexOrderCcw); + break; + case Stage::Geometry: + execution_model = spv::ExecutionModel::Geometry; + ctx.AddCapability(spv::Capability::Geometry); + ctx.AddCapability(spv::Capability::GeometryStreams); + switch (ctx.runtime_info.input_topology) { + case InputTopology::Points: + ctx.AddExecutionMode(main, spv::ExecutionMode::InputPoints); + break; + case InputTopology::Lines: + ctx.AddExecutionMode(main, spv::ExecutionMode::InputLines); + break; + case InputTopology::LinesAdjacency: + ctx.AddExecutionMode(main, spv::ExecutionMode::InputLinesAdjacency); + break; + case InputTopology::Triangles: + ctx.AddExecutionMode(main, spv::ExecutionMode::Triangles); + break; + case InputTopology::TrianglesAdjacency: + ctx.AddExecutionMode(main, spv::ExecutionMode::InputTrianglesAdjacency); + break; + } + switch (program.output_topology) { + case OutputTopology::PointList: + ctx.AddExecutionMode(main, spv::ExecutionMode::OutputPoints); + break; + case OutputTopology::LineStrip: + ctx.AddExecutionMode(main, spv::ExecutionMode::OutputLineStrip); + break; + case OutputTopology::TriangleStrip: + ctx.AddExecutionMode(main, spv::ExecutionMode::OutputTriangleStrip); + break; + } + if (program.info.stores[IR::Attribute::PointSize]) { + ctx.AddCapability(spv::Capability::GeometryPointSize); + } + ctx.AddExecutionMode(main, spv::ExecutionMode::OutputVertices, program.output_vertices); + ctx.AddExecutionMode(main, spv::ExecutionMode::Invocations, program.invocations); + if (program.is_geometry_passthrough) { + if (ctx.profile.support_geometry_shader_passthrough) { + ctx.AddExtension("SPV_NV_geometry_shader_passthrough"); + ctx.AddCapability(spv::Capability::GeometryShaderPassthroughNV); + } else { + LOG_WARNING(Shader_SPIRV, "Geometry shader passthrough used with no support"); + } + } + break; + case Stage::Fragment: + execution_model = spv::ExecutionModel::Fragment; + if (ctx.profile.lower_left_origin_mode) { + ctx.AddExecutionMode(main, spv::ExecutionMode::OriginLowerLeft); + } else { + ctx.AddExecutionMode(main, spv::ExecutionMode::OriginUpperLeft); + } + if (program.info.stores_frag_depth) { + ctx.AddExecutionMode(main, spv::ExecutionMode::DepthReplacing); + } + if (ctx.runtime_info.force_early_z) { + ctx.AddExecutionMode(main, spv::ExecutionMode::EarlyFragmentTests); + } + break; + default: + throw NotImplementedException("Stage {}", program.stage); + } + ctx.AddEntryPoint(execution_model, main, "main", interfaces); +} + +void SetupDenormControl(const Profile& profile, const IR::Program& program, EmitContext& ctx, + Id main_func) { + const Info& info{program.info}; + if (info.uses_fp32_denorms_flush && info.uses_fp32_denorms_preserve) { + LOG_DEBUG(Shader_SPIRV, "Fp32 denorm flush and preserve on the same shader"); + } else if (info.uses_fp32_denorms_flush) { + if (profile.support_fp32_denorm_flush) { + ctx.AddCapability(spv::Capability::DenormFlushToZero); + ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormFlushToZero, 32U); + } else { + // Drivers will most likely flush denorms by default, no need to warn + } + } else if (info.uses_fp32_denorms_preserve) { + if (profile.support_fp32_denorm_preserve) { + ctx.AddCapability(spv::Capability::DenormPreserve); + ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormPreserve, 32U); + } else { + LOG_DEBUG(Shader_SPIRV, "Fp32 denorm preserve used in shader without host support"); + } + } + if (!profile.support_separate_denorm_behavior || profile.has_broken_fp16_float_controls) { + // No separate 
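[Editor's note] SetupDenormControl maps two per-precision hints onto the float-controls execution modes; the fp32 half compresses to the sketch below (helper name ours; the fp16 half is identical behind the separate-behavior check):

    // Condensed fp32 decision table from SetupDenormControl (illustrative).
    void SetDenormMode32(EmitContext& ctx, Id main_func, bool flush, bool preserve) {
        if (flush && preserve) {
            return; // contradictory request: keep the driver's default behavior
        }
        if (flush && ctx.profile.support_fp32_denorm_flush) {
            ctx.AddCapability(spv::Capability::DenormFlushToZero);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormFlushToZero, 32U);
        } else if (preserve && ctx.profile.support_fp32_denorm_preserve) {
            ctx.AddCapability(spv::Capability::DenormPreserve);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormPreserve, 32U);
        }
    }
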
denorm behavior + return; + } + if (info.uses_fp16_denorms_flush && info.uses_fp16_denorms_preserve) { + LOG_DEBUG(Shader_SPIRV, "Fp16 denorm flush and preserve on the same shader"); + } else if (info.uses_fp16_denorms_flush) { + if (profile.support_fp16_denorm_flush) { + ctx.AddCapability(spv::Capability::DenormFlushToZero); + ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormFlushToZero, 16U); + } else { + // Same as fp32, no need to warn as most drivers will flush by default + } + } else if (info.uses_fp16_denorms_preserve) { + if (profile.support_fp16_denorm_preserve) { + ctx.AddCapability(spv::Capability::DenormPreserve); + ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormPreserve, 16U); + } else { + LOG_DEBUG(Shader_SPIRV, "Fp16 denorm preserve used in shader without host support"); + } + } +} + +void SetupSignedNanCapabilities(const Profile& profile, const IR::Program& program, + EmitContext& ctx, Id main_func) { + if (profile.has_broken_fp16_float_controls && program.info.uses_fp16) { + return; + } + if (program.info.uses_fp16 && profile.support_fp16_signed_zero_nan_preserve) { + ctx.AddCapability(spv::Capability::SignedZeroInfNanPreserve); + ctx.AddExecutionMode(main_func, spv::ExecutionMode::SignedZeroInfNanPreserve, 16U); + } + if (profile.support_fp32_signed_zero_nan_preserve) { + ctx.AddCapability(spv::Capability::SignedZeroInfNanPreserve); + ctx.AddExecutionMode(main_func, spv::ExecutionMode::SignedZeroInfNanPreserve, 32U); + } + if (program.info.uses_fp64 && profile.support_fp64_signed_zero_nan_preserve) { + ctx.AddCapability(spv::Capability::SignedZeroInfNanPreserve); + ctx.AddExecutionMode(main_func, spv::ExecutionMode::SignedZeroInfNanPreserve, 64U); + } +} + +void SetupCapabilities(const Profile& profile, const Info& info, EmitContext& ctx) { + if (info.uses_sampled_1d) { + ctx.AddCapability(spv::Capability::Sampled1D); + } + if (info.uses_sparse_residency) { + ctx.AddCapability(spv::Capability::SparseResidency); + } + if (info.uses_demote_to_helper_invocation && profile.support_demote_to_helper_invocation) { + ctx.AddExtension("SPV_EXT_demote_to_helper_invocation"); + ctx.AddCapability(spv::Capability::DemoteToHelperInvocationEXT); + } + if (info.stores[IR::Attribute::ViewportIndex]) { + ctx.AddCapability(spv::Capability::MultiViewport); + } + if (info.stores[IR::Attribute::ViewportMask] && profile.support_viewport_mask) { + ctx.AddExtension("SPV_NV_viewport_array2"); + ctx.AddCapability(spv::Capability::ShaderViewportMaskNV); + } + if (info.stores[IR::Attribute::Layer] || info.stores[IR::Attribute::ViewportIndex]) { + if (profile.support_viewport_index_layer_non_geometry && ctx.stage != Stage::Geometry) { + ctx.AddExtension("SPV_EXT_shader_viewport_index_layer"); + ctx.AddCapability(spv::Capability::ShaderViewportIndexLayerEXT); + } + } + if (!profile.support_vertex_instance_id && + (info.loads[IR::Attribute::InstanceId] || info.loads[IR::Attribute::VertexId])) { + ctx.AddExtension("SPV_KHR_shader_draw_parameters"); + ctx.AddCapability(spv::Capability::DrawParameters); + } + if ((info.uses_subgroup_vote || info.uses_subgroup_invocation_id || + info.uses_subgroup_shuffles) && + profile.support_vote) { + ctx.AddExtension("SPV_KHR_shader_ballot"); + ctx.AddCapability(spv::Capability::SubgroupBallotKHR); + if (!profile.warp_size_potentially_larger_than_guest) { + // vote ops are only used when not taking the long path + ctx.AddExtension("SPV_KHR_subgroup_vote"); + ctx.AddCapability(spv::Capability::SubgroupVoteKHR); + } + } + if 
(info.uses_int64_bit_atomics && profile.support_int64_atomics) { + ctx.AddCapability(spv::Capability::Int64Atomics); + } + if (info.uses_typeless_image_reads && profile.support_typeless_image_loads) { + ctx.AddCapability(spv::Capability::StorageImageReadWithoutFormat); + } + if (info.uses_typeless_image_writes) { + ctx.AddCapability(spv::Capability::StorageImageWriteWithoutFormat); + } + if (info.uses_image_buffers) { + ctx.AddCapability(spv::Capability::ImageBuffer); + } + if (info.uses_sample_id) { + ctx.AddCapability(spv::Capability::SampleRateShading); + } + if (!ctx.runtime_info.xfb_varyings.empty()) { + ctx.AddCapability(spv::Capability::TransformFeedback); + } + if (info.uses_derivatives) { + ctx.AddCapability(spv::Capability::DerivativeControl); + } + // TODO: Track this usage + ctx.AddCapability(spv::Capability::ImageGatherExtended); + ctx.AddCapability(spv::Capability::ImageQuery); + ctx.AddCapability(spv::Capability::SampledBuffer); +} + +void PatchPhiNodes(IR::Program& program, EmitContext& ctx) { + auto inst{program.blocks.front()->begin()}; + size_t block_index{0}; + ctx.PatchDeferredPhi([&](size_t phi_arg) { + if (phi_arg == 0) { + ++inst; + if (inst == program.blocks[block_index]->end() || + inst->GetOpcode() != IR::Opcode::Phi) { + do { + ++block_index; + inst = program.blocks[block_index]->begin(); + } while (inst->GetOpcode() != IR::Opcode::Phi); + } + } + return ctx.Def(inst->Arg(phi_arg)); + }); +} +} // Anonymous namespace + +std::vector EmitSPIRV(const Profile& profile, const RuntimeInfo& runtime_info, + IR::Program& program, Bindings& bindings) { + EmitContext ctx{profile, runtime_info, program, bindings}; + const Id main{DefineMain(ctx, program)}; + DefineEntryPoint(program, ctx, main); + if (profile.support_float_controls) { + ctx.AddExtension("SPV_KHR_float_controls"); + SetupDenormControl(profile, program, ctx, main); + SetupSignedNanCapabilities(profile, program, ctx, main); + } + SetupCapabilities(profile, program.info, ctx); + PatchPhiNodes(program, ctx); + return ctx.Assemble(); +} + +Id EmitPhi(EmitContext& ctx, IR::Inst* inst) { + const size_t num_args{inst->NumArgs()}; + boost::container::small_vector blocks; + blocks.reserve(num_args); + for (size_t index = 0; index < num_args; ++index) { + blocks.push_back(inst->PhiBlock(index)->Definition()); + } + // The type of a phi instruction is stored in its flags + const Id result_type{TypeId(ctx, inst->Flags())}; + return ctx.DeferredOpPhi(result_type, std::span(blocks.data(), blocks.size())); +} + +void EmitVoid(EmitContext&) {} + +Id EmitIdentity(EmitContext& ctx, const IR::Value& value) { + const Id id{ctx.Def(value)}; + if (!Sirit::ValidId(id)) { + throw NotImplementedException("Forward identity declaration"); + } + return id; +} + +Id EmitConditionRef(EmitContext& ctx, const IR::Value& value) { + const Id id{ctx.Def(value)}; + if (!Sirit::ValidId(id)) { + throw NotImplementedException("Forward identity declaration"); + } + return id; +} + +void EmitReference(EmitContext&) {} + +void EmitPhiMove(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetZeroFromOp(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetSignFromOp(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetCarryFromOp(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetOverflowFromOp(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetSparseFromOp(EmitContext&) { + throw LogicError("Unreachable 
instruction"); +} + +void EmitGetInBoundsFromOp(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv.h b/src/shader_recompiler/backend/spirv/emit_spirv.h new file mode 100755 index 000000000..db0c935fe --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv.h @@ -0,0 +1,27 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include + +#include + +#include "common/common_types.h" +#include "shader_recompiler/backend/bindings.h" +#include "shader_recompiler/backend/spirv/emit_context.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/profile.h" + +namespace Shader::Backend::SPIRV { + +[[nodiscard]] std::vector EmitSPIRV(const Profile& profile, const RuntimeInfo& runtime_info, + IR::Program& program, Bindings& bindings); + +[[nodiscard]] inline std::vector EmitSPIRV(const Profile& profile, IR::Program& program) { + Bindings binding; + return EmitSPIRV(profile, {}, program, binding); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp new file mode 100755 index 000000000..9af8bb9e1 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp @@ -0,0 +1,448 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +Id SharedPointer(EmitContext& ctx, Id offset, u32 index_offset = 0) { + const Id shift_id{ctx.Const(2U)}; + Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + if (index_offset > 0) { + index = ctx.OpIAdd(ctx.U32[1], index, ctx.Const(index_offset)); + } + return ctx.profile.support_explicit_workgroup_layout + ? 
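[Editor's note] SharedPointer, defined above, turns a byte offset into a 32-bit word index before building the access chain; the address arithmetic in plain C++ is:

    #include <cstdint>

    // Byte offset -> u32 word index, with an optional extra word offset that
    // the 64-bit fallbacks use to address the second half of a word pair.
    uint32_t SharedWordIndex(uint32_t byte_offset, uint32_t index_offset) {
        return (byte_offset >> 2) + index_offset;
    }
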
ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, ctx.u32_zero_value, index) + : ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, index); +} + +Id StorageIndex(EmitContext& ctx, const IR::Value& offset, size_t element_size) { + if (offset.IsImmediate()) { + const u32 imm_offset{static_cast(offset.U32() / element_size)}; + return ctx.Const(imm_offset); + } + const u32 shift{static_cast(std::countr_zero(element_size))}; + const Id index{ctx.Def(offset)}; + if (shift == 0) { + return index; + } + const Id shift_id{ctx.Const(shift)}; + return ctx.OpShiftRightLogical(ctx.U32[1], index, shift_id); +} + +Id StoragePointer(EmitContext& ctx, const StorageTypeDefinition& type_def, + Id StorageDefinitions::*member_ptr, const IR::Value& binding, + const IR::Value& offset, size_t element_size) { + if (!binding.IsImmediate()) { + throw NotImplementedException("Dynamic storage buffer indexing"); + } + const Id ssbo{ctx.ssbos[binding.U32()].*member_ptr}; + const Id index{StorageIndex(ctx, offset, element_size)}; + return ctx.OpAccessChain(type_def.element, ssbo, ctx.u32_zero_value, index); +} + +std::pair AtomicArgs(EmitContext& ctx) { + const Id scope{ctx.Const(static_cast(spv::Scope::Device))}; + const Id semantics{ctx.u32_zero_value}; + return {scope, semantics}; +} + +Id SharedAtomicU32(EmitContext& ctx, Id offset, Id value, + Id (Sirit::Module::*atomic_func)(Id, Id, Id, Id, Id)) { + const Id pointer{SharedPointer(ctx, offset)}; + const auto [scope, semantics]{AtomicArgs(ctx)}; + return (ctx.*atomic_func)(ctx.U32[1], pointer, scope, semantics, value); +} + +Id StorageAtomicU32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value, + Id (Sirit::Module::*atomic_func)(Id, Id, Id, Id, Id)) { + const Id pointer{StoragePointer(ctx, ctx.storage_types.U32, &StorageDefinitions::U32, binding, + offset, sizeof(u32))}; + const auto [scope, semantics]{AtomicArgs(ctx)}; + return (ctx.*atomic_func)(ctx.U32[1], pointer, scope, semantics, value); +} + +Id StorageAtomicU64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value, + Id (Sirit::Module::*atomic_func)(Id, Id, Id, Id, Id), + Id (Sirit::Module::*non_atomic_func)(Id, Id, Id)) { + if (ctx.profile.support_int64_atomics) { + const Id pointer{StoragePointer(ctx, ctx.storage_types.U64, &StorageDefinitions::U64, + binding, offset, sizeof(u64))}; + const auto [scope, semantics]{AtomicArgs(ctx)}; + return (ctx.*atomic_func)(ctx.U64, pointer, scope, semantics, value); + } + LOG_ERROR(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic"); + const Id pointer{StoragePointer(ctx, ctx.storage_types.U32x2, &StorageDefinitions::U32x2, + binding, offset, sizeof(u32[2]))}; + const Id original_value{ctx.OpBitcast(ctx.U64, ctx.OpLoad(ctx.U32[2], pointer))}; + const Id result{(ctx.*non_atomic_func)(ctx.U64, value, original_value)}; + ctx.OpStore(pointer, ctx.OpBitcast(ctx.U32[2], result)); + return original_value; +} +} // Anonymous namespace + +Id EmitSharedAtomicIAdd32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicIAdd); +} + +Id EmitSharedAtomicSMin32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicSMin); +} + +Id EmitSharedAtomicUMin32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicUMin); +} + +Id EmitSharedAtomicSMax32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, 
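[Editor's note] When Int64Atomics is unavailable, StorageAtomicU64 above degrades to a plain read-modify-write over a pair of 32-bit words, which is correct only in the absence of contention, hence the LOG_ERROR. A host-side analogue of the emitted sequence (illustrative):

    #include <cstdint>
    #include <cstring>

    // Non-atomic 64-bit fallback: load two words, apply the op, store the
    // result, return the original value. Racy by construction.
    uint64_t FallbackAtomicOp64(uint32_t words[2], uint64_t value,
                                uint64_t (*op)(uint64_t, uint64_t)) {
        uint64_t original;
        std::memcpy(&original, words, sizeof(original)); // OpLoad + OpBitcast
        const uint64_t result = op(value, original);
        std::memcpy(words, &result, sizeof(result));     // OpBitcast + OpStore
        return original;
    }
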
&Sirit::Module::OpAtomicSMax); +} + +Id EmitSharedAtomicUMax32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicUMax); +} + +Id EmitSharedAtomicInc32(EmitContext& ctx, Id offset, Id value) { + const Id shift_id{ctx.Const(2U)}; + const Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + return ctx.OpFunctionCall(ctx.U32[1], ctx.increment_cas_shared, index, value); +} + +Id EmitSharedAtomicDec32(EmitContext& ctx, Id offset, Id value) { + const Id shift_id{ctx.Const(2U)}; + const Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + return ctx.OpFunctionCall(ctx.U32[1], ctx.decrement_cas_shared, index, value); +} + +Id EmitSharedAtomicAnd32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicAnd); +} + +Id EmitSharedAtomicOr32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicOr); +} + +Id EmitSharedAtomicXor32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicXor); +} + +Id EmitSharedAtomicExchange32(EmitContext& ctx, Id offset, Id value) { + return SharedAtomicU32(ctx, offset, value, &Sirit::Module::OpAtomicExchange); +} + +Id EmitSharedAtomicExchange64(EmitContext& ctx, Id offset, Id value) { + if (ctx.profile.support_int64_atomics && ctx.profile.support_explicit_workgroup_layout) { + const Id shift_id{ctx.Const(3U)}; + const Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + const Id pointer{ + ctx.OpAccessChain(ctx.shared_u64, ctx.shared_memory_u64, ctx.u32_zero_value, index)}; + const auto [scope, semantics]{AtomicArgs(ctx)}; + return ctx.OpAtomicExchange(ctx.U64, pointer, scope, semantics, value); + } + LOG_ERROR(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic"); + const Id pointer_1{SharedPointer(ctx, offset, 0)}; + const Id pointer_2{SharedPointer(ctx, offset, 1)}; + const Id value_1{ctx.OpLoad(ctx.U32[1], pointer_1)}; + const Id value_2{ctx.OpLoad(ctx.U32[1], pointer_2)}; + const Id new_vector{ctx.OpBitcast(ctx.U32[2], value)}; + ctx.OpStore(pointer_1, ctx.OpCompositeExtract(ctx.U32[1], new_vector, 0U)); + ctx.OpStore(pointer_2, ctx.OpCompositeExtract(ctx.U32[1], new_vector, 1U)); + return ctx.OpBitcast(ctx.U64, ctx.OpCompositeConstruct(ctx.U32[2], value_1, value_2)); +} + +Id EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicIAdd); +} + +Id EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicSMin); +} + +Id EmitStorageAtomicUMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicUMin); +} + +Id EmitStorageAtomicSMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicSMax); +} + +Id EmitStorageAtomicUMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicUMax); +} + +Id EmitStorageAtomicInc32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + 
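[Editor's note] The one-line Emit* bodies here work because every Sirit OpAtomic* emitter shares a signature, so the helpers dispatch through a pointer to member function. The idiom in isolation:

    // All of the OpAtomic* emitters share the
    // (result type, pointer, scope, semantics, value) shape.
    using AtomicEmitter = Id (Sirit::Module::*)(Id, Id, Id, Id, Id);

    Id EmitThrough(EmitContext& ctx, AtomicEmitter emit, Id result_type, Id pointer,
                   Id scope, Id semantics, Id value) {
        return (ctx.*emit)(result_type, pointer, scope, semantics, value);
    }
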
const Id ssbo{ctx.ssbos[binding.U32()].U32}; + const Id base_index{StorageIndex(ctx, offset, sizeof(u32))}; + return ctx.OpFunctionCall(ctx.U32[1], ctx.increment_cas_ssbo, base_index, value, ssbo); +} + +Id EmitStorageAtomicDec32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + const Id ssbo{ctx.ssbos[binding.U32()].U32}; + const Id base_index{StorageIndex(ctx, offset, sizeof(u32))}; + return ctx.OpFunctionCall(ctx.U32[1], ctx.decrement_cas_ssbo, base_index, value, ssbo); +} + +Id EmitStorageAtomicAnd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicAnd); +} + +Id EmitStorageAtomicOr32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicOr); +} + +Id EmitStorageAtomicXor32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicXor); +} + +Id EmitStorageAtomicExchange32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicExchange); +} + +Id EmitStorageAtomicIAdd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicIAdd, + &Sirit::Module::OpIAdd); +} + +Id EmitStorageAtomicSMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicSMin, + &Sirit::Module::OpSMin); +} + +Id EmitStorageAtomicUMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicUMin, + &Sirit::Module::OpUMin); +} + +Id EmitStorageAtomicSMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicSMax, + &Sirit::Module::OpSMax); +} + +Id EmitStorageAtomicUMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicUMax, + &Sirit::Module::OpUMax); +} + +Id EmitStorageAtomicAnd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicAnd, + &Sirit::Module::OpBitwiseAnd); +} + +Id EmitStorageAtomicOr64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicOr, + &Sirit::Module::OpBitwiseOr); +} + +Id EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + return StorageAtomicU64(ctx, binding, offset, value, &Sirit::Module::OpAtomicXor, + &Sirit::Module::OpBitwiseXor); +} + +Id EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + if (ctx.profile.support_int64_atomics) { + const Id pointer{StoragePointer(ctx, ctx.storage_types.U64, &StorageDefinitions::U64, + binding, offset, sizeof(u64))}; + const auto [scope, semantics]{AtomicArgs(ctx)}; + return ctx.OpAtomicExchange(ctx.U64, pointer, scope, semantics, 
value);
+    }
+    LOG_ERROR(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    const Id pointer{StoragePointer(ctx, ctx.storage_types.U32x2, &StorageDefinitions::U32x2,
+                                    binding, offset, sizeof(u32[2]))};
+    const Id original{ctx.OpBitcast(ctx.U64, ctx.OpLoad(ctx.U32[2], pointer))};
+    ctx.OpStore(pointer, ctx.OpBitcast(ctx.U32[2], value));
+    return original;
+}
+
+Id EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                           Id value) {
+    const Id ssbo{ctx.ssbos[binding.U32()].U32};
+    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
+    return ctx.OpFunctionCall(ctx.F32[1], ctx.f32_add_cas, base_index, value, ssbo);
+}
+
+Id EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    const Id ssbo{ctx.ssbos[binding.U32()].U32};
+    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
+    const Id result{ctx.OpFunctionCall(ctx.F16[2], ctx.f16x2_add_cas, base_index, value, ssbo)};
+    return ctx.OpBitcast(ctx.U32[1], result);
+}
+
+Id EmitStorageAtomicAddF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    const Id ssbo{ctx.ssbos[binding.U32()].U32};
+    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
+    const Id result{ctx.OpFunctionCall(ctx.F32[2], ctx.f32x2_add_cas, base_index, value, ssbo)};
+    return ctx.OpPackHalf2x16(ctx.U32[1], result);
+}
+
+Id EmitStorageAtomicMinF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    const Id ssbo{ctx.ssbos[binding.U32()].U32};
+    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
+    const Id result{ctx.OpFunctionCall(ctx.F16[2], ctx.f16x2_min_cas, base_index, value, ssbo)};
+    return ctx.OpBitcast(ctx.U32[1], result);
+}
+
+Id EmitStorageAtomicMinF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    const Id ssbo{ctx.ssbos[binding.U32()].U32};
+    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
+    const Id result{ctx.OpFunctionCall(ctx.F32[2], ctx.f32x2_min_cas, base_index, value, ssbo)};
+    return ctx.OpPackHalf2x16(ctx.U32[1], result);
+}
+
+Id EmitStorageAtomicMaxF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    const Id ssbo{ctx.ssbos[binding.U32()].U32};
+    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
+    const Id result{ctx.OpFunctionCall(ctx.F16[2], ctx.f16x2_max_cas, base_index, value, ssbo)};
+    return ctx.OpBitcast(ctx.U32[1], result);
+}
+
+Id EmitStorageAtomicMaxF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    const Id ssbo{ctx.ssbos[binding.U32()].U32};
+    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
+    const Id result{ctx.OpFunctionCall(ctx.F32[2], ctx.f32x2_max_cas, base_index, value, ssbo)};
+    return ctx.OpPackHalf2x16(ctx.U32[1], result);
+}
+
+Id EmitGlobalAtomicIAdd32(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicSMin32(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicUMin32(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicSMax32(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicUMax32(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicInc32(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicDec32(EmitContext&) {
+    throw
NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicAnd32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicOr32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicXor32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicExchange32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicIAdd64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicSMin64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicUMin64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicSMax64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicUMax64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicInc64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicDec64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicAnd64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicOr64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicXor64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicExchange64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicAddF32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicAddF16x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicAddF32x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicMinF16x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicMinF32x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicMaxF16x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitGlobalAtomicMaxF32x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_barriers.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_barriers.cpp new file mode 100755 index 000000000..e0b52a001 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_barriers.cpp @@ -0,0 +1,38 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
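[Editor's note] The EmitGlobalAtomic* entry points above deliberately throw, presumably because an earlier IR pass rewrites trackable global (pointer-addressed) accesses into the storage-buffer forms, so reaching them would indicate a missed lowering. The barrier helpers defined just below map workgroup and device scopes onto SPIR-V; as a loose host-side analogue of the memory-ordering half (a control barrier has no single-threaded equivalent):

    #include <atomic>

    // Rough analogue of EmitDeviceMemoryBarrier's AcquireRelease semantics;
    // GPU barriers additionally carry a scope, which C++ fences do not.
    void DeviceMemoryBarrierAnalogue() {
        std::atomic_thread_fence(std::memory_order_acq_rel);
    }
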
+ +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" +#include "shader_recompiler/frontend/ir/modifiers.h" + +namespace Shader::Backend::SPIRV { +namespace { +void MemoryBarrier(EmitContext& ctx, spv::Scope scope) { + const auto semantics{ + spv::MemorySemanticsMask::AcquireRelease | spv::MemorySemanticsMask::UniformMemory | + spv::MemorySemanticsMask::WorkgroupMemory | spv::MemorySemanticsMask::AtomicCounterMemory | + spv::MemorySemanticsMask::ImageMemory}; + ctx.OpMemoryBarrier(ctx.Const(static_cast(scope)), ctx.Const(static_cast(semantics))); +} +} // Anonymous namespace + +void EmitBarrier(EmitContext& ctx) { + const auto execution{spv::Scope::Workgroup}; + const auto memory{spv::Scope::Workgroup}; + const auto memory_semantics{spv::MemorySemanticsMask::AcquireRelease | + spv::MemorySemanticsMask::WorkgroupMemory}; + ctx.OpControlBarrier(ctx.Const(static_cast(execution)), + ctx.Const(static_cast(memory)), + ctx.Const(static_cast(memory_semantics))); +} + +void EmitWorkgroupMemoryBarrier(EmitContext& ctx) { + MemoryBarrier(ctx, spv::Scope::Workgroup); +} + +void EmitDeviceMemoryBarrier(EmitContext& ctx) { + MemoryBarrier(ctx, spv::Scope::Device); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_bitwise_conversion.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_bitwise_conversion.cpp new file mode 100755 index 000000000..bb11f4f4e --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_bitwise_conversion.cpp @@ -0,0 +1,66 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { + +void EmitBitCastU16F16(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBitCastU32F32(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.U32[1], value); +} + +void EmitBitCastU64F64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitBitCastF16U16(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBitCastF32U32(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.F32[1], value); +} + +void EmitBitCastF64U64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitPackUint2x32(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.U64, value); +} + +Id EmitUnpackUint2x32(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.U32[2], value); +} + +Id EmitPackFloat2x16(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.U32[1], value); +} + +Id EmitUnpackFloat2x16(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.F16[2], value); +} + +Id EmitPackHalf2x16(EmitContext& ctx, Id value) { + return ctx.OpPackHalf2x16(ctx.U32[1], value); +} + +Id EmitUnpackHalf2x16(EmitContext& ctx, Id value) { + return ctx.OpUnpackHalf2x16(ctx.F32[2], value); +} + +Id EmitPackDouble2x32(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.F64[1], value); +} + +Id EmitUnpackDouble2x32(EmitContext& ctx, Id value) { + return ctx.OpBitcast(ctx.U32[2], value); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_composite.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_composite.cpp new file mode 100755 index 000000000..10ff4ecab --- /dev/null +++ 
b/src/shader_recompiler/backend/spirv/emit_spirv_composite.cpp @@ -0,0 +1,155 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" +#include "shader_recompiler/frontend/ir/modifiers.h" + +namespace Shader::Backend::SPIRV { + +Id EmitCompositeConstructU32x2(EmitContext& ctx, Id e1, Id e2) { + return ctx.OpCompositeConstruct(ctx.U32[2], e1, e2); +} + +Id EmitCompositeConstructU32x3(EmitContext& ctx, Id e1, Id e2, Id e3) { + return ctx.OpCompositeConstruct(ctx.U32[3], e1, e2, e3); +} + +Id EmitCompositeConstructU32x4(EmitContext& ctx, Id e1, Id e2, Id e3, Id e4) { + return ctx.OpCompositeConstruct(ctx.U32[4], e1, e2, e3, e4); +} + +Id EmitCompositeExtractU32x2(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.U32[1], composite, index); +} + +Id EmitCompositeExtractU32x3(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.U32[1], composite, index); +} + +Id EmitCompositeExtractU32x4(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.U32[1], composite, index); +} + +Id EmitCompositeInsertU32x2(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.U32[2], object, composite, index); +} + +Id EmitCompositeInsertU32x3(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.U32[3], object, composite, index); +} + +Id EmitCompositeInsertU32x4(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.U32[4], object, composite, index); +} + +Id EmitCompositeConstructF16x2(EmitContext& ctx, Id e1, Id e2) { + return ctx.OpCompositeConstruct(ctx.F16[2], e1, e2); +} + +Id EmitCompositeConstructF16x3(EmitContext& ctx, Id e1, Id e2, Id e3) { + return ctx.OpCompositeConstruct(ctx.F16[3], e1, e2, e3); +} + +Id EmitCompositeConstructF16x4(EmitContext& ctx, Id e1, Id e2, Id e3, Id e4) { + return ctx.OpCompositeConstruct(ctx.F16[4], e1, e2, e3, e4); +} + +Id EmitCompositeExtractF16x2(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.F16[1], composite, index); +} + +Id EmitCompositeExtractF16x3(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.F16[1], composite, index); +} + +Id EmitCompositeExtractF16x4(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.F16[1], composite, index); +} + +Id EmitCompositeInsertF16x2(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F16[2], object, composite, index); +} + +Id EmitCompositeInsertF16x3(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F16[3], object, composite, index); +} + +Id EmitCompositeInsertF16x4(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F16[4], object, composite, index); +} + +Id EmitCompositeConstructF32x2(EmitContext& ctx, Id e1, Id e2) { + return ctx.OpCompositeConstruct(ctx.F32[2], e1, e2); +} + +Id EmitCompositeConstructF32x3(EmitContext& ctx, Id e1, Id e2, Id e3) { + return ctx.OpCompositeConstruct(ctx.F32[3], e1, e2, e3); +} + +Id EmitCompositeConstructF32x4(EmitContext& ctx, Id e1, Id e2, Id e3, Id e4) { + return ctx.OpCompositeConstruct(ctx.F32[4], e1, e2, e3, e4); +} + +Id EmitCompositeExtractF32x2(EmitContext& ctx, Id composite, u32 
index) { + return ctx.OpCompositeExtract(ctx.F32[1], composite, index); +} + +Id EmitCompositeExtractF32x3(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.F32[1], composite, index); +} + +Id EmitCompositeExtractF32x4(EmitContext& ctx, Id composite, u32 index) { + return ctx.OpCompositeExtract(ctx.F32[1], composite, index); +} + +Id EmitCompositeInsertF32x2(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F32[2], object, composite, index); +} + +Id EmitCompositeInsertF32x3(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F32[3], object, composite, index); +} + +Id EmitCompositeInsertF32x4(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F32[4], object, composite, index); +} + +void EmitCompositeConstructF64x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitCompositeConstructF64x3(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitCompositeConstructF64x4(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitCompositeExtractF64x2(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitCompositeExtractF64x3(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitCompositeExtractF64x4(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitCompositeInsertF64x2(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F64[2], object, composite, index); +} + +Id EmitCompositeInsertF64x3(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F64[3], object, composite, index); +} + +Id EmitCompositeInsertF64x4(EmitContext& ctx, Id composite, Id object, u32 index) { + return ctx.OpCompositeInsert(ctx.F64[4], object, composite, index); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_context_get_set.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_context_get_set.cpp new file mode 100755 index 000000000..756de0a27 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_context_get_set.cpp @@ -0,0 +1,505 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <bit> +#include <optional> + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +struct AttrInfo { + Id pointer; + Id id; + bool needs_cast; +}; + +std::optional<AttrInfo> AttrTypes(EmitContext& ctx, u32 index) { + const AttributeType type{ctx.runtime_info.generic_input_types.at(index)}; + switch (type) { + case AttributeType::Float: + return AttrInfo{ctx.input_f32, ctx.F32[1], false}; + case AttributeType::UnsignedInt: + return AttrInfo{ctx.input_u32, ctx.U32[1], true}; + case AttributeType::SignedInt: + return AttrInfo{ctx.input_s32, ctx.TypeInt(32, true), true}; + case AttributeType::Disabled: + return std::nullopt; + } + throw InvalidArgument("Invalid attribute type {}", type); +} +
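+// In tessellation and geometry stages the input attribute arrays carry an extra per-vertex + // dimension, so access chains must be indexed with the vertex id first; other stages address + // the attribute directly.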
+template <typename... Args> +Id AttrPointer(EmitContext& ctx, Id pointer_type, Id vertex, Id base, Args&&... args) { + switch (ctx.stage) { + case Stage::TessellationControl: + case Stage::TessellationEval: + case Stage::Geometry: + return ctx.OpAccessChain(pointer_type, base, vertex, std::forward<Args>(args)...); + default: + return ctx.OpAccessChain(pointer_type, base, std::forward<Args>(args)...); + } +} + +template <typename... Args> +Id OutputAccessChain(EmitContext& ctx, Id result_type, Id base, Args&&... args) { + if (ctx.stage == Stage::TessellationControl) { + const Id invocation_id{ctx.OpLoad(ctx.U32[1], ctx.invocation_id)}; + return ctx.OpAccessChain(result_type, base, invocation_id, std::forward<Args>(args)...); + } else { + return ctx.OpAccessChain(result_type, base, std::forward<Args>(args)...); + } +} + +struct OutAttr { + OutAttr(Id pointer_) : pointer{pointer_} {} + OutAttr(Id pointer_, Id type_) : pointer{pointer_}, type{type_} {} + + Id pointer{}; + Id type{}; +}; + +std::optional<OutAttr> OutputAttrPointer(EmitContext& ctx, IR::Attribute attr) { + if (IR::IsGeneric(attr)) { + const u32 index{IR::GenericAttributeIndex(attr)}; + const u32 element{IR::GenericAttributeElement(attr)}; + const GenericElementInfo& info{ctx.output_generics.at(index).at(element)}; + if (info.num_components == 1) { + return info.id; + } else { + const u32 index_element{element - info.first_element}; + const Id index_id{ctx.Const(index_element)}; + return OutputAccessChain(ctx, ctx.output_f32, info.id, index_id); + } + } + switch (attr) { + case IR::Attribute::PointSize: + return ctx.output_point_size; + case IR::Attribute::PositionX: + case IR::Attribute::PositionY: + case IR::Attribute::PositionZ: + case IR::Attribute::PositionW: { + const u32 element{static_cast<u32>(attr) % 4}; + const Id element_id{ctx.Const(element)}; + return OutputAccessChain(ctx, ctx.output_f32, ctx.output_position, element_id); + } + case IR::Attribute::ClipDistance0: + case IR::Attribute::ClipDistance1: + case IR::Attribute::ClipDistance2: + case IR::Attribute::ClipDistance3: + case IR::Attribute::ClipDistance4: + case IR::Attribute::ClipDistance5: + case IR::Attribute::ClipDistance6: + case IR::Attribute::ClipDistance7: { + const u32 base{static_cast<u32>(IR::Attribute::ClipDistance0)}; + const u32 index{static_cast<u32>(attr) - base}; + const Id clip_num{ctx.Const(index)}; + return OutputAccessChain(ctx, ctx.output_f32, ctx.clip_distances, clip_num); + } + case IR::Attribute::Layer: + if (ctx.profile.support_viewport_index_layer_non_geometry || + ctx.stage == Shader::Stage::Geometry) { + return OutAttr{ctx.layer, ctx.U32[1]}; + } + return std::nullopt; + case IR::Attribute::ViewportIndex: + if (ctx.profile.support_viewport_index_layer_non_geometry || + ctx.stage == Shader::Stage::Geometry) { + return OutAttr{ctx.viewport_index, ctx.U32[1]}; + } + return std::nullopt; + case IR::Attribute::ViewportMask: + if (!ctx.profile.support_viewport_mask) { + return std::nullopt; + } + return OutAttr{ctx.OpAccessChain(ctx.output_u32, ctx.viewport_mask, ctx.u32_zero_value), + ctx.U32[1]}; + default: + throw NotImplementedException("Read attribute {}", attr); + } +} + +Id GetCbuf(EmitContext& ctx, Id result_type, Id UniformDefinitions::*member_ptr, u32 element_size, + const IR::Value& binding, const IR::Value& offset) { + if (!binding.IsImmediate()) { + throw NotImplementedException("Constant buffer indexing"); + } + const Id cbuf{ctx.cbufs[binding.U32()].*member_ptr}; + const Id uniform_type{ctx.uniform_types.*member_ptr}; + if (!offset.IsImmediate()) { + Id index{ctx.Def(offset)};
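+ // The IR offset is a byte offset, while the uniform buffer is declared as an array of + // element_size-byte elements, so scale the dynamic offset down to an element index.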
+ if (element_size > 1) { + const u32 log2_element_size{static_cast<u32>(std::countr_zero(element_size))}; + const Id shift{ctx.Const(log2_element_size)}; + index = ctx.OpShiftRightArithmetic(ctx.U32[1], ctx.Def(offset), shift); + } + const Id access_chain{ctx.OpAccessChain(uniform_type, cbuf, ctx.u32_zero_value, index)}; + return ctx.OpLoad(result_type, access_chain); + } + // Hardware has been proven to read the aligned offset (e.g. LDC.U32 at offset 6 reads offset 4) + const Id imm_offset{ctx.Const(offset.U32() / element_size)}; + const Id access_chain{ctx.OpAccessChain(uniform_type, cbuf, ctx.u32_zero_value, imm_offset)}; + return ctx.OpLoad(result_type, access_chain); +} + +Id GetCbufU32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + return GetCbuf(ctx, ctx.U32[1], &UniformDefinitions::U32, sizeof(u32), binding, offset); +} + +Id GetCbufU32x4(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + return GetCbuf(ctx, ctx.U32[4], &UniformDefinitions::U32x4, sizeof(u32[4]), binding, offset); +} + +Id GetCbufElement(EmitContext& ctx, Id vector, const IR::Value& offset, u32 index_offset) { + if (offset.IsImmediate()) { + const u32 element{(offset.U32() / 4) % 4 + index_offset}; + return ctx.OpCompositeExtract(ctx.U32[1], vector, element); + } + const Id shift{ctx.OpShiftRightArithmetic(ctx.U32[1], ctx.Def(offset), ctx.Const(2u))}; + Id element{ctx.OpBitwiseAnd(ctx.U32[1], shift, ctx.Const(3u))}; + if (index_offset > 0) { + element = ctx.OpIAdd(ctx.U32[1], element, ctx.Const(index_offset)); + } + return ctx.OpVectorExtractDynamic(ctx.U32[1], vector, element); +} +} // Anonymous namespace + +void EmitGetRegister(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitSetRegister(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetPred(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitSetPred(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitSetGotoVariable(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetGotoVariable(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitSetIndirectBranchVariable(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +void EmitGetIndirectBranchVariable(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitGetCbufU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing && ctx.profile.support_int8) { + const Id load{GetCbuf(ctx, ctx.U8, &UniformDefinitions::U8, sizeof(u8), binding, offset)}; + return ctx.OpUConvert(ctx.U32[1], load); + } + Id element{}; + if (ctx.profile.support_descriptor_aliasing) { + element = GetCbufU32(ctx, binding, offset); + } else { + const Id vector{GetCbufU32x4(ctx, binding, offset)}; + element = GetCbufElement(ctx, vector, offset, 0u); + } + const Id bit_offset{ctx.BitOffset8(offset)}; + return ctx.OpBitFieldUExtract(ctx.U32[1], element, bit_offset, ctx.Const(8u)); +} + +Id EmitGetCbufS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing && ctx.profile.support_int8) { + const Id load{GetCbuf(ctx, ctx.S8, &UniformDefinitions::S8, sizeof(s8), binding, offset)}; + return ctx.OpSConvert(ctx.U32[1], load); + } + Id element{}; + if (ctx.profile.support_descriptor_aliasing) { + element = GetCbufU32(ctx, binding, offset); + } else { + const Id vector{GetCbufU32x4(ctx, binding, offset)}; + element = GetCbufElement(ctx, vector, offset, 0u); + } + const Id bit_offset{ctx.BitOffset8(offset)}; + return
ctx.OpBitFieldSExtract(ctx.U32[1], element, bit_offset, ctx.Const(8u)); +} + +Id EmitGetCbufU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing && ctx.profile.support_int16) { + const Id load{ + GetCbuf(ctx, ctx.U16, &UniformDefinitions::U16, sizeof(u16), binding, offset)}; + return ctx.OpUConvert(ctx.U32[1], load); + } + Id element{}; + if (ctx.profile.support_descriptor_aliasing) { + element = GetCbufU32(ctx, binding, offset); + } else { + const Id vector{GetCbufU32x4(ctx, binding, offset)}; + element = GetCbufElement(ctx, vector, offset, 0u); + } + const Id bit_offset{ctx.BitOffset16(offset)}; + return ctx.OpBitFieldUExtract(ctx.U32[1], element, bit_offset, ctx.Const(16u)); +} + +Id EmitGetCbufS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing && ctx.profile.support_int16) { + const Id load{ + GetCbuf(ctx, ctx.S16, &UniformDefinitions::S16, sizeof(s16), binding, offset)}; + return ctx.OpSConvert(ctx.U32[1], load); + } + Id element{}; + if (ctx.profile.support_descriptor_aliasing) { + element = GetCbufU32(ctx, binding, offset); + } else { + const Id vector{GetCbufU32x4(ctx, binding, offset)}; + element = GetCbufElement(ctx, vector, offset, 0u); + } + const Id bit_offset{ctx.BitOffset16(offset)}; + return ctx.OpBitFieldSExtract(ctx.U32[1], element, bit_offset, ctx.Const(16u)); +} + +Id EmitGetCbufU32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing) { + return GetCbufU32(ctx, binding, offset); + } else { + const Id vector{GetCbufU32x4(ctx, binding, offset)}; + return GetCbufElement(ctx, vector, offset, 0u); + } +} + +Id EmitGetCbufF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing) { + return GetCbuf(ctx, ctx.F32[1], &UniformDefinitions::F32, sizeof(f32), binding, offset); + } else { + const Id vector{GetCbufU32x4(ctx, binding, offset)}; + return ctx.OpBitcast(ctx.F32[1], GetCbufElement(ctx, vector, offset, 0u)); + } +} + +Id EmitGetCbufU32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing) { + return GetCbuf(ctx, ctx.U32[2], &UniformDefinitions::U32x2, sizeof(u32[2]), binding, + offset); + } else { + const Id vector{GetCbufU32x4(ctx, binding, offset)}; + return ctx.OpCompositeConstruct(ctx.U32[2], GetCbufElement(ctx, vector, offset, 0u), + GetCbufElement(ctx, vector, offset, 1u)); + } +} + +Id EmitGetAttribute(EmitContext& ctx, IR::Attribute attr, Id vertex) { + const u32 element{static_cast<u32>(attr) % 4}; + if (IR::IsGeneric(attr)) { + const u32 index{IR::GenericAttributeIndex(attr)}; + const std::optional<AttrInfo> type{AttrTypes(ctx, index)}; + if (!type) { + // Attribute is disabled + return ctx.Const(0.0f); + } + if (!ctx.runtime_info.previous_stage_stores.Generic(index, element)) { + // Varying component is not written + return ctx.Const(type && element == 3 ? 1.0f : 0.0f); + } + const Id generic_id{ctx.input_generics.at(index)}; + const Id pointer{AttrPointer(ctx, type->pointer, vertex, generic_id, ctx.Const(element))}; + const Id value{ctx.OpLoad(type->id, pointer)}; + return type->needs_cast ? ctx.OpBitcast(ctx.F32[1], value) : value; + }
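+ // Non-generic attributes are read from SPIR-V built-ins. Note that FrontFace is + // canonicalized to all ones (0xFFFFFFFF) when the primitive is front-facing, matching the + // integer encoding the IR expects.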
+ switch (attr) { + case IR::Attribute::PrimitiveId: + return ctx.OpBitcast(ctx.F32[1], ctx.OpLoad(ctx.U32[1], ctx.primitive_id)); + case IR::Attribute::PositionX: + case IR::Attribute::PositionY: + case IR::Attribute::PositionZ: + case IR::Attribute::PositionW: + return ctx.OpLoad(ctx.F32[1], AttrPointer(ctx, ctx.input_f32, vertex, ctx.input_position, + ctx.Const(element))); + case IR::Attribute::InstanceId: + if (ctx.profile.support_vertex_instance_id) { + return ctx.OpBitcast(ctx.F32[1], ctx.OpLoad(ctx.U32[1], ctx.instance_id)); + } else { + const Id index{ctx.OpLoad(ctx.U32[1], ctx.instance_index)}; + const Id base{ctx.OpLoad(ctx.U32[1], ctx.base_instance)}; + return ctx.OpBitcast(ctx.F32[1], ctx.OpISub(ctx.U32[1], index, base)); + } + case IR::Attribute::VertexId: + if (ctx.profile.support_vertex_instance_id) { + return ctx.OpBitcast(ctx.F32[1], ctx.OpLoad(ctx.U32[1], ctx.vertex_id)); + } else { + const Id index{ctx.OpLoad(ctx.U32[1], ctx.vertex_index)}; + const Id base{ctx.OpLoad(ctx.U32[1], ctx.base_vertex)}; + return ctx.OpBitcast(ctx.F32[1], ctx.OpISub(ctx.U32[1], index, base)); + } + case IR::Attribute::FrontFace: + return ctx.OpSelect(ctx.U32[1], ctx.OpLoad(ctx.U1, ctx.front_face), + ctx.Const(std::numeric_limits<u32>::max()), ctx.u32_zero_value); + case IR::Attribute::PointSpriteS: + return ctx.OpLoad(ctx.F32[1], + ctx.OpAccessChain(ctx.input_f32, ctx.point_coord, ctx.u32_zero_value)); + case IR::Attribute::PointSpriteT: + return ctx.OpLoad(ctx.F32[1], + ctx.OpAccessChain(ctx.input_f32, ctx.point_coord, ctx.Const(1U))); + case IR::Attribute::TessellationEvaluationPointU: + return ctx.OpLoad(ctx.F32[1], + ctx.OpAccessChain(ctx.input_f32, ctx.tess_coord, ctx.u32_zero_value)); + case IR::Attribute::TessellationEvaluationPointV: + return ctx.OpLoad(ctx.F32[1], + ctx.OpAccessChain(ctx.input_f32, ctx.tess_coord, ctx.Const(1U))); + + default: + throw NotImplementedException("Read attribute {}", attr); + } +} + +void EmitSetAttribute(EmitContext& ctx, IR::Attribute attr, Id value, [[maybe_unused]] Id vertex) { + const std::optional<OutAttr> output{OutputAttrPointer(ctx, attr)}; + if (!output) { + return; + } + if (Sirit::ValidId(output->type)) { + value = ctx.OpBitcast(output->type, value); + } + ctx.OpStore(output->pointer, value); +} + +Id EmitGetAttributeIndexed(EmitContext& ctx, Id offset, Id vertex) { + switch (ctx.stage) { + case Stage::TessellationControl: + case Stage::TessellationEval: + case Stage::Geometry: + return ctx.OpFunctionCall(ctx.F32[1], ctx.indexed_load_func, offset, vertex); + default: + return ctx.OpFunctionCall(ctx.F32[1], ctx.indexed_load_func, offset); + } +} + +void EmitSetAttributeIndexed(EmitContext& ctx, Id offset, Id value, [[maybe_unused]] Id vertex) { + ctx.OpFunctionCall(ctx.void_id, ctx.indexed_store_func, offset, value); +} +
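+// Patch attributes are outputs in tessellation control and inputs in tessellation evaluation, + // so patch reads select the pointer type from the current stage.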
+Id EmitGetPatch(EmitContext& ctx, IR::Patch patch) { + if (!IR::IsGeneric(patch)) { + throw NotImplementedException("Non-generic patch load"); + } + const u32 index{IR::GenericPatchIndex(patch)}; + const Id element{ctx.Const(IR::GenericPatchElement(patch))}; + const Id type{ctx.stage == Stage::TessellationControl ? ctx.output_f32 : ctx.input_f32}; + const Id pointer{ctx.OpAccessChain(type, ctx.patches.at(index), element)}; + return ctx.OpLoad(ctx.F32[1], pointer); +} + +void EmitSetPatch(EmitContext& ctx, IR::Patch patch, Id value) { + const Id pointer{[&] { + if (IR::IsGeneric(patch)) { + const u32 index{IR::GenericPatchIndex(patch)}; + const Id element{ctx.Const(IR::GenericPatchElement(patch))}; + return ctx.OpAccessChain(ctx.output_f32, ctx.patches.at(index), element); + } + switch (patch) { + case IR::Patch::TessellationLodLeft: + case IR::Patch::TessellationLodRight: + case IR::Patch::TessellationLodTop: + case IR::Patch::TessellationLodBottom: { + const u32 index{static_cast<u32>(patch) - u32(IR::Patch::TessellationLodLeft)}; + const Id index_id{ctx.Const(index)}; + return ctx.OpAccessChain(ctx.output_f32, ctx.output_tess_level_outer, index_id); + } + case IR::Patch::TessellationLodInteriorU: + return ctx.OpAccessChain(ctx.output_f32, ctx.output_tess_level_inner, + ctx.u32_zero_value); + case IR::Patch::TessellationLodInteriorV: + return ctx.OpAccessChain(ctx.output_f32, ctx.output_tess_level_inner, ctx.Const(1u)); + default: + throw NotImplementedException("Patch {}", patch); + } + }()}; + ctx.OpStore(pointer, value); +} + +void EmitSetFragColor(EmitContext& ctx, u32 index, u32 component, Id value) { + const Id component_id{ctx.Const(component)}; + const Id pointer{ctx.OpAccessChain(ctx.output_f32, ctx.frag_color.at(index), component_id)}; + ctx.OpStore(pointer, value); +} + +void EmitSetSampleMask(EmitContext& ctx, Id value) { + ctx.OpStore(ctx.sample_mask, value); +} + +void EmitSetFragDepth(EmitContext& ctx, Id value) { + ctx.OpStore(ctx.frag_depth, value); +} + +void EmitGetZFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitGetSFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitGetCFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitGetOFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitSetZFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitSetSFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitSetCFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitSetOFlag(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitWorkgroupId(EmitContext& ctx) { + return ctx.OpLoad(ctx.U32[3], ctx.workgroup_id); +} + +Id EmitLocalInvocationId(EmitContext& ctx) { + return ctx.OpLoad(ctx.U32[3], ctx.local_invocation_id); +} + +Id EmitInvocationId(EmitContext& ctx) { + return ctx.OpLoad(ctx.U32[1], ctx.invocation_id); +} + +Id EmitSampleId(EmitContext& ctx) { + return ctx.OpLoad(ctx.U32[1], ctx.sample_id); +} + +Id EmitIsHelperInvocation(EmitContext& ctx) { + return ctx.OpLoad(ctx.U1, ctx.is_helper_invocation); +} + +Id EmitYDirection(EmitContext& ctx) { + return ctx.Const(ctx.runtime_info.y_negate ?
-1.0f : 1.0f); +} + +Id EmitLoadLocal(EmitContext& ctx, Id word_offset) { + const Id pointer{ctx.OpAccessChain(ctx.private_u32, ctx.local_memory, word_offset)}; + return ctx.OpLoad(ctx.U32[1], pointer); +} + +void EmitWriteLocal(EmitContext& ctx, Id word_offset, Id value) { + const Id pointer{ctx.OpAccessChain(ctx.private_u32, ctx.local_memory, word_offset)}; + ctx.OpStore(pointer, value); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_control_flow.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_control_flow.cpp new file mode 100755 index 000000000..d33486f28 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_control_flow.cpp @@ -0,0 +1,28 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { + +void EmitJoin(EmitContext&) { + throw NotImplementedException("Join shouldn't be emitted"); +} + +void EmitDemoteToHelperInvocation(EmitContext& ctx) { + if (ctx.profile.support_demote_to_helper_invocation) { + ctx.OpDemoteToHelperInvocationEXT(); + } else { + const Id kill_label{ctx.OpLabel()}; + const Id impossible_label{ctx.OpLabel()}; + ctx.OpSelectionMerge(impossible_label, spv::SelectionControlMask::MaskNone); + ctx.OpBranchConditional(ctx.true_value, kill_label, impossible_label); + ctx.AddLabel(kill_label); + ctx.OpKill(); + ctx.AddLabel(impossible_label); + } +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_convert.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_convert.cpp new file mode 100755 index 000000000..fd42b7a16 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_convert.cpp @@ -0,0 +1,269 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +Id ExtractU16(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpUConvert(ctx.U16, value); + } else { + return ctx.OpBitFieldUExtract(ctx.U32[1], value, ctx.u32_zero_value, ctx.Const(16u)); + } +} + +Id ExtractS16(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpSConvert(ctx.S16, value); + } else { + return ctx.OpBitFieldSExtract(ctx.U32[1], value, ctx.u32_zero_value, ctx.Const(16u)); + } +} + +Id ExtractU8(EmitContext& ctx, Id value) { + if (ctx.profile.support_int8) { + return ctx.OpUConvert(ctx.U8, value); + } else { + return ctx.OpBitFieldUExtract(ctx.U32[1], value, ctx.u32_zero_value, ctx.Const(8u)); + } +} + +Id ExtractS8(EmitContext& ctx, Id value) { + if (ctx.profile.support_int8) { + return ctx.OpSConvert(ctx.S8, value); + } else { + return ctx.OpBitFieldSExtract(ctx.U32[1], value, ctx.u32_zero_value, ctx.Const(8u)); + } +} +} // Anonymous namespace + +Id EmitConvertS16F16(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpSConvert(ctx.U32[1], ctx.OpConvertFToS(ctx.U16, value)); + } else { + return ExtractS16(ctx, ctx.OpConvertFToS(ctx.U32[1], value)); + } +} + +Id EmitConvertS16F32(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpSConvert(ctx.U32[1], ctx.OpConvertFToS(ctx.U16, value)); + } else { + return ExtractS16(ctx, ctx.OpConvertFToS(ctx.U32[1], value)); + } +} + +Id EmitConvertS16F64(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpSConvert(ctx.U32[1], ctx.OpConvertFToS(ctx.U16, value)); + } else { + return ExtractS16(ctx, ctx.OpConvertFToS(ctx.U32[1], value)); + } +} + +Id EmitConvertS32F16(EmitContext& ctx, Id value) { + return ctx.OpConvertFToS(ctx.U32[1], value); +} + +Id EmitConvertS32F32(EmitContext& ctx, Id value) { + if (ctx.profile.has_broken_signed_operations) { + return ctx.OpBitcast(ctx.U32[1], ctx.OpConvertFToS(ctx.S32[1], value)); + } else { + return ctx.OpConvertFToS(ctx.U32[1], value); + } +} + +Id EmitConvertS32F64(EmitContext& ctx, Id value) { + return ctx.OpConvertFToS(ctx.U32[1], value); +} + +Id EmitConvertS64F16(EmitContext& ctx, Id value) { + return ctx.OpConvertFToS(ctx.U64, value); +} + +Id EmitConvertS64F32(EmitContext& ctx, Id value) { + return ctx.OpConvertFToS(ctx.U64, value); +} + +Id EmitConvertS64F64(EmitContext& ctx, Id value) { + return ctx.OpConvertFToS(ctx.U64, value); +} + +Id EmitConvertU16F16(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpUConvert(ctx.U32[1], ctx.OpConvertFToU(ctx.U16, value)); + } else { + return ExtractU16(ctx, ctx.OpConvertFToU(ctx.U32[1], value)); + } +} + +Id EmitConvertU16F32(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpUConvert(ctx.U32[1], ctx.OpConvertFToU(ctx.U16, value)); + } else { + return ExtractU16(ctx, ctx.OpConvertFToU(ctx.U32[1], value)); + } +} + +Id EmitConvertU16F64(EmitContext& ctx, Id value) { + if (ctx.profile.support_int16) { + return ctx.OpUConvert(ctx.U32[1], ctx.OpConvertFToU(ctx.U16, value)); + } else { + return ExtractU16(ctx, ctx.OpConvertFToU(ctx.U32[1], value)); + } +} + +Id EmitConvertU32F16(EmitContext& ctx, Id value) { + return ctx.OpConvertFToU(ctx.U32[1], value); +} + +Id EmitConvertU32F32(EmitContext& ctx, Id value) { + return ctx.OpConvertFToU(ctx.U32[1], value); +} + +Id 
EmitConvertU32F64(EmitContext& ctx, Id value) { + return ctx.OpConvertFToU(ctx.U32[1], value); +} + +Id EmitConvertU64F16(EmitContext& ctx, Id value) { + return ctx.OpConvertFToU(ctx.U64, value); +} + +Id EmitConvertU64F32(EmitContext& ctx, Id value) { + return ctx.OpConvertFToU(ctx.U64, value); +} + +Id EmitConvertU64F64(EmitContext& ctx, Id value) { + return ctx.OpConvertFToU(ctx.U64, value); +} + +Id EmitConvertU64U32(EmitContext& ctx, Id value) { + return ctx.OpUConvert(ctx.U64, value); +} + +Id EmitConvertU32U64(EmitContext& ctx, Id value) { + return ctx.OpUConvert(ctx.U32[1], value); +} + +Id EmitConvertF16F32(EmitContext& ctx, Id value) { + return ctx.OpFConvert(ctx.F16[1], value); +} + +Id EmitConvertF32F16(EmitContext& ctx, Id value) { + return ctx.OpFConvert(ctx.F32[1], value); +} + +Id EmitConvertF32F64(EmitContext& ctx, Id value) { + return ctx.OpFConvert(ctx.F32[1], value); +} + +Id EmitConvertF64F32(EmitContext& ctx, Id value) { + return ctx.OpFConvert(ctx.F64[1], value); +} + +Id EmitConvertF16S8(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F16[1], ExtractS8(ctx, value)); +} + +Id EmitConvertF16S16(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F16[1], ExtractS16(ctx, value)); +} + +Id EmitConvertF16S32(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F16[1], value); +} + +Id EmitConvertF16S64(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F16[1], value); +} + +Id EmitConvertF16U8(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F16[1], ExtractU8(ctx, value)); +} + +Id EmitConvertF16U16(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F16[1], ExtractU16(ctx, value)); +} + +Id EmitConvertF16U32(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F16[1], value); +} + +Id EmitConvertF16U64(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F16[1], value); +} + +Id EmitConvertF32S8(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F32[1], ExtractS8(ctx, value)); +} + +Id EmitConvertF32S16(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F32[1], ExtractS16(ctx, value)); +} + +Id EmitConvertF32S32(EmitContext& ctx, Id value) { + if (ctx.profile.has_broken_signed_operations) { + value = ctx.OpBitcast(ctx.S32[1], value); + } + return ctx.OpConvertSToF(ctx.F32[1], value); +} + +Id EmitConvertF32S64(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F32[1], value); +} + +Id EmitConvertF32U8(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F32[1], ExtractU8(ctx, value)); +} + +Id EmitConvertF32U16(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F32[1], ExtractU16(ctx, value)); +} + +Id EmitConvertF32U32(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F32[1], value); +} + +Id EmitConvertF32U64(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F32[1], value); +} + +Id EmitConvertF64S8(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F64[1], ExtractS8(ctx, value)); +} + +Id EmitConvertF64S16(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F64[1], ExtractS16(ctx, value)); +} + +Id EmitConvertF64S32(EmitContext& ctx, Id value) { + if (ctx.profile.has_broken_signed_operations) { + value = ctx.OpBitcast(ctx.S32[1], value); + } + return ctx.OpConvertSToF(ctx.F64[1], value); +} + +Id EmitConvertF64S64(EmitContext& ctx, Id value) { + return ctx.OpConvertSToF(ctx.F64[1], value); +} + +Id EmitConvertF64U8(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F64[1], ExtractU8(ctx, 
value)); +} + +Id EmitConvertF64U16(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F64[1], ExtractU16(ctx, value)); +} + +Id EmitConvertF64U32(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F64[1], value); +} + +Id EmitConvertF64U64(EmitContext& ctx, Id value) { + return ctx.OpConvertUToF(ctx.F64[1], value); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_floating_point.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_floating_point.cpp new file mode 100755 index 000000000..61cf25f9c --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_floating_point.cpp @@ -0,0 +1,396 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" +#include "shader_recompiler/frontend/ir/modifiers.h" + +namespace Shader::Backend::SPIRV { +namespace { +Id Decorate(EmitContext& ctx, IR::Inst* inst, Id op) { + const auto flags{inst->Flags<IR::FpControl>()}; + if (flags.no_contraction) { + ctx.Decorate(op, spv::Decoration::NoContraction); + } + return op; +} + +Id Clamp(EmitContext& ctx, Id type, Id value, Id zero, Id one) { + if (ctx.profile.has_broken_spirv_clamp) { + return ctx.OpFMin(type, ctx.OpFMax(type, value, zero), one); + } else { + return ctx.OpFClamp(type, value, zero, one); + } +} + +Id FPOrdNotEqual(EmitContext& ctx, Id lhs, Id rhs) { + if (ctx.profile.ignore_nan_fp_comparisons) { + const Id comp{ctx.OpFOrdEqual(ctx.U1, lhs, rhs)}; + const Id lhs_not_nan{ctx.OpLogicalNot(ctx.U1, ctx.OpIsNan(ctx.U1, lhs))}; + const Id rhs_not_nan{ctx.OpLogicalNot(ctx.U1, ctx.OpIsNan(ctx.U1, rhs))}; + return ctx.OpLogicalAnd(ctx.U1, ctx.OpLogicalAnd(ctx.U1, comp, lhs_not_nan), rhs_not_nan); + } else { + return ctx.OpFOrdNotEqual(ctx.U1, lhs, rhs); + } +} + +Id FPUnordCompare(Id (EmitContext::*comp_func)(Id, Id, Id), EmitContext& ctx, Id lhs, Id rhs) { + if (ctx.profile.ignore_nan_fp_comparisons) { + const Id lhs_nan{ctx.OpIsNan(ctx.U1, lhs)}; + const Id rhs_nan{ctx.OpIsNan(ctx.U1, rhs)}; + const Id comp{(ctx.*comp_func)(ctx.U1, lhs, rhs)}; + return ctx.OpLogicalOr(ctx.U1, ctx.OpLogicalOr(ctx.U1, comp, lhs_nan), rhs_nan); + } else { + return (ctx.*comp_func)(ctx.U1, lhs, rhs); + } +} +} // Anonymous namespace + +Id EmitFPAbs16(EmitContext& ctx, Id value) { + return ctx.OpFAbs(ctx.F16[1], value); +} + +Id EmitFPAbs32(EmitContext& ctx, Id value) { + return ctx.OpFAbs(ctx.F32[1], value); +} + +Id EmitFPAbs64(EmitContext& ctx, Id value) { + return ctx.OpFAbs(ctx.F64[1], value); +} + +Id EmitFPAdd16(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + return Decorate(ctx, inst, ctx.OpFAdd(ctx.F16[1], a, b)); +} + +Id EmitFPAdd32(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + return Decorate(ctx, inst, ctx.OpFAdd(ctx.F32[1], a, b)); +} + +Id EmitFPAdd64(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + return Decorate(ctx, inst, ctx.OpFAdd(ctx.F64[1], a, b)); +} + +Id EmitFPFma16(EmitContext& ctx, IR::Inst* inst, Id a, Id b, Id c) { + return Decorate(ctx, inst, ctx.OpFma(ctx.F16[1], a, b, c)); +} + +Id EmitFPFma32(EmitContext& ctx, IR::Inst* inst, Id a, Id b, Id c) { + return Decorate(ctx, inst, ctx.OpFma(ctx.F32[1], a, b, c)); +} + +Id EmitFPFma64(EmitContext& ctx, IR::Inst* inst, Id a, Id b, Id c) { + return Decorate(ctx, inst, ctx.OpFma(ctx.F64[1], a, b, c)); +} + +Id EmitFPMax32(EmitContext& ctx, Id a, Id b) { + return
ctx.OpFMax(ctx.F32[1], a, b); +} + +Id EmitFPMax64(EmitContext& ctx, Id a, Id b) { + return ctx.OpFMax(ctx.F64[1], a, b); +} + +Id EmitFPMin32(EmitContext& ctx, Id a, Id b) { + return ctx.OpFMin(ctx.F32[1], a, b); +} + +Id EmitFPMin64(EmitContext& ctx, Id a, Id b) { + return ctx.OpFMin(ctx.F64[1], a, b); +} + +Id EmitFPMul16(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + return Decorate(ctx, inst, ctx.OpFMul(ctx.F16[1], a, b)); +} + +Id EmitFPMul32(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + return Decorate(ctx, inst, ctx.OpFMul(ctx.F32[1], a, b)); +} + +Id EmitFPMul64(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + return Decorate(ctx, inst, ctx.OpFMul(ctx.F64[1], a, b)); +} + +Id EmitFPNeg16(EmitContext& ctx, Id value) { + return ctx.OpFNegate(ctx.F16[1], value); +} + +Id EmitFPNeg32(EmitContext& ctx, Id value) { + return ctx.OpFNegate(ctx.F32[1], value); +} + +Id EmitFPNeg64(EmitContext& ctx, Id value) { + return ctx.OpFNegate(ctx.F64[1], value); +} + +Id EmitFPSin(EmitContext& ctx, Id value) { + return ctx.OpSin(ctx.F32[1], value); +} + +Id EmitFPCos(EmitContext& ctx, Id value) { + return ctx.OpCos(ctx.F32[1], value); +} + +Id EmitFPExp2(EmitContext& ctx, Id value) { + return ctx.OpExp2(ctx.F32[1], value); +} + +Id EmitFPLog2(EmitContext& ctx, Id value) { + return ctx.OpLog2(ctx.F32[1], value); +} + +Id EmitFPRecip32(EmitContext& ctx, Id value) { + return ctx.OpFDiv(ctx.F32[1], ctx.Const(1.0f), value); +} + +Id EmitFPRecip64(EmitContext& ctx, Id value) { + return ctx.OpFDiv(ctx.F64[1], ctx.Constant(ctx.F64[1], 1.0f), value); +} + +Id EmitFPRecipSqrt32(EmitContext& ctx, Id value) { + return ctx.OpInverseSqrt(ctx.F32[1], value); +} + +Id EmitFPRecipSqrt64(EmitContext& ctx, Id value) { + return ctx.OpInverseSqrt(ctx.F64[1], value); +} + +Id EmitFPSqrt(EmitContext& ctx, Id value) { + return ctx.OpSqrt(ctx.F32[1], value); +} + +Id EmitFPSaturate16(EmitContext& ctx, Id value) { + const Id zero{ctx.Constant(ctx.F16[1], u16{0})}; + const Id one{ctx.Constant(ctx.F16[1], u16{0x3c00})}; + return Clamp(ctx, ctx.F16[1], value, zero, one); +} + +Id EmitFPSaturate32(EmitContext& ctx, Id value) { + const Id zero{ctx.Const(f32{0.0})}; + const Id one{ctx.Const(f32{1.0})}; + return Clamp(ctx, ctx.F32[1], value, zero, one); +} + +Id EmitFPSaturate64(EmitContext& ctx, Id value) { + const Id zero{ctx.Constant(ctx.F64[1], f64{0.0})}; + const Id one{ctx.Constant(ctx.F64[1], f64{1.0})}; + return Clamp(ctx, ctx.F64[1], value, zero, one); +} + +Id EmitFPClamp16(EmitContext& ctx, Id value, Id min_value, Id max_value) { + return Clamp(ctx, ctx.F16[1], value, min_value, max_value); +} + +Id EmitFPClamp32(EmitContext& ctx, Id value, Id min_value, Id max_value) { + return Clamp(ctx, ctx.F32[1], value, min_value, max_value); +} + +Id EmitFPClamp64(EmitContext& ctx, Id value, Id min_value, Id max_value) { + return Clamp(ctx, ctx.F64[1], value, min_value, max_value); +} + +Id EmitFPRoundEven16(EmitContext& ctx, Id value) { + return ctx.OpRoundEven(ctx.F16[1], value); +} + +Id EmitFPRoundEven32(EmitContext& ctx, Id value) { + return ctx.OpRoundEven(ctx.F32[1], value); +} + +Id EmitFPRoundEven64(EmitContext& ctx, Id value) { + return ctx.OpRoundEven(ctx.F64[1], value); +} + +Id EmitFPFloor16(EmitContext& ctx, Id value) { + return ctx.OpFloor(ctx.F16[1], value); +} + +Id EmitFPFloor32(EmitContext& ctx, Id value) { + return ctx.OpFloor(ctx.F32[1], value); +} + +Id EmitFPFloor64(EmitContext& ctx, Id value) { + return ctx.OpFloor(ctx.F64[1], value); +} + +Id EmitFPCeil16(EmitContext& ctx, Id value) { + return 
ctx.OpCeil(ctx.F16[1], value); +} + +Id EmitFPCeil32(EmitContext& ctx, Id value) { + return ctx.OpCeil(ctx.F32[1], value); +} + +Id EmitFPCeil64(EmitContext& ctx, Id value) { + return ctx.OpCeil(ctx.F64[1], value); +} + +Id EmitFPTrunc16(EmitContext& ctx, Id value) { + return ctx.OpTrunc(ctx.F16[1], value); +} + +Id EmitFPTrunc32(EmitContext& ctx, Id value) { + return ctx.OpTrunc(ctx.F32[1], value); +} + +Id EmitFPTrunc64(EmitContext& ctx, Id value) { + return ctx.OpTrunc(ctx.F64[1], value); +} + +Id EmitFPOrdEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPUnordEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordEqual, ctx, lhs, rhs); +} + +Id EmitFPUnordEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordEqual, ctx, lhs, rhs); +} + +Id EmitFPUnordEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordEqual, ctx, lhs, rhs); +} + +Id EmitFPOrdNotEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return FPOrdNotEqual(ctx, lhs, rhs); +} + +Id EmitFPOrdNotEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return FPOrdNotEqual(ctx, lhs, rhs); +} + +Id EmitFPOrdNotEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return FPOrdNotEqual(ctx, lhs, rhs); +} + +Id EmitFPUnordNotEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFUnordNotEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPUnordNotEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFUnordNotEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPUnordNotEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFUnordNotEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdLessThan16(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdLessThan(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdLessThan32(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdLessThan(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdLessThan64(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdLessThan(ctx.U1, lhs, rhs); +} + +Id EmitFPUnordLessThan16(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordLessThan, ctx, lhs, rhs); +} + +Id EmitFPUnordLessThan32(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordLessThan, ctx, lhs, rhs); +} + +Id EmitFPUnordLessThan64(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordLessThan, ctx, lhs, rhs); +} + +Id EmitFPOrdGreaterThan16(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdGreaterThan(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdGreaterThan32(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdGreaterThan(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdGreaterThan64(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdGreaterThan(ctx.U1, lhs, rhs); +} + +Id EmitFPUnordGreaterThan16(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordGreaterThan, ctx, lhs, rhs); +} + +Id EmitFPUnordGreaterThan32(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordGreaterThan, ctx, lhs, rhs); +} + +Id EmitFPUnordGreaterThan64(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordGreaterThan, ctx, lhs, rhs); +} + +Id EmitFPOrdLessThanEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdLessThanEqual(ctx.U1, lhs, rhs); +} + +Id 
EmitFPOrdLessThanEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdLessThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdLessThanEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdLessThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPUnordLessThanEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordLessThanEqual, ctx, lhs, rhs); +} + +Id EmitFPUnordLessThanEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordLessThanEqual, ctx, lhs, rhs); +} + +Id EmitFPUnordLessThanEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordLessThanEqual, ctx, lhs, rhs); +} + +Id EmitFPOrdGreaterThanEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdGreaterThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdGreaterThanEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdGreaterThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPOrdGreaterThanEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpFOrdGreaterThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitFPUnordGreaterThanEqual16(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordGreaterThanEqual, ctx, lhs, rhs); +} + +Id EmitFPUnordGreaterThanEqual32(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordGreaterThanEqual, ctx, lhs, rhs); +} + +Id EmitFPUnordGreaterThanEqual64(EmitContext& ctx, Id lhs, Id rhs) { + return FPUnordCompare(&EmitContext::OpFUnordGreaterThanEqual, ctx, lhs, rhs); +} + +Id EmitFPIsNan16(EmitContext& ctx, Id value) { + return ctx.OpIsNan(ctx.U1, value); +} + +Id EmitFPIsNan32(EmitContext& ctx, Id value) { + return ctx.OpIsNan(ctx.U1, value); +} + +Id EmitFPIsNan64(EmitContext& ctx, Id value) { + return ctx.OpIsNan(ctx.U1, value); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_image.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_image.cpp new file mode 100755 index 000000000..647804814 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_image.cpp @@ -0,0 +1,457 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <boost/container/static_vector.hpp> + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" +#include "shader_recompiler/frontend/ir/modifiers.h" + +namespace Shader::Backend::SPIRV { +namespace { +class ImageOperands { +public: + explicit ImageOperands(EmitContext& ctx, bool has_bias, bool has_lod, bool has_lod_clamp, + Id lod, const IR::Value& offset) { + if (has_bias) { + const Id bias{has_lod_clamp ? ctx.OpCompositeExtract(ctx.F32[1], lod, 0) : lod}; + Add(spv::ImageOperandsMask::Bias, bias); + } + if (has_lod) { + const Id lod_value{has_lod_clamp ? ctx.OpCompositeExtract(ctx.F32[1], lod, 0) : lod}; + Add(spv::ImageOperandsMask::Lod, lod_value); + } + AddOffset(ctx, offset); + if (has_lod_clamp) { + const Id lod_clamp{has_bias ? ctx.OpCompositeExtract(ctx.F32[1], lod, 1) : lod}; + Add(spv::ImageOperandsMask::MinLod, lod_clamp); + } + }
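+ // Gather constructor: offset and offset2 pack the four texel offsets of a PTP gather. + // SPIR-V only accepts them through the ConstOffsets image operand, which must be a + // compile-time constant, so non-immediate offsets are dropped with a warning.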
+ + explicit ImageOperands(EmitContext& ctx, const IR::Value& offset, const IR::Value& offset2) { + if (offset2.IsEmpty()) { + if (offset.IsEmpty()) { + return; + } + Add(spv::ImageOperandsMask::Offset, ctx.Def(offset)); + return; + } + const std::array values{offset.InstRecursive(), offset2.InstRecursive()}; + if (!values[0]->AreAllArgsImmediates() || !values[1]->AreAllArgsImmediates()) { + LOG_WARNING(Shader_SPIRV, "Not all arguments in PTP are immediate, ignoring"); + return; + } + const IR::Opcode opcode{values[0]->GetOpcode()}; + if (opcode != values[1]->GetOpcode() || opcode != IR::Opcode::CompositeConstructU32x4) { + throw LogicError("Invalid PTP arguments"); + } + auto read{[&](unsigned int a, unsigned int b) { return values[a]->Arg(b).U32(); }}; + + const Id offsets{ctx.ConstantComposite( + ctx.TypeArray(ctx.U32[2], ctx.Const(4U)), ctx.Const(read(0, 0), read(0, 1)), + ctx.Const(read(0, 2), read(0, 3)), ctx.Const(read(1, 0), read(1, 1)), + ctx.Const(read(1, 2), read(1, 3)))}; + Add(spv::ImageOperandsMask::ConstOffsets, offsets); + } + + explicit ImageOperands(Id offset, Id lod, Id ms) { + if (Sirit::ValidId(lod)) { + Add(spv::ImageOperandsMask::Lod, lod); + } + if (Sirit::ValidId(offset)) { + Add(spv::ImageOperandsMask::Offset, offset); + } + if (Sirit::ValidId(ms)) { + Add(spv::ImageOperandsMask::Sample, ms); + } + } + + explicit ImageOperands(EmitContext& ctx, bool has_lod_clamp, Id derivates, u32 num_derivates, + Id offset, Id lod_clamp) { + if (!Sirit::ValidId(derivates)) { + throw LogicError("Derivates must be present"); + } + boost::container::static_vector<Id, 3> deriv_x_accum; + boost::container::static_vector<Id, 3> deriv_y_accum; + for (u32 i = 0; i < num_derivates; ++i) { + deriv_x_accum.push_back(ctx.OpCompositeExtract(ctx.F32[1], derivates, i * 2)); + deriv_y_accum.push_back(ctx.OpCompositeExtract(ctx.F32[1], derivates, i * 2 + 1)); + } + const Id derivates_X{ctx.OpCompositeConstruct( + ctx.F32[num_derivates], std::span{deriv_x_accum.data(), deriv_x_accum.size()})}; + const Id derivates_Y{ctx.OpCompositeConstruct( + ctx.F32[num_derivates], std::span{deriv_y_accum.data(), deriv_y_accum.size()})}; + Add(spv::ImageOperandsMask::Grad, derivates_X, derivates_Y); + if (Sirit::ValidId(offset)) { + Add(spv::ImageOperandsMask::Offset, offset); + } + if (has_lod_clamp) { + Add(spv::ImageOperandsMask::MinLod, lod_clamp); + } + } + + std::span<const Id> Span() const noexcept { + return std::span{operands.data(), operands.size()}; + } + + std::optional<spv::ImageOperandsMask> MaskOptional() const noexcept { + return mask != spv::ImageOperandsMask{} ?
std::make_optional(mask) : std::nullopt; + } + + spv::ImageOperandsMask Mask() const noexcept { + return mask; + } + +private: + void AddOffset(EmitContext& ctx, const IR::Value& offset) { + if (offset.IsEmpty()) { + return; + } + if (offset.IsImmediate()) { + Add(spv::ImageOperandsMask::ConstOffset, ctx.SConst(offset.U32())); + return; + } + IR::Inst* const inst{offset.InstRecursive()}; + if (inst->AreAllArgsImmediates()) { + switch (inst->GetOpcode()) { + case IR::Opcode::CompositeConstructU32x2: + Add(spv::ImageOperandsMask::ConstOffset, + ctx.SConst(inst->Arg(0).U32(), inst->Arg(1).U32())); + return; + case IR::Opcode::CompositeConstructU32x3: + Add(spv::ImageOperandsMask::ConstOffset, + ctx.SConst(inst->Arg(0).U32(), inst->Arg(1).U32(), inst->Arg(2).U32())); + return; + case IR::Opcode::CompositeConstructU32x4: + Add(spv::ImageOperandsMask::ConstOffset, + ctx.SConst(inst->Arg(0).U32(), inst->Arg(1).U32(), inst->Arg(2).U32(), + inst->Arg(3).U32())); + return; + default: + break; + } + } + Add(spv::ImageOperandsMask::Offset, ctx.Def(offset)); + } + + void Add(spv::ImageOperandsMask new_mask, Id value) { + mask = static_cast<spv::ImageOperandsMask>(static_cast<u32>(mask) | + static_cast<u32>(new_mask)); + operands.push_back(value); + } + + void Add(spv::ImageOperandsMask new_mask, Id value_1, Id value_2) { + mask = static_cast<spv::ImageOperandsMask>(static_cast<u32>(mask) | + static_cast<u32>(new_mask)); + operands.push_back(value_1); + operands.push_back(value_2); + } + + boost::container::static_vector<Id, 4> operands; + spv::ImageOperandsMask mask{}; +}; + +Id Texture(EmitContext& ctx, IR::TextureInstInfo info, [[maybe_unused]] const IR::Value& index) { + const TextureDefinition& def{ctx.textures.at(info.descriptor_index)}; + if (def.count > 1) { + const Id pointer{ctx.OpAccessChain(def.pointer_type, def.id, ctx.Def(index))}; + return ctx.OpLoad(def.sampled_type, pointer); + } else { + return ctx.OpLoad(def.sampled_type, def.id); + } +} + +Id TextureImage(EmitContext& ctx, IR::TextureInstInfo info, const IR::Value& index) { + if (!index.IsImmediate() || index.U32() != 0) { + throw NotImplementedException("Indirect image indexing"); + } + if (info.type == TextureType::Buffer) { + const TextureBufferDefinition& def{ctx.texture_buffers.at(info.descriptor_index)}; + if (def.count > 1) { + throw NotImplementedException("Indirect texture sample"); + } + const Id sampler_id{def.id}; + const Id id{ctx.OpLoad(ctx.sampled_texture_buffer_type, sampler_id)}; + return ctx.OpImage(ctx.image_buffer_type, id); + } else { + const TextureDefinition& def{ctx.textures.at(info.descriptor_index)}; + if (def.count > 1) { + throw NotImplementedException("Indirect texture sample"); + } + return ctx.OpImage(def.image_type, ctx.OpLoad(def.sampled_type, def.id)); + } +} + +Id Image(EmitContext& ctx, const IR::Value& index, IR::TextureInstInfo info) { + if (!index.IsImmediate() || index.U32() != 0) { + throw NotImplementedException("Indirect image indexing"); + } + if (info.type == TextureType::Buffer) { + const ImageBufferDefinition def{ctx.image_buffers.at(info.descriptor_index)}; + return ctx.OpLoad(def.image_type, def.id); + } else { + const ImageDefinition def{ctx.images.at(info.descriptor_index)}; + return ctx.OpLoad(def.image_type, def.id); + } +} + +Id Decorate(EmitContext& ctx, IR::Inst* inst, Id sample) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + if (info.relaxed_precision != 0) { + ctx.Decorate(sample, spv::Decoration::RelaxedPrecision); + } + return sample; +} +
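+// Dispatches to the sparse or the non-sparse variant of an image instruction. When the IR + // instruction has a GetSparseFromOp pseudo-instruction, the sparse variant is emitted: it + // returns a struct of {residency code, texel}, the residency code is lowered through + // OpImageSparseTexelsResident, and the texel is extracted as the result.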
+template <typename MethodPtrType, typename... Args> +Id Emit(MethodPtrType sparse_ptr, MethodPtrType non_sparse_ptr, EmitContext& ctx, IR::Inst* inst, + Id result_type, Args&&... args) { + IR::Inst* const sparse{inst->GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp)}; + if (!sparse) { + return Decorate(ctx, inst, (ctx.*non_sparse_ptr)(result_type, std::forward<Args>(args)...)); + } + const Id struct_type{ctx.TypeStruct(ctx.U32[1], result_type)}; + const Id sample{(ctx.*sparse_ptr)(struct_type, std::forward<Args>(args)...)}; + const Id resident_code{ctx.OpCompositeExtract(ctx.U32[1], sample, 0U)}; + sparse->SetDefinition(ctx.OpImageSparseTexelsResident(ctx.U1, resident_code)); + sparse->Invalidate(); + Decorate(ctx, inst, sample); + return ctx.OpCompositeExtract(result_type, sample, 1U); +} +} // Anonymous namespace + +Id EmitBindlessImageSampleImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageSampleExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageSampleDrefImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageSampleDrefExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageGather(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageGatherDref(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageFetch(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageQueryDimensions(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageQueryLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageGradient(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageRead(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBindlessImageWrite(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageSampleImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageSampleExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageSampleDrefImplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageSampleDrefExplicitLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageGather(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageGatherDref(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageFetch(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageQueryDimensions(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageQueryLod(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageGradient(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageRead(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitBoundImageWrite(EmitContext&) { + throw LogicError("Unreachable instruction"); +} + +Id EmitImageSampleImplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id bias_lc, const IR::Value& offset) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + if (ctx.stage == Stage::Fragment) { + const ImageOperands operands(ctx, info.has_bias != 0, false, info.has_lod_clamp != 0, + bias_lc, offset); + return Emit(&EmitContext::OpImageSparseSampleImplicitLod, + &EmitContext::OpImageSampleImplicitLod, ctx, inst, ctx.F32[4], + Texture(ctx, info, index), coords, 
operands.MaskOptional(), operands.Span()); + } else { + // We can't use implicit LODs on non-fragment stages in SPIR-V. Maxwell hardware behaves as + // if the LOD was explicitly zero. This may change on Turing with implicit compute + // derivatives. + const Id lod{ctx.Const(0.0f)}; + const ImageOperands operands(ctx, false, true, info.has_lod_clamp != 0, lod, offset); + return Emit(&EmitContext::OpImageSparseSampleExplicitLod, + &EmitContext::OpImageSampleExplicitLod, ctx, inst, ctx.F32[4], + Texture(ctx, info, index), coords, operands.Mask(), operands.Span()); + } +} + +Id EmitImageSampleExplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id lod, const IR::Value& offset) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, false, true, false, lod, offset); + return Emit(&EmitContext::OpImageSparseSampleExplicitLod, + &EmitContext::OpImageSampleExplicitLod, ctx, inst, ctx.F32[4], + Texture(ctx, info, index), coords, operands.Mask(), operands.Span()); +} + +Id EmitImageSampleDrefImplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, + Id coords, Id dref, Id bias_lc, const IR::Value& offset) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, info.has_bias != 0, false, info.has_lod_clamp != 0, bias_lc, + offset); + return Emit(&EmitContext::OpImageSparseSampleDrefImplicitLod, + &EmitContext::OpImageSampleDrefImplicitLod, ctx, inst, ctx.F32[1], + Texture(ctx, info, index), coords, dref, operands.MaskOptional(), operands.Span()); +} + +Id EmitImageSampleDrefExplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, + Id coords, Id dref, Id lod, const IR::Value& offset) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, false, true, false, lod, offset); + return Emit(&EmitContext::OpImageSparseSampleDrefExplicitLod, + &EmitContext::OpImageSampleDrefExplicitLod, ctx, inst, ctx.F32[1], + Texture(ctx, info, index), coords, dref, operands.Mask(), operands.Span()); +} + +Id EmitImageGather(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + const IR::Value& offset, const IR::Value& offset2) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, offset, offset2); + return Emit(&EmitContext::OpImageSparseGather, &EmitContext::OpImageGather, ctx, inst, + ctx.F32[4], Texture(ctx, info, index), coords, ctx.Const(info.gather_component), + operands.MaskOptional(), operands.Span()); +} + +Id EmitImageGatherDref(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + const IR::Value& offset, const IR::Value& offset2, Id dref) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, offset, offset2); + return Emit(&EmitContext::OpImageSparseDrefGather, &EmitContext::OpImageDrefGather, ctx, inst, + ctx.F32[4], Texture(ctx, info, index), coords, dref, operands.MaskOptional(), + operands.Span()); +} + +Id EmitImageFetch(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, Id offset, + Id lod, Id ms) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + if (info.type == TextureType::Buffer) { + lod = Id{}; + } + const ImageOperands operands(offset, lod, ms); + return Emit(&EmitContext::OpImageSparseFetch, &EmitContext::OpImageFetch, ctx, inst, ctx.F32[4], + TextureImage(ctx, info, index), coords, operands.MaskOptional(), operands.Span()); +} + +Id EmitImageQueryDimensions(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id lod) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const Id image{TextureImage(ctx, info, index)}; + const Id 
+ +Id EmitImageSampleExplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id lod, const IR::Value& offset) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, false, true, false, lod, offset); + return Emit(&EmitContext::OpImageSparseSampleExplicitLod, + &EmitContext::OpImageSampleExplicitLod, ctx, inst, ctx.F32[4], + Texture(ctx, info, index), coords, operands.Mask(), operands.Span()); +} + +Id EmitImageSampleDrefImplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, + Id coords, Id dref, Id bias_lc, const IR::Value& offset) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, info.has_bias != 0, false, info.has_lod_clamp != 0, bias_lc, + offset); + return Emit(&EmitContext::OpImageSparseSampleDrefImplicitLod, + &EmitContext::OpImageSampleDrefImplicitLod, ctx, inst, ctx.F32[1], + Texture(ctx, info, index), coords, dref, operands.MaskOptional(), operands.Span()); +} + +Id EmitImageSampleDrefExplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, + Id coords, Id dref, Id lod, const IR::Value& offset) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, false, true, false, lod, offset); + return Emit(&EmitContext::OpImageSparseSampleDrefExplicitLod, + &EmitContext::OpImageSampleDrefExplicitLod, ctx, inst, ctx.F32[1], + Texture(ctx, info, index), coords, dref, operands.Mask(), operands.Span()); +} + +Id EmitImageGather(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + const IR::Value& offset, const IR::Value& offset2) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, offset, offset2); + return Emit(&EmitContext::OpImageSparseGather, &EmitContext::OpImageGather, ctx, inst, + ctx.F32[4], Texture(ctx, info, index), coords, ctx.Const(info.gather_component), + operands.MaskOptional(), operands.Span()); +} + +Id EmitImageGatherDref(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + const IR::Value& offset, const IR::Value& offset2, Id dref) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, offset, offset2); + return Emit(&EmitContext::OpImageSparseDrefGather, &EmitContext::OpImageDrefGather, ctx, inst, + ctx.F32[4], Texture(ctx, info, index), coords, dref, operands.MaskOptional(), + operands.Span()); +} + +Id EmitImageFetch(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, Id offset, + Id lod, Id ms) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + if (info.type == TextureType::Buffer) { + lod = Id{}; + } + const ImageOperands operands(offset, lod, ms); + return Emit(&EmitContext::OpImageSparseFetch, &EmitContext::OpImageFetch, ctx, inst, ctx.F32[4], + TextureImage(ctx, info, index), coords, operands.MaskOptional(), operands.Span()); +} + +Id EmitImageQueryDimensions(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id lod) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const Id image{TextureImage(ctx, info, index)}; + const Id zero{ctx.u32_zero_value}; + const auto mips{[&] { return ctx.OpImageQueryLevels(ctx.U32[1], image); }}; + switch (info.type) { + case TextureType::Color1D: + return ctx.OpCompositeConstruct(ctx.U32[4], ctx.OpImageQuerySizeLod(ctx.U32[1], image, lod), + zero, zero, mips()); + case TextureType::ColorArray1D: + case TextureType::Color2D: + case TextureType::ColorCube: + return ctx.OpCompositeConstruct(ctx.U32[4], ctx.OpImageQuerySizeLod(ctx.U32[2], image, lod), + zero, mips()); + case TextureType::ColorArray2D: + case TextureType::Color3D: + case TextureType::ColorArrayCube: + return ctx.OpCompositeConstruct(ctx.U32[4], ctx.OpImageQuerySizeLod(ctx.U32[3], image, lod), + mips()); + case TextureType::Buffer: + return ctx.OpCompositeConstruct(ctx.U32[4], ctx.OpImageQuerySize(ctx.U32[1], image), zero, + zero, mips()); + } + throw LogicError("Unspecified image type {}", info.type.Value()); +} + +Id EmitImageQueryLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const Id zero{ctx.f32_zero_value}; + const Id sampler{Texture(ctx, info, index)}; + return ctx.OpCompositeConstruct(ctx.F32[4], ctx.OpImageQueryLod(ctx.F32[2], sampler, coords), + zero, zero); +} + +Id EmitImageGradient(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id derivates, Id offset, Id lod_clamp) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + const ImageOperands operands(ctx, info.has_lod_clamp != 0, derivates, info.num_derivates, + offset, lod_clamp); + return Emit(&EmitContext::OpImageSparseSampleExplicitLod, + &EmitContext::OpImageSampleExplicitLod, ctx, inst, ctx.F32[4], + Texture(ctx, info, index), coords, operands.Mask(), operands.Span()); +} + +Id EmitImageRead(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + if (info.image_format == ImageFormat::Typeless && !ctx.profile.support_typeless_image_loads) { + LOG_WARNING(Shader_SPIRV, "Typeless image read not supported by host"); + return ctx.ConstantNull(ctx.U32[4]); + } + return Emit(&EmitContext::OpImageSparseRead, &EmitContext::OpImageRead, ctx, inst, ctx.U32[4], + Image(ctx, index, info), coords, std::nullopt, std::span<const Id>{}); +} + +void EmitImageWrite(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, Id color) { + const auto info{inst->Flags<IR::TextureInstInfo>()}; + ctx.OpImageWrite(Image(ctx, index, info), coords, color); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_image_atomic.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_image_atomic.cpp new file mode 100755 index 000000000..d7f1a365a --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_image_atomic.cpp @@ -0,0 +1,183 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
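+ +// The 32-bit image atomics in this file share one pattern (see ImageAtomicU32 below): +// form a texel pointer with OpImageTexelPointer at sample index 0, then apply the matching +// OpAtomic* instruction at Device scope with relaxed (zero) memory semantics. Inc and Dec +// have no image-atomic equivalent in SPIR-V and currently throw.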
+ +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" +#include "shader_recompiler/frontend/ir/modifiers.h" + +namespace Shader::Backend::SPIRV { +namespace { +Id Image(EmitContext& ctx, const IR::Value& index, IR::TextureInstInfo info) { + if (!index.IsImmediate()) { + throw NotImplementedException("Indirect image indexing"); + } + if (info.type == TextureType::Buffer) { + const ImageBufferDefinition def{ctx.image_buffers.at(index.U32())}; + return def.id; + } else { + const ImageDefinition def{ctx.images.at(index.U32())}; + return def.id; + } +} + +std::pair AtomicArgs(EmitContext& ctx) { + const Id scope{ctx.Const(static_cast(spv::Scope::Device))}; + const Id semantics{ctx.u32_zero_value}; + return {scope, semantics}; +} + +Id ImageAtomicU32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, Id value, + Id (Sirit::Module::*atomic_func)(Id, Id, Id, Id, Id)) { + const auto info{inst->Flags()}; + const Id image{Image(ctx, index, info)}; + const Id pointer{ctx.OpImageTexelPointer(ctx.image_u32, image, coords, ctx.Const(0U))}; + const auto [scope, semantics]{AtomicArgs(ctx)}; + return (ctx.*atomic_func)(ctx.U32[1], pointer, scope, semantics, value); +} +} // Anonymous namespace + +Id EmitImageAtomicIAdd32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicIAdd); +} + +Id EmitImageAtomicSMin32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicSMin); +} + +Id EmitImageAtomicUMin32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicUMin); +} + +Id EmitImageAtomicSMax32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicSMax); +} + +Id EmitImageAtomicUMax32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicUMax); +} + +Id EmitImageAtomicInc32(EmitContext&, IR::Inst*, const IR::Value&, Id, Id) { + // TODO: This is not yet implemented + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitImageAtomicDec32(EmitContext&, IR::Inst*, const IR::Value&, Id, Id) { + // TODO: This is not yet implemented + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitImageAtomicAnd32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicAnd); +} + +Id EmitImageAtomicOr32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicOr); +} + +Id EmitImageAtomicXor32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicXor); +} + +Id EmitImageAtomicExchange32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value) { + return ImageAtomicU32(ctx, inst, index, coords, value, &Sirit::Module::OpAtomicExchange); +} + +Id EmitBindlessImageAtomicIAdd32(EmitContext&) { + throw NotImplementedException("SPIR-V 
Instruction"); +} + +Id EmitBindlessImageAtomicSMin32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicUMin32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicSMax32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicUMax32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicInc32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicDec32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicAnd32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicOr32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicXor32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBindlessImageAtomicExchange32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicIAdd32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicSMin32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicUMin32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicSMax32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicUMax32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicInc32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicDec32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicAnd32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicOr32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicXor32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitBoundImageAtomicExchange32(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_instructions.h b/src/shader_recompiler/backend/spirv/emit_spirv_instructions.h new file mode 100755 index 000000000..f99c02848 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_instructions.h @@ -0,0 +1,579 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include <sirit/sirit.h> + +#include "common/common_types.h" + +namespace Shader::IR { +enum class Attribute : u64; +enum class Patch : u64; +class Inst; +class Value; +} // namespace Shader::IR + +namespace Shader::Backend::SPIRV { + +using Sirit::Id; + +class EmitContext; + +// Microinstruction emitters +Id EmitPhi(EmitContext& ctx, IR::Inst* inst); +void EmitVoid(EmitContext& ctx); +Id EmitIdentity(EmitContext& ctx, const IR::Value& value); +Id EmitConditionRef(EmitContext& ctx, const IR::Value& value); +void EmitReference(EmitContext&); +void EmitPhiMove(EmitContext&); +void EmitJoin(EmitContext& ctx); +void EmitDemoteToHelperInvocation(EmitContext& ctx); +void EmitBarrier(EmitContext& ctx); +void EmitWorkgroupMemoryBarrier(EmitContext& ctx); +void EmitDeviceMemoryBarrier(EmitContext& ctx); +void EmitPrologue(EmitContext& ctx); +void EmitEpilogue(EmitContext& ctx); +void EmitEmitVertex(EmitContext& ctx, const IR::Value& stream); +void EmitEndPrimitive(EmitContext& ctx, const IR::Value& stream); +void EmitGetRegister(EmitContext& ctx); +void EmitSetRegister(EmitContext& ctx); +void EmitGetPred(EmitContext& ctx); +void EmitSetPred(EmitContext& ctx); +void EmitSetGotoVariable(EmitContext& ctx); +void EmitGetGotoVariable(EmitContext& ctx); +void EmitSetIndirectBranchVariable(EmitContext& ctx); +void EmitGetIndirectBranchVariable(EmitContext& ctx); +Id EmitGetCbufU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitGetCbufS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitGetCbufU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitGetCbufS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitGetCbufU32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitGetCbufF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitGetCbufU32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitGetAttribute(EmitContext& ctx, IR::Attribute attr, Id vertex); +void EmitSetAttribute(EmitContext& ctx, IR::Attribute attr, Id value, Id vertex); +Id EmitGetAttributeIndexed(EmitContext& ctx, Id offset, Id vertex); +void EmitSetAttributeIndexed(EmitContext& ctx, Id offset, Id value, Id vertex); +Id EmitGetPatch(EmitContext& ctx, IR::Patch patch); +void EmitSetPatch(EmitContext& ctx, IR::Patch patch, Id value); +void EmitSetFragColor(EmitContext& ctx, u32 index, u32 component, Id value); +void EmitSetSampleMask(EmitContext& ctx, Id value); +void EmitSetFragDepth(EmitContext& ctx, Id value); +void EmitGetZFlag(EmitContext& ctx); +void EmitGetSFlag(EmitContext& ctx); +void EmitGetCFlag(EmitContext& ctx); +void EmitGetOFlag(EmitContext& ctx); +void EmitSetZFlag(EmitContext& ctx); +void EmitSetSFlag(EmitContext& ctx); +void EmitSetCFlag(EmitContext& ctx); +void EmitSetOFlag(EmitContext& ctx); +Id EmitWorkgroupId(EmitContext& ctx); +Id EmitLocalInvocationId(EmitContext& ctx); +Id EmitInvocationId(EmitContext& ctx); +Id EmitSampleId(EmitContext& ctx); +Id EmitIsHelperInvocation(EmitContext& ctx); +Id EmitYDirection(EmitContext& ctx); +Id EmitLoadLocal(EmitContext& ctx, Id word_offset); +void EmitWriteLocal(EmitContext& ctx, Id word_offset, Id value); +Id EmitUndefU1(EmitContext& ctx); +Id EmitUndefU8(EmitContext& ctx); +Id EmitUndefU16(EmitContext& ctx); +Id EmitUndefU32(EmitContext& ctx); +Id EmitUndefU64(EmitContext& ctx); +void EmitLoadGlobalU8(EmitContext& ctx); +void EmitLoadGlobalS8(EmitContext& ctx); +void EmitLoadGlobalU16(EmitContext&
ctx); +void EmitLoadGlobalS16(EmitContext& ctx); +Id EmitLoadGlobal32(EmitContext& ctx, Id address); +Id EmitLoadGlobal64(EmitContext& ctx, Id address); +Id EmitLoadGlobal128(EmitContext& ctx, Id address); +void EmitWriteGlobalU8(EmitContext& ctx); +void EmitWriteGlobalS8(EmitContext& ctx); +void EmitWriteGlobalU16(EmitContext& ctx); +void EmitWriteGlobalS16(EmitContext& ctx); +void EmitWriteGlobal32(EmitContext& ctx, Id address, Id value); +void EmitWriteGlobal64(EmitContext& ctx, Id address, Id value); +void EmitWriteGlobal128(EmitContext& ctx, Id address, Id value); +Id EmitLoadStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitLoadStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitLoadStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitLoadStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitLoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitLoadStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +Id EmitLoadStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset); +void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +void EmitWriteStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +void EmitWriteStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitLoadSharedU8(EmitContext& ctx, Id offset); +Id EmitLoadSharedS8(EmitContext& ctx, Id offset); +Id EmitLoadSharedU16(EmitContext& ctx, Id offset); +Id EmitLoadSharedS16(EmitContext& ctx, Id offset); +Id EmitLoadSharedU32(EmitContext& ctx, Id offset); +Id EmitLoadSharedU64(EmitContext& ctx, Id offset); +Id EmitLoadSharedU128(EmitContext& ctx, Id offset); +void EmitWriteSharedU8(EmitContext& ctx, Id offset, Id value); +void EmitWriteSharedU16(EmitContext& ctx, Id offset, Id value); +void EmitWriteSharedU32(EmitContext& ctx, Id offset, Id value); +void EmitWriteSharedU64(EmitContext& ctx, Id offset, Id value); +void EmitWriteSharedU128(EmitContext& ctx, Id offset, Id value); +Id EmitCompositeConstructU32x2(EmitContext& ctx, Id e1, Id e2); +Id EmitCompositeConstructU32x3(EmitContext& ctx, Id e1, Id e2, Id e3); +Id EmitCompositeConstructU32x4(EmitContext& ctx, Id e1, Id e2, Id e3, Id e4); +Id EmitCompositeExtractU32x2(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeExtractU32x3(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeExtractU32x4(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeInsertU32x2(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertU32x3(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertU32x4(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeConstructF16x2(EmitContext& ctx, Id e1, Id e2); +Id EmitCompositeConstructF16x3(EmitContext& ctx, Id e1, Id e2, Id e3); +Id EmitCompositeConstructF16x4(EmitContext& ctx, Id e1, Id e2, 
Id e3, Id e4); +Id EmitCompositeExtractF16x2(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeExtractF16x3(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeExtractF16x4(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeInsertF16x2(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertF16x3(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertF16x4(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeConstructF32x2(EmitContext& ctx, Id e1, Id e2); +Id EmitCompositeConstructF32x3(EmitContext& ctx, Id e1, Id e2, Id e3); +Id EmitCompositeConstructF32x4(EmitContext& ctx, Id e1, Id e2, Id e3, Id e4); +Id EmitCompositeExtractF32x2(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeExtractF32x3(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeExtractF32x4(EmitContext& ctx, Id composite, u32 index); +Id EmitCompositeInsertF32x2(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertF32x3(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertF32x4(EmitContext& ctx, Id composite, Id object, u32 index); +void EmitCompositeConstructF64x2(EmitContext& ctx); +void EmitCompositeConstructF64x3(EmitContext& ctx); +void EmitCompositeConstructF64x4(EmitContext& ctx); +void EmitCompositeExtractF64x2(EmitContext& ctx); +void EmitCompositeExtractF64x3(EmitContext& ctx); +void EmitCompositeExtractF64x4(EmitContext& ctx); +Id EmitCompositeInsertF64x2(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertF64x3(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitCompositeInsertF64x4(EmitContext& ctx, Id composite, Id object, u32 index); +Id EmitSelectU1(EmitContext& ctx, Id cond, Id true_value, Id false_value); +Id EmitSelectU8(EmitContext& ctx, Id cond, Id true_value, Id false_value); +Id EmitSelectU16(EmitContext& ctx, Id cond, Id true_value, Id false_value); +Id EmitSelectU32(EmitContext& ctx, Id cond, Id true_value, Id false_value); +Id EmitSelectU64(EmitContext& ctx, Id cond, Id true_value, Id false_value); +Id EmitSelectF16(EmitContext& ctx, Id cond, Id true_value, Id false_value); +Id EmitSelectF32(EmitContext& ctx, Id cond, Id true_value, Id false_value); +Id EmitSelectF64(EmitContext& ctx, Id cond, Id true_value, Id false_value); +void EmitBitCastU16F16(EmitContext& ctx); +Id EmitBitCastU32F32(EmitContext& ctx, Id value); +void EmitBitCastU64F64(EmitContext& ctx); +void EmitBitCastF16U16(EmitContext& ctx); +Id EmitBitCastF32U32(EmitContext& ctx, Id value); +void EmitBitCastF64U64(EmitContext& ctx); +Id EmitPackUint2x32(EmitContext& ctx, Id value); +Id EmitUnpackUint2x32(EmitContext& ctx, Id value); +Id EmitPackFloat2x16(EmitContext& ctx, Id value); +Id EmitUnpackFloat2x16(EmitContext& ctx, Id value); +Id EmitPackHalf2x16(EmitContext& ctx, Id value); +Id EmitUnpackHalf2x16(EmitContext& ctx, Id value); +Id EmitPackDouble2x32(EmitContext& ctx, Id value); +Id EmitUnpackDouble2x32(EmitContext& ctx, Id value); +void EmitGetZeroFromOp(EmitContext& ctx); +void EmitGetSignFromOp(EmitContext& ctx); +void EmitGetCarryFromOp(EmitContext& ctx); +void EmitGetOverflowFromOp(EmitContext& ctx); +void EmitGetSparseFromOp(EmitContext& ctx); +void EmitGetInBoundsFromOp(EmitContext& ctx); +Id EmitFPAbs16(EmitContext& ctx, Id value); +Id EmitFPAbs32(EmitContext& ctx, Id value); +Id EmitFPAbs64(EmitContext& ctx, Id value); +Id EmitFPAdd16(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitFPAdd32(EmitContext& ctx, 
IR::Inst* inst, Id a, Id b); +Id EmitFPAdd64(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitFPFma16(EmitContext& ctx, IR::Inst* inst, Id a, Id b, Id c); +Id EmitFPFma32(EmitContext& ctx, IR::Inst* inst, Id a, Id b, Id c); +Id EmitFPFma64(EmitContext& ctx, IR::Inst* inst, Id a, Id b, Id c); +Id EmitFPMax32(EmitContext& ctx, Id a, Id b); +Id EmitFPMax64(EmitContext& ctx, Id a, Id b); +Id EmitFPMin32(EmitContext& ctx, Id a, Id b); +Id EmitFPMin64(EmitContext& ctx, Id a, Id b); +Id EmitFPMul16(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitFPMul32(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitFPMul64(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitFPNeg16(EmitContext& ctx, Id value); +Id EmitFPNeg32(EmitContext& ctx, Id value); +Id EmitFPNeg64(EmitContext& ctx, Id value); +Id EmitFPSin(EmitContext& ctx, Id value); +Id EmitFPCos(EmitContext& ctx, Id value); +Id EmitFPExp2(EmitContext& ctx, Id value); +Id EmitFPLog2(EmitContext& ctx, Id value); +Id EmitFPRecip32(EmitContext& ctx, Id value); +Id EmitFPRecip64(EmitContext& ctx, Id value); +Id EmitFPRecipSqrt32(EmitContext& ctx, Id value); +Id EmitFPRecipSqrt64(EmitContext& ctx, Id value); +Id EmitFPSqrt(EmitContext& ctx, Id value); +Id EmitFPSaturate16(EmitContext& ctx, Id value); +Id EmitFPSaturate32(EmitContext& ctx, Id value); +Id EmitFPSaturate64(EmitContext& ctx, Id value); +Id EmitFPClamp16(EmitContext& ctx, Id value, Id min_value, Id max_value); +Id EmitFPClamp32(EmitContext& ctx, Id value, Id min_value, Id max_value); +Id EmitFPClamp64(EmitContext& ctx, Id value, Id min_value, Id max_value); +Id EmitFPRoundEven16(EmitContext& ctx, Id value); +Id EmitFPRoundEven32(EmitContext& ctx, Id value); +Id EmitFPRoundEven64(EmitContext& ctx, Id value); +Id EmitFPFloor16(EmitContext& ctx, Id value); +Id EmitFPFloor32(EmitContext& ctx, Id value); +Id EmitFPFloor64(EmitContext& ctx, Id value); +Id EmitFPCeil16(EmitContext& ctx, Id value); +Id EmitFPCeil32(EmitContext& ctx, Id value); +Id EmitFPCeil64(EmitContext& ctx, Id value); +Id EmitFPTrunc16(EmitContext& ctx, Id value); +Id EmitFPTrunc32(EmitContext& ctx, Id value); +Id EmitFPTrunc64(EmitContext& ctx, Id value); +Id EmitFPOrdEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdNotEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdNotEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdNotEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordNotEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordNotEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordNotEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdLessThan16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdLessThan32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdLessThan64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordLessThan16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordLessThan32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordLessThan64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdGreaterThan16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdGreaterThan32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdGreaterThan64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordGreaterThan16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordGreaterThan32(EmitContext& ctx, Id lhs, Id rhs); +Id 
EmitFPUnordGreaterThan64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdLessThanEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdLessThanEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdLessThanEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordLessThanEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordLessThanEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordLessThanEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdGreaterThanEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdGreaterThanEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPOrdGreaterThanEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordGreaterThanEqual16(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordGreaterThanEqual32(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPUnordGreaterThanEqual64(EmitContext& ctx, Id lhs, Id rhs); +Id EmitFPIsNan16(EmitContext& ctx, Id value); +Id EmitFPIsNan32(EmitContext& ctx, Id value); +Id EmitFPIsNan64(EmitContext& ctx, Id value); +Id EmitIAdd32(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitIAdd64(EmitContext& ctx, Id a, Id b); +Id EmitISub32(EmitContext& ctx, Id a, Id b); +Id EmitISub64(EmitContext& ctx, Id a, Id b); +Id EmitIMul32(EmitContext& ctx, Id a, Id b); +Id EmitINeg32(EmitContext& ctx, Id value); +Id EmitINeg64(EmitContext& ctx, Id value); +Id EmitIAbs32(EmitContext& ctx, Id value); +Id EmitShiftLeftLogical32(EmitContext& ctx, Id base, Id shift); +Id EmitShiftLeftLogical64(EmitContext& ctx, Id base, Id shift); +Id EmitShiftRightLogical32(EmitContext& ctx, Id base, Id shift); +Id EmitShiftRightLogical64(EmitContext& ctx, Id base, Id shift); +Id EmitShiftRightArithmetic32(EmitContext& ctx, Id base, Id shift); +Id EmitShiftRightArithmetic64(EmitContext& ctx, Id base, Id shift); +Id EmitBitwiseAnd32(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitBitwiseOr32(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitBitwiseXor32(EmitContext& ctx, IR::Inst* inst, Id a, Id b); +Id EmitBitFieldInsert(EmitContext& ctx, Id base, Id insert, Id offset, Id count); +Id EmitBitFieldSExtract(EmitContext& ctx, IR::Inst* inst, Id base, Id offset, Id count); +Id EmitBitFieldUExtract(EmitContext& ctx, IR::Inst* inst, Id base, Id offset, Id count); +Id EmitBitReverse32(EmitContext& ctx, Id value); +Id EmitBitCount32(EmitContext& ctx, Id value); +Id EmitBitwiseNot32(EmitContext& ctx, Id value); +Id EmitFindSMsb32(EmitContext& ctx, Id value); +Id EmitFindUMsb32(EmitContext& ctx, Id value); +Id EmitSMin32(EmitContext& ctx, Id a, Id b); +Id EmitUMin32(EmitContext& ctx, Id a, Id b); +Id EmitSMax32(EmitContext& ctx, Id a, Id b); +Id EmitUMax32(EmitContext& ctx, Id a, Id b); +Id EmitSClamp32(EmitContext& ctx, IR::Inst* inst, Id value, Id min, Id max); +Id EmitUClamp32(EmitContext& ctx, IR::Inst* inst, Id value, Id min, Id max); +Id EmitSLessThan(EmitContext& ctx, Id lhs, Id rhs); +Id EmitULessThan(EmitContext& ctx, Id lhs, Id rhs); +Id EmitIEqual(EmitContext& ctx, Id lhs, Id rhs); +Id EmitSLessThanEqual(EmitContext& ctx, Id lhs, Id rhs); +Id EmitULessThanEqual(EmitContext& ctx, Id lhs, Id rhs); +Id EmitSGreaterThan(EmitContext& ctx, Id lhs, Id rhs); +Id EmitUGreaterThan(EmitContext& ctx, Id lhs, Id rhs); +Id EmitINotEqual(EmitContext& ctx, Id lhs, Id rhs); +Id EmitSGreaterThanEqual(EmitContext& ctx, Id lhs, Id rhs); +Id EmitUGreaterThanEqual(EmitContext& ctx, Id lhs, Id rhs); +Id EmitSharedAtomicIAdd32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicSMin32(EmitContext& ctx, Id pointer_offset, Id value); +Id 
EmitSharedAtomicUMin32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicSMax32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicUMax32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicInc32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicDec32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicAnd32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicOr32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicXor32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicExchange32(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitSharedAtomicExchange64(EmitContext& ctx, Id pointer_offset, Id value); +Id EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicUMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicSMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicUMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicInc32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicDec32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicAnd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicOr32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicXor32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicExchange32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicIAdd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicSMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicUMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicSMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicUMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicAnd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicOr64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicAddF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicMinF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicMinF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicMaxF16x2(EmitContext& ctx, 
const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitStorageAtomicMaxF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value); +Id EmitGlobalAtomicIAdd32(EmitContext& ctx); +Id EmitGlobalAtomicSMin32(EmitContext& ctx); +Id EmitGlobalAtomicUMin32(EmitContext& ctx); +Id EmitGlobalAtomicSMax32(EmitContext& ctx); +Id EmitGlobalAtomicUMax32(EmitContext& ctx); +Id EmitGlobalAtomicInc32(EmitContext& ctx); +Id EmitGlobalAtomicDec32(EmitContext& ctx); +Id EmitGlobalAtomicAnd32(EmitContext& ctx); +Id EmitGlobalAtomicOr32(EmitContext& ctx); +Id EmitGlobalAtomicXor32(EmitContext& ctx); +Id EmitGlobalAtomicExchange32(EmitContext& ctx); +Id EmitGlobalAtomicIAdd64(EmitContext& ctx); +Id EmitGlobalAtomicSMin64(EmitContext& ctx); +Id EmitGlobalAtomicUMin64(EmitContext& ctx); +Id EmitGlobalAtomicSMax64(EmitContext& ctx); +Id EmitGlobalAtomicUMax64(EmitContext& ctx); +Id EmitGlobalAtomicInc64(EmitContext& ctx); +Id EmitGlobalAtomicDec64(EmitContext& ctx); +Id EmitGlobalAtomicAnd64(EmitContext& ctx); +Id EmitGlobalAtomicOr64(EmitContext& ctx); +Id EmitGlobalAtomicXor64(EmitContext& ctx); +Id EmitGlobalAtomicExchange64(EmitContext& ctx); +Id EmitGlobalAtomicAddF32(EmitContext& ctx); +Id EmitGlobalAtomicAddF16x2(EmitContext& ctx); +Id EmitGlobalAtomicAddF32x2(EmitContext& ctx); +Id EmitGlobalAtomicMinF16x2(EmitContext& ctx); +Id EmitGlobalAtomicMinF32x2(EmitContext& ctx); +Id EmitGlobalAtomicMaxF16x2(EmitContext& ctx); +Id EmitGlobalAtomicMaxF32x2(EmitContext& ctx); +Id EmitLogicalOr(EmitContext& ctx, Id a, Id b); +Id EmitLogicalAnd(EmitContext& ctx, Id a, Id b); +Id EmitLogicalXor(EmitContext& ctx, Id a, Id b); +Id EmitLogicalNot(EmitContext& ctx, Id value); +Id EmitConvertS16F16(EmitContext& ctx, Id value); +Id EmitConvertS16F32(EmitContext& ctx, Id value); +Id EmitConvertS16F64(EmitContext& ctx, Id value); +Id EmitConvertS32F16(EmitContext& ctx, Id value); +Id EmitConvertS32F32(EmitContext& ctx, Id value); +Id EmitConvertS32F64(EmitContext& ctx, Id value); +Id EmitConvertS64F16(EmitContext& ctx, Id value); +Id EmitConvertS64F32(EmitContext& ctx, Id value); +Id EmitConvertS64F64(EmitContext& ctx, Id value); +Id EmitConvertU16F16(EmitContext& ctx, Id value); +Id EmitConvertU16F32(EmitContext& ctx, Id value); +Id EmitConvertU16F64(EmitContext& ctx, Id value); +Id EmitConvertU32F16(EmitContext& ctx, Id value); +Id EmitConvertU32F32(EmitContext& ctx, Id value); +Id EmitConvertU32F64(EmitContext& ctx, Id value); +Id EmitConvertU64F16(EmitContext& ctx, Id value); +Id EmitConvertU64F32(EmitContext& ctx, Id value); +Id EmitConvertU64F64(EmitContext& ctx, Id value); +Id EmitConvertU64U32(EmitContext& ctx, Id value); +Id EmitConvertU32U64(EmitContext& ctx, Id value); +Id EmitConvertF16F32(EmitContext& ctx, Id value); +Id EmitConvertF32F16(EmitContext& ctx, Id value); +Id EmitConvertF32F64(EmitContext& ctx, Id value); +Id EmitConvertF64F32(EmitContext& ctx, Id value); +Id EmitConvertF16S8(EmitContext& ctx, Id value); +Id EmitConvertF16S16(EmitContext& ctx, Id value); +Id EmitConvertF16S32(EmitContext& ctx, Id value); +Id EmitConvertF16S64(EmitContext& ctx, Id value); +Id EmitConvertF16U8(EmitContext& ctx, Id value); +Id EmitConvertF16U16(EmitContext& ctx, Id value); +Id EmitConvertF16U32(EmitContext& ctx, Id value); +Id EmitConvertF16U64(EmitContext& ctx, Id value); +Id EmitConvertF32S8(EmitContext& ctx, Id value); +Id EmitConvertF32S16(EmitContext& ctx, Id value); +Id EmitConvertF32S32(EmitContext& ctx, Id value); +Id EmitConvertF32S64(EmitContext& ctx, Id value); +Id 
EmitConvertF32U8(EmitContext& ctx, Id value); +Id EmitConvertF32U16(EmitContext& ctx, Id value); +Id EmitConvertF32U32(EmitContext& ctx, Id value); +Id EmitConvertF32U64(EmitContext& ctx, Id value); +Id EmitConvertF64S8(EmitContext& ctx, Id value); +Id EmitConvertF64S16(EmitContext& ctx, Id value); +Id EmitConvertF64S32(EmitContext& ctx, Id value); +Id EmitConvertF64S64(EmitContext& ctx, Id value); +Id EmitConvertF64U8(EmitContext& ctx, Id value); +Id EmitConvertF64U16(EmitContext& ctx, Id value); +Id EmitConvertF64U32(EmitContext& ctx, Id value); +Id EmitConvertF64U64(EmitContext& ctx, Id value); +Id EmitBindlessImageSampleImplicitLod(EmitContext&); +Id EmitBindlessImageSampleExplicitLod(EmitContext&); +Id EmitBindlessImageSampleDrefImplicitLod(EmitContext&); +Id EmitBindlessImageSampleDrefExplicitLod(EmitContext&); +Id EmitBindlessImageGather(EmitContext&); +Id EmitBindlessImageGatherDref(EmitContext&); +Id EmitBindlessImageFetch(EmitContext&); +Id EmitBindlessImageQueryDimensions(EmitContext&); +Id EmitBindlessImageQueryLod(EmitContext&); +Id EmitBindlessImageGradient(EmitContext&); +Id EmitBindlessImageRead(EmitContext&); +Id EmitBindlessImageWrite(EmitContext&); +Id EmitBoundImageSampleImplicitLod(EmitContext&); +Id EmitBoundImageSampleExplicitLod(EmitContext&); +Id EmitBoundImageSampleDrefImplicitLod(EmitContext&); +Id EmitBoundImageSampleDrefExplicitLod(EmitContext&); +Id EmitBoundImageGather(EmitContext&); +Id EmitBoundImageGatherDref(EmitContext&); +Id EmitBoundImageFetch(EmitContext&); +Id EmitBoundImageQueryDimensions(EmitContext&); +Id EmitBoundImageQueryLod(EmitContext&); +Id EmitBoundImageGradient(EmitContext&); +Id EmitBoundImageRead(EmitContext&); +Id EmitBoundImageWrite(EmitContext&); +Id EmitImageSampleImplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id bias_lc, const IR::Value& offset); +Id EmitImageSampleExplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id lod, const IR::Value& offset); +Id EmitImageSampleDrefImplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, + Id coords, Id dref, Id bias_lc, const IR::Value& offset); +Id EmitImageSampleDrefExplicitLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, + Id coords, Id dref, Id lod, const IR::Value& offset); +Id EmitImageGather(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + const IR::Value& offset, const IR::Value& offset2); +Id EmitImageGatherDref(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + const IR::Value& offset, const IR::Value& offset2, Id dref); +Id EmitImageFetch(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, Id offset, + Id lod, Id ms); +Id EmitImageQueryDimensions(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id lod); +Id EmitImageQueryLod(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords); +Id EmitImageGradient(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id derivates, Id offset, Id lod_clamp); +Id EmitImageRead(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords); +void EmitImageWrite(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, Id color); +Id EmitBindlessImageAtomicIAdd32(EmitContext&); +Id EmitBindlessImageAtomicSMin32(EmitContext&); +Id EmitBindlessImageAtomicUMin32(EmitContext&); +Id EmitBindlessImageAtomicSMax32(EmitContext&); +Id EmitBindlessImageAtomicUMax32(EmitContext&); +Id EmitBindlessImageAtomicInc32(EmitContext&); +Id 
EmitBindlessImageAtomicDec32(EmitContext&); +Id EmitBindlessImageAtomicAnd32(EmitContext&); +Id EmitBindlessImageAtomicOr32(EmitContext&); +Id EmitBindlessImageAtomicXor32(EmitContext&); +Id EmitBindlessImageAtomicExchange32(EmitContext&); +Id EmitBoundImageAtomicIAdd32(EmitContext&); +Id EmitBoundImageAtomicSMin32(EmitContext&); +Id EmitBoundImageAtomicUMin32(EmitContext&); +Id EmitBoundImageAtomicSMax32(EmitContext&); +Id EmitBoundImageAtomicUMax32(EmitContext&); +Id EmitBoundImageAtomicInc32(EmitContext&); +Id EmitBoundImageAtomicDec32(EmitContext&); +Id EmitBoundImageAtomicAnd32(EmitContext&); +Id EmitBoundImageAtomicOr32(EmitContext&); +Id EmitBoundImageAtomicXor32(EmitContext&); +Id EmitBoundImageAtomicExchange32(EmitContext&); +Id EmitImageAtomicIAdd32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicSMin32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicUMin32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicSMax32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicUMax32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicInc32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicDec32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicAnd32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicOr32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicXor32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitImageAtomicExchange32(EmitContext& ctx, IR::Inst* inst, const IR::Value& index, Id coords, + Id value); +Id EmitLaneId(EmitContext& ctx); +Id EmitVoteAll(EmitContext& ctx, Id pred); +Id EmitVoteAny(EmitContext& ctx, Id pred); +Id EmitVoteEqual(EmitContext& ctx, Id pred); +Id EmitSubgroupBallot(EmitContext& ctx, Id pred); +Id EmitSubgroupEqMask(EmitContext& ctx); +Id EmitSubgroupLtMask(EmitContext& ctx); +Id EmitSubgroupLeMask(EmitContext& ctx); +Id EmitSubgroupGtMask(EmitContext& ctx); +Id EmitSubgroupGeMask(EmitContext& ctx); +Id EmitShuffleIndex(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask); +Id EmitShuffleUp(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask); +Id EmitShuffleDown(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask); +Id EmitShuffleButterfly(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask); +Id EmitFSwizzleAdd(EmitContext& ctx, Id op_a, Id op_b, Id swizzle); +Id EmitDPdxFine(EmitContext& ctx, Id op_a); +Id EmitDPdyFine(EmitContext& ctx, Id op_a); +Id EmitDPdxCoarse(EmitContext& ctx, Id op_a); +Id EmitDPdyCoarse(EmitContext& ctx, Id op_a); + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_integer.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_integer.cpp new file mode 100755 index 000000000..3501d7495 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_integer.cpp @@ -0,0 +1,270 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +void SetZeroFlag(EmitContext& ctx, IR::Inst* inst, Id result) { + IR::Inst* const zero{inst->GetAssociatedPseudoOperation(IR::Opcode::GetZeroFromOp)}; + if (!zero) { + return; + } + zero->SetDefinition(ctx.OpIEqual(ctx.U1, result, ctx.u32_zero_value)); + zero->Invalidate(); +} + +void SetSignFlag(EmitContext& ctx, IR::Inst* inst, Id result) { + IR::Inst* const sign{inst->GetAssociatedPseudoOperation(IR::Opcode::GetSignFromOp)}; + if (!sign) { + return; + } + sign->SetDefinition(ctx.OpSLessThan(ctx.U1, result, ctx.u32_zero_value)); + sign->Invalidate(); +} +} // Anonymous namespace + +Id EmitIAdd32(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + Id result{}; + if (IR::Inst* const carry{inst->GetAssociatedPseudoOperation(IR::Opcode::GetCarryFromOp)}) { + const Id carry_type{ctx.TypeStruct(ctx.U32[1], ctx.U32[1])}; + const Id carry_result{ctx.OpIAddCarry(carry_type, a, b)}; + result = ctx.OpCompositeExtract(ctx.U32[1], carry_result, 0U); + + const Id carry_value{ctx.OpCompositeExtract(ctx.U32[1], carry_result, 1U)}; + carry->SetDefinition(ctx.OpINotEqual(ctx.U1, carry_value, ctx.u32_zero_value)); + carry->Invalidate(); + } else { + result = ctx.OpIAdd(ctx.U32[1], a, b); + } + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + if (IR::Inst * overflow{inst->GetAssociatedPseudoOperation(IR::Opcode::GetOverflowFromOp)}) { + // https://stackoverflow.com/questions/55468823/how-to-detect-integer-overflow-in-c + constexpr u32 s32_max{static_cast(std::numeric_limits::max())}; + const Id is_positive{ctx.OpSGreaterThanEqual(ctx.U1, a, ctx.u32_zero_value)}; + const Id sub_a{ctx.OpISub(ctx.U32[1], ctx.Const(s32_max), a)}; + + const Id positive_test{ctx.OpSGreaterThan(ctx.U1, b, sub_a)}; + const Id negative_test{ctx.OpSLessThan(ctx.U1, b, sub_a)}; + const Id carry_flag{ctx.OpSelect(ctx.U1, is_positive, positive_test, negative_test)}; + overflow->SetDefinition(carry_flag); + overflow->Invalidate(); + } + return result; +} + +Id EmitIAdd64(EmitContext& ctx, Id a, Id b) { + return ctx.OpIAdd(ctx.U64, a, b); +} + +Id EmitISub32(EmitContext& ctx, Id a, Id b) { + return ctx.OpISub(ctx.U32[1], a, b); +} + +Id EmitISub64(EmitContext& ctx, Id a, Id b) { + return ctx.OpISub(ctx.U64, a, b); +} + +Id EmitIMul32(EmitContext& ctx, Id a, Id b) { + return ctx.OpIMul(ctx.U32[1], a, b); +} + +Id EmitINeg32(EmitContext& ctx, Id value) { + return ctx.OpSNegate(ctx.U32[1], value); +} + +Id EmitINeg64(EmitContext& ctx, Id value) { + return ctx.OpSNegate(ctx.U64, value); +} + +Id EmitIAbs32(EmitContext& ctx, Id value) { + return ctx.OpSAbs(ctx.U32[1], value); +} + +Id EmitShiftLeftLogical32(EmitContext& ctx, Id base, Id shift) { + return ctx.OpShiftLeftLogical(ctx.U32[1], base, shift); +} + +Id EmitShiftLeftLogical64(EmitContext& ctx, Id base, Id shift) { + return ctx.OpShiftLeftLogical(ctx.U64, base, shift); +} + +Id EmitShiftRightLogical32(EmitContext& ctx, Id base, Id shift) { + return ctx.OpShiftRightLogical(ctx.U32[1], base, shift); +} + +Id EmitShiftRightLogical64(EmitContext& ctx, Id base, Id shift) { + return ctx.OpShiftRightLogical(ctx.U64, base, shift); +} + +Id EmitShiftRightArithmetic32(EmitContext& ctx, Id base, Id shift) { + return ctx.OpShiftRightArithmetic(ctx.U32[1], base, shift); +} + +Id EmitShiftRightArithmetic64(EmitContext& ctx, Id base, Id shift) { + return ctx.OpShiftRightArithmetic(ctx.U64, base, shift); +} 
+ +Id EmitBitwiseAnd32(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + const Id result{ctx.OpBitwiseAnd(ctx.U32[1], a, b)}; + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + return result; +} + +Id EmitBitwiseOr32(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + const Id result{ctx.OpBitwiseOr(ctx.U32[1], a, b)}; + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + return result; +} + +Id EmitBitwiseXor32(EmitContext& ctx, IR::Inst* inst, Id a, Id b) { + const Id result{ctx.OpBitwiseXor(ctx.U32[1], a, b)}; + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + return result; +} + +Id EmitBitFieldInsert(EmitContext& ctx, Id base, Id insert, Id offset, Id count) { + return ctx.OpBitFieldInsert(ctx.U32[1], base, insert, offset, count); +} + +Id EmitBitFieldSExtract(EmitContext& ctx, IR::Inst* inst, Id base, Id offset, Id count) { + const Id result{ctx.OpBitFieldSExtract(ctx.U32[1], base, offset, count)}; + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + return result; +} + +Id EmitBitFieldUExtract(EmitContext& ctx, IR::Inst* inst, Id base, Id offset, Id count) { + const Id result{ctx.OpBitFieldUExtract(ctx.U32[1], base, offset, count)}; + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + return result; +} + +Id EmitBitReverse32(EmitContext& ctx, Id value) { + return ctx.OpBitReverse(ctx.U32[1], value); +} + +Id EmitBitCount32(EmitContext& ctx, Id value) { + return ctx.OpBitCount(ctx.U32[1], value); +} + +Id EmitBitwiseNot32(EmitContext& ctx, Id value) { + return ctx.OpNot(ctx.U32[1], value); +} + +Id EmitFindSMsb32(EmitContext& ctx, Id value) { + return ctx.OpFindSMsb(ctx.U32[1], value); +} + +Id EmitFindUMsb32(EmitContext& ctx, Id value) { + return ctx.OpFindUMsb(ctx.U32[1], value); +} + +Id EmitSMin32(EmitContext& ctx, Id a, Id b) { + const bool is_broken{ctx.profile.has_broken_signed_operations}; + if (is_broken) { + a = ctx.OpBitcast(ctx.S32[1], a); + b = ctx.OpBitcast(ctx.S32[1], b); + } + const Id result{ctx.OpSMin(ctx.U32[1], a, b)}; + return is_broken ? ctx.OpBitcast(ctx.U32[1], result) : result; +} + +Id EmitUMin32(EmitContext& ctx, Id a, Id b) { + return ctx.OpUMin(ctx.U32[1], a, b); +} + +Id EmitSMax32(EmitContext& ctx, Id a, Id b) { + const bool is_broken{ctx.profile.has_broken_signed_operations}; + if (is_broken) { + a = ctx.OpBitcast(ctx.S32[1], a); + b = ctx.OpBitcast(ctx.S32[1], b); + } + const Id result{ctx.OpSMax(ctx.U32[1], a, b)}; + return is_broken ? 
ctx.OpBitcast(ctx.U32[1], result) : result; +} + +Id EmitUMax32(EmitContext& ctx, Id a, Id b) { + return ctx.OpUMax(ctx.U32[1], a, b); +} + +Id EmitSClamp32(EmitContext& ctx, IR::Inst* inst, Id value, Id min, Id max) { + Id result{}; + if (ctx.profile.has_broken_signed_operations || ctx.profile.has_broken_spirv_clamp) { + value = ctx.OpBitcast(ctx.S32[1], value); + min = ctx.OpBitcast(ctx.S32[1], min); + max = ctx.OpBitcast(ctx.S32[1], max); + if (ctx.profile.has_broken_spirv_clamp) { + result = ctx.OpSMax(ctx.S32[1], ctx.OpSMin(ctx.S32[1], value, max), min); + } else { + result = ctx.OpSClamp(ctx.S32[1], value, min, max); + } + result = ctx.OpBitcast(ctx.U32[1], result); + } else { + result = ctx.OpSClamp(ctx.U32[1], value, min, max); + } + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + return result; +} + +Id EmitUClamp32(EmitContext& ctx, IR::Inst* inst, Id value, Id min, Id max) { + Id result{}; + if (ctx.profile.has_broken_spirv_clamp) { + result = ctx.OpUMax(ctx.U32[1], ctx.OpUMin(ctx.U32[1], value, max), min); + } else { + result = ctx.OpUClamp(ctx.U32[1], value, min, max); + } + SetZeroFlag(ctx, inst, result); + SetSignFlag(ctx, inst, result); + return result; +} + +Id EmitSLessThan(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpSLessThan(ctx.U1, lhs, rhs); +} + +Id EmitULessThan(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpULessThan(ctx.U1, lhs, rhs); +} + +Id EmitIEqual(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpIEqual(ctx.U1, lhs, rhs); +} + +Id EmitSLessThanEqual(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpSLessThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitULessThanEqual(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpULessThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitSGreaterThan(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpSGreaterThan(ctx.U1, lhs, rhs); +} + +Id EmitUGreaterThan(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpUGreaterThan(ctx.U1, lhs, rhs); +} + +Id EmitINotEqual(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpINotEqual(ctx.U1, lhs, rhs); +} + +Id EmitSGreaterThanEqual(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpSGreaterThanEqual(ctx.U1, lhs, rhs); +} + +Id EmitUGreaterThanEqual(EmitContext& ctx, Id lhs, Id rhs) { + return ctx.OpUGreaterThanEqual(ctx.U1, lhs, rhs); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_logical.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_logical.cpp new file mode 100755 index 000000000..b9a9500fc --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_logical.cpp @@ -0,0 +1,26 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
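+ +// Note: SPIR-V has no OpLogicalXor; boolean XOR is expressed with OpLogicalNotEqual below.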
+ +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { + +Id EmitLogicalOr(EmitContext& ctx, Id a, Id b) { + return ctx.OpLogicalOr(ctx.U1, a, b); +} + +Id EmitLogicalAnd(EmitContext& ctx, Id a, Id b) { + return ctx.OpLogicalAnd(ctx.U1, a, b); +} + +Id EmitLogicalXor(EmitContext& ctx, Id a, Id b) { + return ctx.OpLogicalNotEqual(ctx.U1, a, b); +} + +Id EmitLogicalNot(EmitContext& ctx, Id value) { + return ctx.OpLogicalNot(ctx.U1, value); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp new file mode 100755 index 000000000..679ee2684 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp @@ -0,0 +1,275 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +Id StorageIndex(EmitContext& ctx, const IR::Value& offset, size_t element_size, + u32 index_offset = 0) { + if (offset.IsImmediate()) { + const u32 imm_offset{static_cast(offset.U32() / element_size) + index_offset}; + return ctx.Const(imm_offset); + } + const u32 shift{static_cast(std::countr_zero(element_size))}; + Id index{ctx.Def(offset)}; + if (shift != 0) { + const Id shift_id{ctx.Const(shift)}; + index = ctx.OpShiftRightLogical(ctx.U32[1], index, shift_id); + } + if (index_offset != 0) { + index = ctx.OpIAdd(ctx.U32[1], index, ctx.Const(index_offset)); + } + return index; +} + +Id StoragePointer(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + const StorageTypeDefinition& type_def, size_t element_size, + Id StorageDefinitions::*member_ptr, u32 index_offset = 0) { + if (!binding.IsImmediate()) { + throw NotImplementedException("Dynamic storage buffer indexing"); + } + const Id ssbo{ctx.ssbos[binding.U32()].*member_ptr}; + const Id index{StorageIndex(ctx, offset, element_size, index_offset)}; + return ctx.OpAccessChain(type_def.element, ssbo, ctx.u32_zero_value, index); +} + +Id LoadStorage(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id result_type, + const StorageTypeDefinition& type_def, size_t element_size, + Id StorageDefinitions::*member_ptr, u32 index_offset = 0) { + const Id pointer{ + StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr, index_offset)}; + return ctx.OpLoad(result_type, pointer); +} + +Id LoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + u32 index_offset = 0) { + return LoadStorage(ctx, binding, offset, ctx.U32[1], ctx.storage_types.U32, sizeof(u32), + &StorageDefinitions::U32, index_offset); +} + +void WriteStorage(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value, + const StorageTypeDefinition& type_def, size_t element_size, + Id StorageDefinitions::*member_ptr, u32 index_offset = 0) { + const Id pointer{ + StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr, index_offset)}; + ctx.OpStore(pointer, value); +} + +void WriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value, + u32 index_offset = 0) { + WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32, sizeof(u32), + &StorageDefinitions::U32, 
index_offset); +} +} // Anonymous namespace + +void EmitLoadGlobalU8(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitLoadGlobalS8(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitLoadGlobalU16(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitLoadGlobalS16(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitLoadGlobal32(EmitContext& ctx, Id address) { + if (ctx.profile.support_int64) { + return ctx.OpFunctionCall(ctx.U32[1], ctx.load_global_func_u32, address); + } + LOG_WARNING(Shader_SPIRV, "Int64 not supported, ignoring memory operation"); + return ctx.Const(0u); +} + +Id EmitLoadGlobal64(EmitContext& ctx, Id address) { + if (ctx.profile.support_int64) { + return ctx.OpFunctionCall(ctx.U32[2], ctx.load_global_func_u32x2, address); + } + LOG_WARNING(Shader_SPIRV, "Int64 not supported, ignoring memory operation"); + return ctx.Const(0u, 0u); +} + +Id EmitLoadGlobal128(EmitContext& ctx, Id address) { + if (ctx.profile.support_int64) { + return ctx.OpFunctionCall(ctx.U32[4], ctx.load_global_func_u32x4, address); + } + LOG_WARNING(Shader_SPIRV, "Int64 not supported, ignoring memory operation"); + return ctx.Const(0u, 0u, 0u, 0u); +} + +void EmitWriteGlobalU8(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitWriteGlobalS8(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitWriteGlobalU16(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitWriteGlobalS16(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +void EmitWriteGlobal32(EmitContext& ctx, Id address, Id value) { + if (ctx.profile.support_int64) { + ctx.OpFunctionCall(ctx.void_id, ctx.write_global_func_u32, address, value); + return; + } + LOG_WARNING(Shader_SPIRV, "Int64 not supported, ignoring memory operation"); +} + +void EmitWriteGlobal64(EmitContext& ctx, Id address, Id value) { + if (ctx.profile.support_int64) { + ctx.OpFunctionCall(ctx.void_id, ctx.write_global_func_u32x2, address, value); + return; + } + LOG_WARNING(Shader_SPIRV, "Int64 not supported, ignoring memory operation"); +} + +void EmitWriteGlobal128(EmitContext& ctx, Id address, Id value) { + if (ctx.profile.support_int64) { + ctx.OpFunctionCall(ctx.void_id, ctx.write_global_func_u32x4, address, value); + return; + } + LOG_WARNING(Shader_SPIRV, "Int64 not supported, ignoring memory operation"); +} + +Id EmitLoadStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_int8 && ctx.profile.support_descriptor_aliasing) { + return ctx.OpUConvert(ctx.U32[1], + LoadStorage(ctx, binding, offset, ctx.U8, ctx.storage_types.U8, + sizeof(u8), &StorageDefinitions::U8)); + } else { + return ctx.OpBitFieldUExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset), + ctx.BitOffset8(offset), ctx.Const(8u)); + } +} + +Id EmitLoadStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_int8 && ctx.profile.support_descriptor_aliasing) { + return ctx.OpSConvert(ctx.U32[1], + LoadStorage(ctx, binding, offset, ctx.S8, ctx.storage_types.S8, + sizeof(s8), &StorageDefinitions::S8)); + } else { + return ctx.OpBitFieldSExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset), + ctx.BitOffset8(offset), ctx.Const(8u)); + } +} + +Id EmitLoadStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if 
(ctx.profile.support_int16 && ctx.profile.support_descriptor_aliasing) { + return ctx.OpUConvert(ctx.U32[1], + LoadStorage(ctx, binding, offset, ctx.U16, ctx.storage_types.U16, + sizeof(u16), &StorageDefinitions::U16)); + } else { + return ctx.OpBitFieldUExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset), + ctx.BitOffset16(offset), ctx.Const(16u)); + } +} + +Id EmitLoadStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_int16 && ctx.profile.support_descriptor_aliasing) { + return ctx.OpSConvert(ctx.U32[1], + LoadStorage(ctx, binding, offset, ctx.S16, ctx.storage_types.S16, + sizeof(s16), &StorageDefinitions::S16)); + } else { + return ctx.OpBitFieldSExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset), + ctx.BitOffset16(offset), ctx.Const(16u)); + } +} + +Id EmitLoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + return LoadStorage32(ctx, binding, offset); +} + +Id EmitLoadStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing) { + return LoadStorage(ctx, binding, offset, ctx.U32[2], ctx.storage_types.U32x2, + sizeof(u32[2]), &StorageDefinitions::U32x2); + } else { + return ctx.OpCompositeConstruct(ctx.U32[2], LoadStorage32(ctx, binding, offset, 0), + LoadStorage32(ctx, binding, offset, 1)); + } +} + +Id EmitLoadStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) { + if (ctx.profile.support_descriptor_aliasing) { + return LoadStorage(ctx, binding, offset, ctx.U32[4], ctx.storage_types.U32x4, + sizeof(u32[4]), &StorageDefinitions::U32x4); + } else { + return ctx.OpCompositeConstruct(ctx.U32[4], LoadStorage32(ctx, binding, offset, 0), + LoadStorage32(ctx, binding, offset, 1), + LoadStorage32(ctx, binding, offset, 2), + LoadStorage32(ctx, binding, offset, 3)); + } +} + +void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + WriteStorage(ctx, binding, offset, ctx.OpSConvert(ctx.U8, value), ctx.storage_types.U8, + sizeof(u8), &StorageDefinitions::U8); +} + +void EmitWriteStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + WriteStorage(ctx, binding, offset, ctx.OpSConvert(ctx.S8, value), ctx.storage_types.S8, + sizeof(s8), &StorageDefinitions::S8); +} + +void EmitWriteStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + WriteStorage(ctx, binding, offset, ctx.OpSConvert(ctx.U16, value), ctx.storage_types.U16, + sizeof(u16), &StorageDefinitions::U16); +} + +void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + WriteStorage(ctx, binding, offset, ctx.OpSConvert(ctx.S16, value), ctx.storage_types.S16, + sizeof(s16), &StorageDefinitions::S16); +} + +void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + WriteStorage32(ctx, binding, offset, value); +} + +void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, + Id value) { + if (ctx.profile.support_descriptor_aliasing) { + WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x2, sizeof(u32[2]), + &StorageDefinitions::U32x2); + } else { + for (u32 index = 0; index < 2; ++index) { + const Id element{ctx.OpCompositeExtract(ctx.U32[1], value, index)}; + WriteStorage32(ctx, binding, offset, element, index); + } + } +} + +void EmitWriteStorage128(EmitContext& ctx, const 
IR::Value& binding, const IR::Value& offset, + Id value) { + if (ctx.profile.support_descriptor_aliasing) { + WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x4, sizeof(u32[4]), + &StorageDefinitions::U32x4); + } else { + for (u32 index = 0; index < 4; ++index) { + const Id element{ctx.OpCompositeExtract(ctx.U32[1], value, index)}; + WriteStorage32(ctx, binding, offset, element, index); + } + } +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_select.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_select.cpp new file mode 100755 index 000000000..c5b4f4720 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_select.cpp @@ -0,0 +1,42 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { + +Id EmitSelectU1(EmitContext& ctx, Id cond, Id true_value, Id false_value) { + return ctx.OpSelect(ctx.U1, cond, true_value, false_value); +} + +Id EmitSelectU8(EmitContext&, Id, Id, Id) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitSelectU16(EmitContext& ctx, Id cond, Id true_value, Id false_value) { + return ctx.OpSelect(ctx.U16, cond, true_value, false_value); +} + +Id EmitSelectU32(EmitContext& ctx, Id cond, Id true_value, Id false_value) { + return ctx.OpSelect(ctx.U32[1], cond, true_value, false_value); +} + +Id EmitSelectU64(EmitContext& ctx, Id cond, Id true_value, Id false_value) { + return ctx.OpSelect(ctx.U64, cond, true_value, false_value); +} + +Id EmitSelectF16(EmitContext& ctx, Id cond, Id true_value, Id false_value) { + return ctx.OpSelect(ctx.F16[1], cond, true_value, false_value); +} + +Id EmitSelectF32(EmitContext& ctx, Id cond, Id true_value, Id false_value) { + return ctx.OpSelect(ctx.F32[1], cond, true_value, false_value); +} + +Id EmitSelectF64(EmitContext& ctx, Id cond, Id true_value, Id false_value) { + return ctx.OpSelect(ctx.F64[1], cond, true_value, false_value); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_shared_memory.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_shared_memory.cpp new file mode 100755 index 000000000..9a79fc7a2 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_shared_memory.cpp @@ -0,0 +1,174 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
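+// Shared (workgroup) memory loads and stores. When the profile reports
+// support_explicit_workgroup_layout (presumably backed by
+// VK_KHR_workgroup_memory_explicit_layout), sub-word accesses address typed
+// views of the shared buffer directly; otherwise they are emulated on aligned
+// 32-bit words with shifts and bit-field extracts, as the else-branches below show.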
+ +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +Id Pointer(EmitContext& ctx, Id pointer_type, Id array, Id offset, u32 shift) { + const Id shift_id{ctx.Const(shift)}; + const Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + return ctx.OpAccessChain(pointer_type, array, ctx.u32_zero_value, index); +} + +Id Word(EmitContext& ctx, Id offset) { + const Id shift_id{ctx.Const(2U)}; + const Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + const Id pointer{ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, index)}; + return ctx.OpLoad(ctx.U32[1], pointer); +} + +std::pair ExtractArgs(EmitContext& ctx, Id offset, u32 mask, u32 count) { + const Id shift{ctx.OpShiftLeftLogical(ctx.U32[1], offset, ctx.Const(3U))}; + const Id bit{ctx.OpBitwiseAnd(ctx.U32[1], shift, ctx.Const(mask))}; + const Id count_id{ctx.Const(count)}; + return {bit, count_id}; +} +} // Anonymous namespace + +Id EmitLoadSharedU8(EmitContext& ctx, Id offset) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{ + ctx.OpAccessChain(ctx.shared_u8, ctx.shared_memory_u8, ctx.u32_zero_value, offset)}; + return ctx.OpUConvert(ctx.U32[1], ctx.OpLoad(ctx.U8, pointer)); + } else { + const auto [bit, count]{ExtractArgs(ctx, offset, 24, 8)}; + return ctx.OpBitFieldUExtract(ctx.U32[1], Word(ctx, offset), bit, count); + } +} + +Id EmitLoadSharedS8(EmitContext& ctx, Id offset) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{ + ctx.OpAccessChain(ctx.shared_u8, ctx.shared_memory_u8, ctx.u32_zero_value, offset)}; + return ctx.OpSConvert(ctx.U32[1], ctx.OpLoad(ctx.U8, pointer)); + } else { + const auto [bit, count]{ExtractArgs(ctx, offset, 24, 8)}; + return ctx.OpBitFieldSExtract(ctx.U32[1], Word(ctx, offset), bit, count); + } +} + +Id EmitLoadSharedU16(EmitContext& ctx, Id offset) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u16, ctx.shared_memory_u16, offset, 1)}; + return ctx.OpUConvert(ctx.U32[1], ctx.OpLoad(ctx.U16, pointer)); + } else { + const auto [bit, count]{ExtractArgs(ctx, offset, 16, 16)}; + return ctx.OpBitFieldUExtract(ctx.U32[1], Word(ctx, offset), bit, count); + } +} + +Id EmitLoadSharedS16(EmitContext& ctx, Id offset) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u16, ctx.shared_memory_u16, offset, 1)}; + return ctx.OpSConvert(ctx.U32[1], ctx.OpLoad(ctx.U16, pointer)); + } else { + const auto [bit, count]{ExtractArgs(ctx, offset, 16, 16)}; + return ctx.OpBitFieldSExtract(ctx.U32[1], Word(ctx, offset), bit, count); + } +} + +Id EmitLoadSharedU32(EmitContext& ctx, Id offset) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u32, ctx.shared_memory_u32, offset, 2)}; + return ctx.OpLoad(ctx.U32[1], pointer); + } else { + return Word(ctx, offset); + } +} + +Id EmitLoadSharedU64(EmitContext& ctx, Id offset) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u32x2, ctx.shared_memory_u32x2, offset, 3)}; + return ctx.OpLoad(ctx.U32[2], pointer); + } else { + const Id shift_id{ctx.Const(2U)}; + const Id base_index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + const Id next_index{ctx.OpIAdd(ctx.U32[1], base_index, ctx.Const(1U))}; + const Id lhs_pointer{ctx.OpAccessChain(ctx.shared_u32, 
ctx.shared_memory_u32, base_index)}; + const Id rhs_pointer{ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, next_index)}; + return ctx.OpCompositeConstruct(ctx.U32[2], ctx.OpLoad(ctx.U32[1], lhs_pointer), + ctx.OpLoad(ctx.U32[1], rhs_pointer)); + } +} + +Id EmitLoadSharedU128(EmitContext& ctx, Id offset) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u32x4, ctx.shared_memory_u32x4, offset, 4)}; + return ctx.OpLoad(ctx.U32[4], pointer); + } + const Id shift_id{ctx.Const(2U)}; + const Id base_index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)}; + std::array values{}; + for (u32 i = 0; i < 4; ++i) { + const Id index{i == 0 ? base_index : ctx.OpIAdd(ctx.U32[1], base_index, ctx.Const(i))}; + const Id pointer{ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, index)}; + values[i] = ctx.OpLoad(ctx.U32[1], pointer); + } + return ctx.OpCompositeConstruct(ctx.U32[4], values); +} + +void EmitWriteSharedU8(EmitContext& ctx, Id offset, Id value) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{ + ctx.OpAccessChain(ctx.shared_u8, ctx.shared_memory_u8, ctx.u32_zero_value, offset)}; + ctx.OpStore(pointer, ctx.OpUConvert(ctx.U8, value)); + } else { + ctx.OpFunctionCall(ctx.void_id, ctx.shared_store_u8_func, offset, value); + } +} + +void EmitWriteSharedU16(EmitContext& ctx, Id offset, Id value) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u16, ctx.shared_memory_u16, offset, 1)}; + ctx.OpStore(pointer, ctx.OpUConvert(ctx.U16, value)); + } else { + ctx.OpFunctionCall(ctx.void_id, ctx.shared_store_u16_func, offset, value); + } +} + +void EmitWriteSharedU32(EmitContext& ctx, Id offset, Id value) { + Id pointer{}; + if (ctx.profile.support_explicit_workgroup_layout) { + pointer = Pointer(ctx, ctx.shared_u32, ctx.shared_memory_u32, offset, 2); + } else { + const Id shift{ctx.Const(2U)}; + const Id word_offset{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift)}; + pointer = ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, word_offset); + } + ctx.OpStore(pointer, value); +} + +void EmitWriteSharedU64(EmitContext& ctx, Id offset, Id value) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u32x2, ctx.shared_memory_u32x2, offset, 3)}; + ctx.OpStore(pointer, value); + return; + } + const Id shift{ctx.Const(2U)}; + const Id word_offset{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift)}; + const Id next_offset{ctx.OpIAdd(ctx.U32[1], word_offset, ctx.Const(1U))}; + const Id lhs_pointer{ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, word_offset)}; + const Id rhs_pointer{ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, next_offset)}; + ctx.OpStore(lhs_pointer, ctx.OpCompositeExtract(ctx.U32[1], value, 0U)); + ctx.OpStore(rhs_pointer, ctx.OpCompositeExtract(ctx.U32[1], value, 1U)); +} + +void EmitWriteSharedU128(EmitContext& ctx, Id offset, Id value) { + if (ctx.profile.support_explicit_workgroup_layout) { + const Id pointer{Pointer(ctx, ctx.shared_u32x4, ctx.shared_memory_u32x4, offset, 4)}; + ctx.OpStore(pointer, value); + return; + } + const Id shift{ctx.Const(2U)}; + const Id base_index{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift)}; + for (u32 i = 0; i < 4; ++i) { + const Id index{i == 0 ? 
base_index : ctx.OpIAdd(ctx.U32[1], base_index, ctx.Const(i))}; + const Id pointer{ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, index)}; + ctx.OpStore(pointer, ctx.OpCompositeExtract(ctx.U32[1], value, i)); + } +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_special.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_special.cpp new file mode 100755 index 000000000..9e7eb3cb1 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_special.cpp @@ -0,0 +1,150 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +void ConvertDepthMode(EmitContext& ctx) { + const Id type{ctx.F32[1]}; + const Id position{ctx.OpLoad(ctx.F32[4], ctx.output_position)}; + const Id z{ctx.OpCompositeExtract(type, position, 2u)}; + const Id w{ctx.OpCompositeExtract(type, position, 3u)}; + const Id screen_depth{ctx.OpFMul(type, ctx.OpFAdd(type, z, w), ctx.Constant(type, 0.5f))}; + const Id vector{ctx.OpCompositeInsert(ctx.F32[4], screen_depth, position, 2u)}; + ctx.OpStore(ctx.output_position, vector); +} + +void SetFixedPipelinePointSize(EmitContext& ctx) { + if (ctx.runtime_info.fixed_state_point_size) { + const float point_size{*ctx.runtime_info.fixed_state_point_size}; + ctx.OpStore(ctx.output_point_size, ctx.Const(point_size)); + } +} + +Id DefaultVarying(EmitContext& ctx, u32 num_components, u32 element, Id zero, Id one, + Id default_vector) { + switch (num_components) { + case 1: + return element == 3 ? one : zero; + case 2: + return ctx.ConstantComposite(ctx.F32[2], zero, element + 1 == 3 ? one : zero); + case 3: + return ctx.ConstantComposite(ctx.F32[3], zero, zero, element + 2 == 3 ? 
one : zero); + case 4: + return default_vector; + } + throw InvalidArgument("Bad element"); +} + +Id ComparisonFunction(EmitContext& ctx, CompareFunction comparison, Id operand_1, Id operand_2) { + switch (comparison) { + case CompareFunction::Never: + return ctx.false_value; + case CompareFunction::Less: + return ctx.OpFOrdLessThan(ctx.U1, operand_1, operand_2); + case CompareFunction::Equal: + return ctx.OpFOrdEqual(ctx.U1, operand_1, operand_2); + case CompareFunction::LessThanEqual: + return ctx.OpFOrdLessThanEqual(ctx.U1, operand_1, operand_2); + case CompareFunction::Greater: + return ctx.OpFOrdGreaterThan(ctx.U1, operand_1, operand_2); + case CompareFunction::NotEqual: + return ctx.OpFOrdNotEqual(ctx.U1, operand_1, operand_2); + case CompareFunction::GreaterThanEqual: + return ctx.OpFOrdGreaterThanEqual(ctx.U1, operand_1, operand_2); + case CompareFunction::Always: + return ctx.true_value; + } + throw InvalidArgument("Comparison function {}", comparison); +} + +void AlphaTest(EmitContext& ctx) { + if (!ctx.runtime_info.alpha_test_func) { + return; + } + const auto comparison{*ctx.runtime_info.alpha_test_func}; + if (comparison == CompareFunction::Always) { + return; + } + if (!Sirit::ValidId(ctx.frag_color[0])) { + return; + } + + const Id type{ctx.F32[1]}; + const Id rt0_color{ctx.OpLoad(ctx.F32[4], ctx.frag_color[0])}; + const Id alpha{ctx.OpCompositeExtract(type, rt0_color, 3u)}; + + const Id true_label{ctx.OpLabel()}; + const Id discard_label{ctx.OpLabel()}; + const Id alpha_reference{ctx.Const(ctx.runtime_info.alpha_test_reference)}; + const Id condition{ComparisonFunction(ctx, comparison, alpha, alpha_reference)}; + + ctx.OpSelectionMerge(true_label, spv::SelectionControlMask::MaskNone); + ctx.OpBranchConditional(condition, true_label, discard_label); + ctx.AddLabel(discard_label); + ctx.OpKill(); + ctx.AddLabel(true_label); +} +} // Anonymous namespace + +void EmitPrologue(EmitContext& ctx) { + if (ctx.stage == Stage::VertexB) { + const Id zero{ctx.Const(0.0f)}; + const Id one{ctx.Const(1.0f)}; + const Id default_vector{ctx.ConstantComposite(ctx.F32[4], zero, zero, zero, one)}; + ctx.OpStore(ctx.output_position, default_vector); + for (const auto& info : ctx.output_generics) { + if (info[0].num_components == 0) { + continue; + } + u32 element{0}; + while (element < 4) { + const auto& element_info{info[element]}; + const u32 num{element_info.num_components}; + const Id value{DefaultVarying(ctx, num, element, zero, one, default_vector)}; + ctx.OpStore(element_info.id, value); + element += num; + } + } + } + if (ctx.stage == Stage::VertexB || ctx.stage == Stage::Geometry) { + SetFixedPipelinePointSize(ctx); + } +} + +void EmitEpilogue(EmitContext& ctx) { + if (ctx.stage == Stage::VertexB && ctx.runtime_info.convert_depth_mode) { + ConvertDepthMode(ctx); + } + if (ctx.stage == Stage::Fragment) { + AlphaTest(ctx); + } +} + +void EmitEmitVertex(EmitContext& ctx, const IR::Value& stream) { + if (ctx.runtime_info.convert_depth_mode) { + ConvertDepthMode(ctx); + } + if (stream.IsImmediate()) { + ctx.OpEmitStreamVertex(ctx.Def(stream)); + } else { + LOG_WARNING(Shader_SPIRV, "Stream is not immediate"); + ctx.OpEmitStreamVertex(ctx.u32_zero_value); + } + // Restore fixed pipeline point size after emitting the vertex + SetFixedPipelinePointSize(ctx); +} + +void EmitEndPrimitive(EmitContext& ctx, const IR::Value& stream) { + if (stream.IsImmediate()) { + ctx.OpEndStreamPrimitive(ctx.Def(stream)); + } else { + LOG_WARNING(Shader_SPIRV, "Stream is not immediate"); + 
ctx.OpEndStreamPrimitive(ctx.u32_zero_value); + } +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_undefined.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_undefined.cpp new file mode 100755 index 000000000..c9f469e90 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_undefined.cpp @@ -0,0 +1,30 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { + +Id EmitUndefU1(EmitContext& ctx) { + return ctx.OpUndef(ctx.U1); +} + +Id EmitUndefU8(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitUndefU16(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +Id EmitUndefU32(EmitContext& ctx) { + return ctx.OpUndef(ctx.U32[1]); +} + +Id EmitUndefU64(EmitContext&) { + throw NotImplementedException("SPIR-V Instruction"); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_warp.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_warp.cpp new file mode 100755 index 000000000..78b1e1ba7 --- /dev/null +++ b/src/shader_recompiler/backend/spirv/emit_spirv_warp.cpp @@ -0,0 +1,203 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/backend/spirv/emit_spirv_instructions.h" + +namespace Shader::Backend::SPIRV { +namespace { +Id WarpExtract(EmitContext& ctx, Id value) { + const Id local_index{ctx.OpLoad(ctx.U32[1], ctx.subgroup_local_invocation_id)}; + return ctx.OpVectorExtractDynamic(ctx.U32[1], value, local_index); +} + +Id LoadMask(EmitContext& ctx, Id mask) { + const Id value{ctx.OpLoad(ctx.U32[4], mask)}; + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + return ctx.OpCompositeExtract(ctx.U32[1], value, 0U); + } + return WarpExtract(ctx, value); +} + +void SetInBoundsFlag(IR::Inst* inst, Id result) { + IR::Inst* const in_bounds{inst->GetAssociatedPseudoOperation(IR::Opcode::GetInBoundsFromOp)}; + if (!in_bounds) { + return; + } + in_bounds->SetDefinition(result); + in_bounds->Invalidate(); +} + +Id ComputeMinThreadId(EmitContext& ctx, Id thread_id, Id segmentation_mask) { + return ctx.OpBitwiseAnd(ctx.U32[1], thread_id, segmentation_mask); +} + +Id ComputeMaxThreadId(EmitContext& ctx, Id min_thread_id, Id clamp, Id not_seg_mask) { + return ctx.OpBitwiseOr(ctx.U32[1], min_thread_id, + ctx.OpBitwiseAnd(ctx.U32[1], clamp, not_seg_mask)); +} + +Id GetMaxThreadId(EmitContext& ctx, Id thread_id, Id clamp, Id segmentation_mask) { + const Id not_seg_mask{ctx.OpNot(ctx.U32[1], segmentation_mask)}; + const Id min_thread_id{ComputeMinThreadId(ctx, thread_id, segmentation_mask)}; + return ComputeMaxThreadId(ctx, min_thread_id, clamp, not_seg_mask); +} + +Id SelectValue(EmitContext& ctx, Id in_range, Id value, Id src_thread_id) { + return ctx.OpSelect(ctx.U32[1], in_range, + ctx.OpSubgroupReadInvocationKHR(ctx.U32[1], value, src_thread_id), value); +} +} // Anonymous namespace + +Id EmitLaneId(EmitContext& ctx) { + const Id id{ctx.OpLoad(ctx.U32[1], ctx.subgroup_local_invocation_id)}; + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + return id; + } + return ctx.OpBitwiseAnd(ctx.U32[1], id, 
ctx.Const(31U)); +} + +Id EmitVoteAll(EmitContext& ctx, Id pred) { + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + return ctx.OpSubgroupAllKHR(ctx.U1, pred); + } + const Id mask_ballot{ctx.OpSubgroupBallotKHR(ctx.U32[4], ctx.true_value)}; + const Id active_mask{WarpExtract(ctx, mask_ballot)}; + const Id ballot{WarpExtract(ctx, ctx.OpSubgroupBallotKHR(ctx.U32[4], pred))}; + const Id lhs{ctx.OpBitwiseAnd(ctx.U32[1], ballot, active_mask)}; + return ctx.OpIEqual(ctx.U1, lhs, active_mask); +} + +Id EmitVoteAny(EmitContext& ctx, Id pred) { + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + return ctx.OpSubgroupAnyKHR(ctx.U1, pred); + } + const Id mask_ballot{ctx.OpSubgroupBallotKHR(ctx.U32[4], ctx.true_value)}; + const Id active_mask{WarpExtract(ctx, mask_ballot)}; + const Id ballot{WarpExtract(ctx, ctx.OpSubgroupBallotKHR(ctx.U32[4], pred))}; + const Id lhs{ctx.OpBitwiseAnd(ctx.U32[1], ballot, active_mask)}; + return ctx.OpINotEqual(ctx.U1, lhs, ctx.u32_zero_value); +} + +Id EmitVoteEqual(EmitContext& ctx, Id pred) { + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + return ctx.OpSubgroupAllEqualKHR(ctx.U1, pred); + } + const Id mask_ballot{ctx.OpSubgroupBallotKHR(ctx.U32[4], ctx.true_value)}; + const Id active_mask{WarpExtract(ctx, mask_ballot)}; + const Id ballot{WarpExtract(ctx, ctx.OpSubgroupBallotKHR(ctx.U32[4], pred))}; + const Id lhs{ctx.OpBitwiseXor(ctx.U32[1], ballot, active_mask)}; + return ctx.OpLogicalOr(ctx.U1, ctx.OpIEqual(ctx.U1, lhs, ctx.u32_zero_value), + ctx.OpIEqual(ctx.U1, lhs, active_mask)); +} + +Id EmitSubgroupBallot(EmitContext& ctx, Id pred) { + const Id ballot{ctx.OpSubgroupBallotKHR(ctx.U32[4], pred)}; + if (!ctx.profile.warp_size_potentially_larger_than_guest) { + return ctx.OpCompositeExtract(ctx.U32[1], ballot, 0U); + } + return WarpExtract(ctx, ballot); +} + +Id EmitSubgroupEqMask(EmitContext& ctx) { + return LoadMask(ctx, ctx.subgroup_mask_eq); +} + +Id EmitSubgroupLtMask(EmitContext& ctx) { + return LoadMask(ctx, ctx.subgroup_mask_lt); +} + +Id EmitSubgroupLeMask(EmitContext& ctx) { + return LoadMask(ctx, ctx.subgroup_mask_le); +} + +Id EmitSubgroupGtMask(EmitContext& ctx) { + return LoadMask(ctx, ctx.subgroup_mask_gt); +} + +Id EmitSubgroupGeMask(EmitContext& ctx) { + return LoadMask(ctx, ctx.subgroup_mask_ge); +} + +Id EmitShuffleIndex(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask) { + const Id not_seg_mask{ctx.OpNot(ctx.U32[1], segmentation_mask)}; + const Id thread_id{ctx.OpLoad(ctx.U32[1], ctx.subgroup_local_invocation_id)}; + const Id min_thread_id{ComputeMinThreadId(ctx, thread_id, segmentation_mask)}; + const Id max_thread_id{ComputeMaxThreadId(ctx, min_thread_id, clamp, not_seg_mask)}; + + const Id lhs{ctx.OpBitwiseAnd(ctx.U32[1], index, not_seg_mask)}; + const Id src_thread_id{ctx.OpBitwiseOr(ctx.U32[1], lhs, min_thread_id)}; + const Id in_range{ctx.OpSLessThanEqual(ctx.U1, src_thread_id, max_thread_id)}; + + SetInBoundsFlag(inst, in_range); + return SelectValue(ctx, in_range, value, src_thread_id); +} + +Id EmitShuffleUp(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask) { + const Id thread_id{ctx.OpLoad(ctx.U32[1], ctx.subgroup_local_invocation_id)}; + const Id max_thread_id{GetMaxThreadId(ctx, thread_id, clamp, segmentation_mask)}; + const Id src_thread_id{ctx.OpISub(ctx.U32[1], thread_id, index)}; + const Id in_range{ctx.OpSGreaterThanEqual(ctx.U1, src_thread_id, max_thread_id)}; + + SetInBoundsFlag(inst, in_range); + return 
SelectValue(ctx, in_range, value, src_thread_id); +} + +Id EmitShuffleDown(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask) { + const Id thread_id{ctx.OpLoad(ctx.U32[1], ctx.subgroup_local_invocation_id)}; + const Id max_thread_id{GetMaxThreadId(ctx, thread_id, clamp, segmentation_mask)}; + const Id src_thread_id{ctx.OpIAdd(ctx.U32[1], thread_id, index)}; + const Id in_range{ctx.OpSLessThanEqual(ctx.U1, src_thread_id, max_thread_id)}; + + SetInBoundsFlag(inst, in_range); + return SelectValue(ctx, in_range, value, src_thread_id); +} + +Id EmitShuffleButterfly(EmitContext& ctx, IR::Inst* inst, Id value, Id index, Id clamp, + Id segmentation_mask) { + const Id thread_id{ctx.OpLoad(ctx.U32[1], ctx.subgroup_local_invocation_id)}; + const Id max_thread_id{GetMaxThreadId(ctx, thread_id, clamp, segmentation_mask)}; + const Id src_thread_id{ctx.OpBitwiseXor(ctx.U32[1], thread_id, index)}; + const Id in_range{ctx.OpSLessThanEqual(ctx.U1, src_thread_id, max_thread_id)}; + + SetInBoundsFlag(inst, in_range); + return SelectValue(ctx, in_range, value, src_thread_id); +} + +Id EmitFSwizzleAdd(EmitContext& ctx, Id op_a, Id op_b, Id swizzle) { + const Id three{ctx.Const(3U)}; + Id mask{ctx.OpLoad(ctx.U32[1], ctx.subgroup_local_invocation_id)}; + mask = ctx.OpBitwiseAnd(ctx.U32[1], mask, three); + mask = ctx.OpShiftLeftLogical(ctx.U32[1], mask, ctx.Const(1U)); + mask = ctx.OpShiftRightLogical(ctx.U32[1], swizzle, mask); + mask = ctx.OpBitwiseAnd(ctx.U32[1], mask, three); + + const Id modifier_a{ctx.OpVectorExtractDynamic(ctx.F32[1], ctx.fswzadd_lut_a, mask)}; + const Id modifier_b{ctx.OpVectorExtractDynamic(ctx.F32[1], ctx.fswzadd_lut_b, mask)}; + + const Id result_a{ctx.OpFMul(ctx.F32[1], op_a, modifier_a)}; + const Id result_b{ctx.OpFMul(ctx.F32[1], op_b, modifier_b)}; + return ctx.OpFAdd(ctx.F32[1], result_a, result_b); +} + +Id EmitDPdxFine(EmitContext& ctx, Id op_a) { + return ctx.OpDPdxFine(ctx.F32[1], op_a); +} + +Id EmitDPdyFine(EmitContext& ctx, Id op_a) { + return ctx.OpDPdyFine(ctx.F32[1], op_a); +} + +Id EmitDPdxCoarse(EmitContext& ctx, Id op_a) { + return ctx.OpDPdxCoarse(ctx.F32[1], op_a); +} + +Id EmitDPdyCoarse(EmitContext& ctx, Id op_a) { + return ctx.OpDPdyCoarse(ctx.F32[1], op_a); +} + +} // namespace Shader::Backend::SPIRV diff --git a/src/shader_recompiler/environment.h b/src/shader_recompiler/environment.h new file mode 100755 index 000000000..8369d0d84 --- /dev/null +++ b/src/shader_recompiler/environment.h @@ -0,0 +1,53 @@ +#pragma once + +#include <array> + +#include "common/common_types.h" +#include "shader_recompiler/program_header.h" +#include "shader_recompiler/shader_info.h" +#include "shader_recompiler/stage.h" + +namespace Shader { + +class Environment { +public: + virtual ~Environment() = default; + + [[nodiscard]] virtual u64 ReadInstruction(u32 address) = 0; + + [[nodiscard]] virtual u32 ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) = 0; + + [[nodiscard]] virtual TextureType ReadTextureType(u32 raw_handle) = 0; + + [[nodiscard]] virtual u32 TextureBoundBuffer() const = 0; + + [[nodiscard]] virtual u32 LocalMemorySize() const = 0; + + [[nodiscard]] virtual u32 SharedMemorySize() const = 0; + + [[nodiscard]] virtual std::array<u32, 3> WorkgroupSize() const = 0; + + [[nodiscard]] const ProgramHeader& SPH() const noexcept { + return sph; + } + + [[nodiscard]] const std::array<u32, 8>& GpPassthroughMask() const noexcept { + return gp_passthrough_mask; + } + + [[nodiscard]] Stage ShaderStage() const noexcept { + return stage; + } + + [[nodiscard]] u32
StartAddress() const noexcept { + return start_address; + } + +protected: + ProgramHeader sph{}; + std::array<u32, 8> gp_passthrough_mask{}; + Stage stage{}; + u32 start_address{}; +}; + +} // namespace Shader diff --git a/src/shader_recompiler/exception.h b/src/shader_recompiler/exception.h new file mode 100755 index 000000000..337e7f0c8 --- /dev/null +++ b/src/shader_recompiler/exception.h @@ -0,0 +1,66 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include <exception> +#include <string> +#include <string_view> +#include <utility> + +#include <fmt/format.h> + +namespace Shader { + +class Exception : public std::exception { +public: + explicit Exception(std::string message) noexcept : err_message{std::move(message)} {} + + const char* what() const noexcept override { + return err_message.c_str(); + } + + void Prepend(std::string_view prepend) { + err_message.insert(0, prepend); + } + + void Append(std::string_view append) { + err_message += append; + } + +private: + std::string err_message; +}; + +class LogicError : public Exception { +public: + template <typename... Args> + LogicError(const char* message, Args&&... args) + : Exception{fmt::format(fmt::runtime(message), std::forward<Args>(args)...)} {} +}; + +class RuntimeError : public Exception { +public: + template <typename... Args> + RuntimeError(const char* message, Args&&... args) + : Exception{fmt::format(fmt::runtime(message), std::forward<Args>(args)...)} {} +}; + +class NotImplementedException : public Exception { +public: + template <typename... Args> + NotImplementedException(const char* message, Args&&... args) + : Exception{fmt::format(fmt::runtime(message), std::forward<Args>(args)...)} { + Append(" is not implemented"); + } +}; + +class InvalidArgument : public Exception { +public: + template <typename... Args> + InvalidArgument(const char* message, Args&&... args) + : Exception{fmt::format(fmt::runtime(message), std::forward<Args>(args)...)} {} +}; + +} // namespace Shader diff --git a/src/shader_recompiler/frontend/ir/abstract_syntax_list.h b/src/shader_recompiler/frontend/ir/abstract_syntax_list.h new file mode 100755 index 000000000..b61773487 --- /dev/null +++ b/src/shader_recompiler/frontend/ir/abstract_syntax_list.h @@ -0,0 +1,58 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include <vector> + +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::IR { + +class Block; + +struct AbstractSyntaxNode { + enum class Type { + Block, + If, + EndIf, + Loop, + Repeat, + Break, + Return, + Unreachable, + }; + union Data { + Block* block; + struct { + U1 cond; + Block* body; + Block* merge; + } if_node; + struct { + Block* merge; + } end_if; + struct { + Block* body; + Block* continue_block; + Block* merge; + } loop; + struct { + U1 cond; + Block* loop_header; + Block* merge; + } repeat; + struct { + U1 cond; + Block* merge; + Block* skip; + } break_node; + }; + + Data data{}; + Type type{}; +}; +using AbstractSyntaxList = std::vector<AbstractSyntaxNode>; + +} // namespace Shader::IR diff --git a/src/shader_recompiler/frontend/ir/attribute.cpp b/src/shader_recompiler/frontend/ir/attribute.cpp new file mode 100755 index 000000000..4d0b8b8e5 --- /dev/null +++ b/src/shader_recompiler/frontend/ir/attribute.cpp @@ -0,0 +1,454 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
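+// Helpers mapping the flat Attribute enumeration back to generic attribute
+// vectors: each Generic[i] occupies four consecutive enumerators (X, Y, Z, W),
+// so GenericAttributeIndex divides by 4 and GenericAttributeElement takes the
+// remainder. NameOf pretty-prints attributes for IR dumps.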
+ +#include <fmt/format.h> + +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/ir/attribute.h" + +namespace Shader::IR { + +bool IsGeneric(Attribute attribute) noexcept { + return attribute >= Attribute::Generic0X && attribute <= Attribute::Generic31X; +} + +u32 GenericAttributeIndex(Attribute attribute) { + if (!IsGeneric(attribute)) { + throw InvalidArgument("Attribute is not generic {}", attribute); + } + return (static_cast<u32>(attribute) - static_cast<u32>(Attribute::Generic0X)) / 4u; +} + +u32 GenericAttributeElement(Attribute attribute) { + if (!IsGeneric(attribute)) { + throw InvalidArgument("Attribute is not generic {}", attribute); + } + return static_cast<u32>(attribute) % 4; +} + +std::string NameOf(Attribute attribute) { + switch (attribute) { + case Attribute::PrimitiveId: + return "PrimitiveId"; + case Attribute::Layer: + return "Layer"; + case Attribute::ViewportIndex: + return "ViewportIndex"; + case Attribute::PointSize: + return "PointSize"; + case Attribute::PositionX: + return "Position.X"; + case Attribute::PositionY: + return "Position.Y"; + case Attribute::PositionZ: + return "Position.Z"; + case Attribute::PositionW: + return "Position.W"; + case Attribute::Generic0X: + return "Generic[0].X"; + case Attribute::Generic0Y: + return "Generic[0].Y"; + case Attribute::Generic0Z: + return "Generic[0].Z"; + case Attribute::Generic0W: + return "Generic[0].W"; + case Attribute::Generic1X: + return "Generic[1].X"; + case Attribute::Generic1Y: + return "Generic[1].Y"; + case Attribute::Generic1Z: + return "Generic[1].Z"; + case Attribute::Generic1W: + return "Generic[1].W"; + case Attribute::Generic2X: + return "Generic[2].X"; + case Attribute::Generic2Y: + return "Generic[2].Y"; + case Attribute::Generic2Z: + return "Generic[2].Z"; + case Attribute::Generic2W: + return "Generic[2].W"; + case Attribute::Generic3X: + return "Generic[3].X"; + case Attribute::Generic3Y: + return "Generic[3].Y"; + case Attribute::Generic3Z: + return "Generic[3].Z"; + case Attribute::Generic3W: + return "Generic[3].W"; + case Attribute::Generic4X: + return "Generic[4].X"; + case Attribute::Generic4Y: + return "Generic[4].Y"; + case Attribute::Generic4Z: + return "Generic[4].Z"; + case Attribute::Generic4W: + return "Generic[4].W"; + case Attribute::Generic5X: + return "Generic[5].X"; + case Attribute::Generic5Y: + return "Generic[5].Y"; + case Attribute::Generic5Z: + return "Generic[5].Z"; + case Attribute::Generic5W: + return "Generic[5].W"; + case Attribute::Generic6X: + return "Generic[6].X"; + case Attribute::Generic6Y: + return "Generic[6].Y"; + case Attribute::Generic6Z: + return "Generic[6].Z"; + case Attribute::Generic6W: + return "Generic[6].W"; + case Attribute::Generic7X: + return "Generic[7].X"; + case Attribute::Generic7Y: + return "Generic[7].Y"; + case Attribute::Generic7Z: + return "Generic[7].Z"; + case Attribute::Generic7W: + return "Generic[7].W"; + case Attribute::Generic8X: + return "Generic[8].X"; + case Attribute::Generic8Y: + return "Generic[8].Y"; + case Attribute::Generic8Z: + return "Generic[8].Z"; + case Attribute::Generic8W: + return "Generic[8].W"; + case Attribute::Generic9X: + return "Generic[9].X"; + case Attribute::Generic9Y: + return "Generic[9].Y"; + case Attribute::Generic9Z: + return "Generic[9].Z"; + case Attribute::Generic9W: + return "Generic[9].W"; + case Attribute::Generic10X: + return "Generic[10].X"; + case Attribute::Generic10Y: + return "Generic[10].Y"; + case Attribute::Generic10Z: + return "Generic[10].Z"; + case Attribute::Generic10W: + return
"Generic[10].W"; + case Attribute::Generic11X: + return "Generic[11].X"; + case Attribute::Generic11Y: + return "Generic[11].Y"; + case Attribute::Generic11Z: + return "Generic[11].Z"; + case Attribute::Generic11W: + return "Generic[11].W"; + case Attribute::Generic12X: + return "Generic[12].X"; + case Attribute::Generic12Y: + return "Generic[12].Y"; + case Attribute::Generic12Z: + return "Generic[12].Z"; + case Attribute::Generic12W: + return "Generic[12].W"; + case Attribute::Generic13X: + return "Generic[13].X"; + case Attribute::Generic13Y: + return "Generic[13].Y"; + case Attribute::Generic13Z: + return "Generic[13].Z"; + case Attribute::Generic13W: + return "Generic[13].W"; + case Attribute::Generic14X: + return "Generic[14].X"; + case Attribute::Generic14Y: + return "Generic[14].Y"; + case Attribute::Generic14Z: + return "Generic[14].Z"; + case Attribute::Generic14W: + return "Generic[14].W"; + case Attribute::Generic15X: + return "Generic[15].X"; + case Attribute::Generic15Y: + return "Generic[15].Y"; + case Attribute::Generic15Z: + return "Generic[15].Z"; + case Attribute::Generic15W: + return "Generic[15].W"; + case Attribute::Generic16X: + return "Generic[16].X"; + case Attribute::Generic16Y: + return "Generic[16].Y"; + case Attribute::Generic16Z: + return "Generic[16].Z"; + case Attribute::Generic16W: + return "Generic[16].W"; + case Attribute::Generic17X: + return "Generic[17].X"; + case Attribute::Generic17Y: + return "Generic[17].Y"; + case Attribute::Generic17Z: + return "Generic[17].Z"; + case Attribute::Generic17W: + return "Generic[17].W"; + case Attribute::Generic18X: + return "Generic[18].X"; + case Attribute::Generic18Y: + return "Generic[18].Y"; + case Attribute::Generic18Z: + return "Generic[18].Z"; + case Attribute::Generic18W: + return "Generic[18].W"; + case Attribute::Generic19X: + return "Generic[19].X"; + case Attribute::Generic19Y: + return "Generic[19].Y"; + case Attribute::Generic19Z: + return "Generic[19].Z"; + case Attribute::Generic19W: + return "Generic[19].W"; + case Attribute::Generic20X: + return "Generic[20].X"; + case Attribute::Generic20Y: + return "Generic[20].Y"; + case Attribute::Generic20Z: + return "Generic[20].Z"; + case Attribute::Generic20W: + return "Generic[20].W"; + case Attribute::Generic21X: + return "Generic[21].X"; + case Attribute::Generic21Y: + return "Generic[21].Y"; + case Attribute::Generic21Z: + return "Generic[21].Z"; + case Attribute::Generic21W: + return "Generic[21].W"; + case Attribute::Generic22X: + return "Generic[22].X"; + case Attribute::Generic22Y: + return "Generic[22].Y"; + case Attribute::Generic22Z: + return "Generic[22].Z"; + case Attribute::Generic22W: + return "Generic[22].W"; + case Attribute::Generic23X: + return "Generic[23].X"; + case Attribute::Generic23Y: + return "Generic[23].Y"; + case Attribute::Generic23Z: + return "Generic[23].Z"; + case Attribute::Generic23W: + return "Generic[23].W"; + case Attribute::Generic24X: + return "Generic[24].X"; + case Attribute::Generic24Y: + return "Generic[24].Y"; + case Attribute::Generic24Z: + return "Generic[24].Z"; + case Attribute::Generic24W: + return "Generic[24].W"; + case Attribute::Generic25X: + return "Generic[25].X"; + case Attribute::Generic25Y: + return "Generic[25].Y"; + case Attribute::Generic25Z: + return "Generic[25].Z"; + case Attribute::Generic25W: + return "Generic[25].W"; + case Attribute::Generic26X: + return "Generic[26].X"; + case Attribute::Generic26Y: + return "Generic[26].Y"; + case Attribute::Generic26Z: + return "Generic[26].Z"; + case 
Attribute::Generic26W: + return "Generic[26].W"; + case Attribute::Generic27X: + return "Generic[27].X"; + case Attribute::Generic27Y: + return "Generic[27].Y"; + case Attribute::Generic27Z: + return "Generic[27].Z"; + case Attribute::Generic27W: + return "Generic[27].W"; + case Attribute::Generic28X: + return "Generic[28].X"; + case Attribute::Generic28Y: + return "Generic[28].Y"; + case Attribute::Generic28Z: + return "Generic[28].Z"; + case Attribute::Generic28W: + return "Generic[28].W"; + case Attribute::Generic29X: + return "Generic[29].X"; + case Attribute::Generic29Y: + return "Generic[29].Y"; + case Attribute::Generic29Z: + return "Generic[29].Z"; + case Attribute::Generic29W: + return "Generic[29].W"; + case Attribute::Generic30X: + return "Generic[30].X"; + case Attribute::Generic30Y: + return "Generic[30].Y"; + case Attribute::Generic30Z: + return "Generic[30].Z"; + case Attribute::Generic30W: + return "Generic[30].W"; + case Attribute::Generic31X: + return "Generic[31].X"; + case Attribute::Generic31Y: + return "Generic[31].Y"; + case Attribute::Generic31Z: + return "Generic[31].Z"; + case Attribute::Generic31W: + return "Generic[31].W"; + case Attribute::ColorFrontDiffuseR: + return "ColorFrontDiffuse.R"; + case Attribute::ColorFrontDiffuseG: + return "ColorFrontDiffuse.G"; + case Attribute::ColorFrontDiffuseB: + return "ColorFrontDiffuse.B"; + case Attribute::ColorFrontDiffuseA: + return "ColorFrontDiffuse.A"; + case Attribute::ColorFrontSpecularR: + return "ColorFrontSpecular.R"; + case Attribute::ColorFrontSpecularG: + return "ColorFrontSpecular.G"; + case Attribute::ColorFrontSpecularB: + return "ColorFrontSpecular.B"; + case Attribute::ColorFrontSpecularA: + return "ColorFrontSpecular.A"; + case Attribute::ColorBackDiffuseR: + return "ColorBackDiffuse.R"; + case Attribute::ColorBackDiffuseG: + return "ColorBackDiffuse.G"; + case Attribute::ColorBackDiffuseB: + return "ColorBackDiffuse.B"; + case Attribute::ColorBackDiffuseA: + return "ColorBackDiffuse.A"; + case Attribute::ColorBackSpecularR: + return "ColorBackSpecular.R"; + case Attribute::ColorBackSpecularG: + return "ColorBackSpecular.G"; + case Attribute::ColorBackSpecularB: + return "ColorBackSpecular.B"; + case Attribute::ColorBackSpecularA: + return "ColorBackSpecular.A"; + case Attribute::ClipDistance0: + return "ClipDistance[0]"; + case Attribute::ClipDistance1: + return "ClipDistance[1]"; + case Attribute::ClipDistance2: + return "ClipDistance[2]"; + case Attribute::ClipDistance3: + return "ClipDistance[3]"; + case Attribute::ClipDistance4: + return "ClipDistance[4]"; + case Attribute::ClipDistance5: + return "ClipDistance[5]"; + case Attribute::ClipDistance6: + return "ClipDistance[6]"; + case Attribute::ClipDistance7: + return "ClipDistance[7]"; + case Attribute::PointSpriteS: + return "PointSprite.S"; + case Attribute::PointSpriteT: + return "PointSprite.T"; + case Attribute::FogCoordinate: + return "FogCoordinate"; + case Attribute::TessellationEvaluationPointU: + return "TessellationEvaluationPoint.U"; + case Attribute::TessellationEvaluationPointV: + return "TessellationEvaluationPoint.V"; + case Attribute::InstanceId: + return "InstanceId"; + case Attribute::VertexId: + return "VertexId"; + case Attribute::FixedFncTexture0S: + return "FixedFncTexture[0].S"; + case Attribute::FixedFncTexture0T: + return "FixedFncTexture[0].T"; + case Attribute::FixedFncTexture0R: + return "FixedFncTexture[0].R"; + case Attribute::FixedFncTexture0Q: + return "FixedFncTexture[0].Q"; + case Attribute::FixedFncTexture1S: + 
return "FixedFncTexture[1].S"; + case Attribute::FixedFncTexture1T: + return "FixedFncTexture[1].T"; + case Attribute::FixedFncTexture1R: + return "FixedFncTexture[1].R"; + case Attribute::FixedFncTexture1Q: + return "FixedFncTexture[1].Q"; + case Attribute::FixedFncTexture2S: + return "FixedFncTexture[2].S"; + case Attribute::FixedFncTexture2T: + return "FixedFncTexture[2].T"; + case Attribute::FixedFncTexture2R: + return "FixedFncTexture[2].R"; + case Attribute::FixedFncTexture2Q: + return "FixedFncTexture[2].Q"; + case Attribute::FixedFncTexture3S: + return "FixedFncTexture[3].S"; + case Attribute::FixedFncTexture3T: + return "FixedFncTexture[3].T"; + case Attribute::FixedFncTexture3R: + return "FixedFncTexture[3].R"; + case Attribute::FixedFncTexture3Q: + return "FixedFncTexture[3].Q"; + case Attribute::FixedFncTexture4S: + return "FixedFncTexture[4].S"; + case Attribute::FixedFncTexture4T: + return "FixedFncTexture[4].T"; + case Attribute::FixedFncTexture4R: + return "FixedFncTexture[4].R"; + case Attribute::FixedFncTexture4Q: + return "FixedFncTexture[4].Q"; + case Attribute::FixedFncTexture5S: + return "FixedFncTexture[5].S"; + case Attribute::FixedFncTexture5T: + return "FixedFncTexture[5].T"; + case Attribute::FixedFncTexture5R: + return "FixedFncTexture[5].R"; + case Attribute::FixedFncTexture5Q: + return "FixedFncTexture[5].Q"; + case Attribute::FixedFncTexture6S: + return "FixedFncTexture[6].S"; + case Attribute::FixedFncTexture6T: + return "FixedFncTexture[6].T"; + case Attribute::FixedFncTexture6R: + return "FixedFncTexture[6].R"; + case Attribute::FixedFncTexture6Q: + return "FixedFncTexture[6].Q"; + case Attribute::FixedFncTexture7S: + return "FixedFncTexture[7].S"; + case Attribute::FixedFncTexture7T: + return "FixedFncTexture[7].T"; + case Attribute::FixedFncTexture7R: + return "FixedFncTexture[7].R"; + case Attribute::FixedFncTexture7Q: + return "FixedFncTexture[7].Q"; + case Attribute::FixedFncTexture8S: + return "FixedFncTexture[8].S"; + case Attribute::FixedFncTexture8T: + return "FixedFncTexture[8].T"; + case Attribute::FixedFncTexture8R: + return "FixedFncTexture[8].R"; + case Attribute::FixedFncTexture8Q: + return "FixedFncTexture[8].Q"; + case Attribute::FixedFncTexture9S: + return "FixedFncTexture[9].S"; + case Attribute::FixedFncTexture9T: + return "FixedFncTexture[9].T"; + case Attribute::FixedFncTexture9R: + return "FixedFncTexture[9].R"; + case Attribute::FixedFncTexture9Q: + return "FixedFncTexture[9].Q"; + case Attribute::ViewportMask: + return "ViewportMask"; + case Attribute::FrontFace: + return "FrontFace"; + } + return fmt::format("", static_cast(attribute)); +} + +} // namespace Shader::IR diff --git a/src/shader_recompiler/frontend/ir/attribute.h b/src/shader_recompiler/frontend/ir/attribute.h new file mode 100755 index 000000000..ca1199494 --- /dev/null +++ b/src/shader_recompiler/frontend/ir/attribute.h @@ -0,0 +1,250 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#pragma once + +#include <fmt/format.h> + +#include "common/common_types.h" + +namespace Shader::IR { + +enum class Attribute : u64 { + PrimitiveId = 24, + Layer = 25, + ViewportIndex = 26, + PointSize = 27, + PositionX = 28, + PositionY = 29, + PositionZ = 30, + PositionW = 31, + Generic0X = 32, + Generic0Y = 33, + Generic0Z = 34, + Generic0W = 35, + Generic1X = 36, + Generic1Y = 37, + Generic1Z = 38, + Generic1W = 39, + Generic2X = 40, + Generic2Y = 41, + Generic2Z = 42, + Generic2W = 43, + Generic3X = 44, + Generic3Y = 45, + Generic3Z = 46, + Generic3W = 47, + Generic4X = 48, + Generic4Y = 49, + Generic4Z = 50, + Generic4W = 51, + Generic5X = 52, + Generic5Y = 53, + Generic5Z = 54, + Generic5W = 55, + Generic6X = 56, + Generic6Y = 57, + Generic6Z = 58, + Generic6W = 59, + Generic7X = 60, + Generic7Y = 61, + Generic7Z = 62, + Generic7W = 63, + Generic8X = 64, + Generic8Y = 65, + Generic8Z = 66, + Generic8W = 67, + Generic9X = 68, + Generic9Y = 69, + Generic9Z = 70, + Generic9W = 71, + Generic10X = 72, + Generic10Y = 73, + Generic10Z = 74, + Generic10W = 75, + Generic11X = 76, + Generic11Y = 77, + Generic11Z = 78, + Generic11W = 79, + Generic12X = 80, + Generic12Y = 81, + Generic12Z = 82, + Generic12W = 83, + Generic13X = 84, + Generic13Y = 85, + Generic13Z = 86, + Generic13W = 87, + Generic14X = 88, + Generic14Y = 89, + Generic14Z = 90, + Generic14W = 91, + Generic15X = 92, + Generic15Y = 93, + Generic15Z = 94, + Generic15W = 95, + Generic16X = 96, + Generic16Y = 97, + Generic16Z = 98, + Generic16W = 99, + Generic17X = 100, + Generic17Y = 101, + Generic17Z = 102, + Generic17W = 103, + Generic18X = 104, + Generic18Y = 105, + Generic18Z = 106, + Generic18W = 107, + Generic19X = 108, + Generic19Y = 109, + Generic19Z = 110, + Generic19W = 111, + Generic20X = 112, + Generic20Y = 113, + Generic20Z = 114, + Generic20W = 115, + Generic21X = 116, + Generic21Y = 117, + Generic21Z = 118, + Generic21W = 119, + Generic22X = 120, + Generic22Y = 121, + Generic22Z = 122, + Generic22W = 123, + Generic23X = 124, + Generic23Y = 125, + Generic23Z = 126, + Generic23W = 127, + Generic24X = 128, + Generic24Y = 129, + Generic24Z = 130, + Generic24W = 131, + Generic25X = 132, + Generic25Y = 133, + Generic25Z = 134, + Generic25W = 135, + Generic26X = 136, + Generic26Y = 137, + Generic26Z = 138, + Generic26W = 139, + Generic27X = 140, + Generic27Y = 141, + Generic27Z = 142, + Generic27W = 143, + Generic28X = 144, + Generic28Y = 145, + Generic28Z = 146, + Generic28W = 147, + Generic29X = 148, + Generic29Y = 149, + Generic29Z = 150, + Generic29W = 151, + Generic30X = 152, + Generic30Y = 153, + Generic30Z = 154, + Generic30W = 155, + Generic31X = 156, + Generic31Y = 157, + Generic31Z = 158, + Generic31W = 159, + ColorFrontDiffuseR = 160, + ColorFrontDiffuseG = 161, + ColorFrontDiffuseB = 162, + ColorFrontDiffuseA = 163, + ColorFrontSpecularR = 164, + ColorFrontSpecularG = 165, + ColorFrontSpecularB = 166, + ColorFrontSpecularA = 167, + ColorBackDiffuseR = 168, + ColorBackDiffuseG = 169, + ColorBackDiffuseB = 170, + ColorBackDiffuseA = 171, + ColorBackSpecularR = 172, + ColorBackSpecularG = 173, + ColorBackSpecularB = 174, + ColorBackSpecularA = 175, + ClipDistance0 = 176, + ClipDistance1 = 177, + ClipDistance2 = 178, + ClipDistance3 = 179, + ClipDistance4 = 180, + ClipDistance5 = 181, + ClipDistance6 = 182, + ClipDistance7 = 183, + PointSpriteS = 184, + PointSpriteT = 185, + FogCoordinate = 186, + TessellationEvaluationPointU = 188, + TessellationEvaluationPointV = 189, + InstanceId = 190, + VertexId = 191, + FixedFncTexture0S =
192, + FixedFncTexture0T = 193, + FixedFncTexture0R = 194, + FixedFncTexture0Q = 195, + FixedFncTexture1S = 196, + FixedFncTexture1T = 197, + FixedFncTexture1R = 198, + FixedFncTexture1Q = 199, + FixedFncTexture2S = 200, + FixedFncTexture2T = 201, + FixedFncTexture2R = 202, + FixedFncTexture2Q = 203, + FixedFncTexture3S = 204, + FixedFncTexture3T = 205, + FixedFncTexture3R = 206, + FixedFncTexture3Q = 207, + FixedFncTexture4S = 208, + FixedFncTexture4T = 209, + FixedFncTexture4R = 210, + FixedFncTexture4Q = 211, + FixedFncTexture5S = 212, + FixedFncTexture5T = 213, + FixedFncTexture5R = 214, + FixedFncTexture5Q = 215, + FixedFncTexture6S = 216, + FixedFncTexture6T = 217, + FixedFncTexture6R = 218, + FixedFncTexture6Q = 219, + FixedFncTexture7S = 220, + FixedFncTexture7T = 221, + FixedFncTexture7R = 222, + FixedFncTexture7Q = 223, + FixedFncTexture8S = 224, + FixedFncTexture8T = 225, + FixedFncTexture8R = 226, + FixedFncTexture8Q = 227, + FixedFncTexture9S = 228, + FixedFncTexture9T = 229, + FixedFncTexture9R = 230, + FixedFncTexture9Q = 231, + ViewportMask = 232, + FrontFace = 255, +}; + +constexpr size_t NUM_GENERICS = 32; + +[[nodiscard]] bool IsGeneric(Attribute attribute) noexcept; + +[[nodiscard]] u32 GenericAttributeIndex(Attribute attribute); + +[[nodiscard]] u32 GenericAttributeElement(Attribute attribute); + +[[nodiscard]] std::string NameOf(Attribute attribute); + +[[nodiscard]] constexpr IR::Attribute operator+(IR::Attribute attribute, size_t value) noexcept { + return static_cast<IR::Attribute>(static_cast<size_t>(attribute) + value); +} + +} // namespace Shader::IR + +template <> +struct fmt::formatter<Shader::IR::Attribute> { + constexpr auto parse(format_parse_context& ctx) { + return ctx.begin(); + } + template <typename FormatContext> + auto format(const Shader::IR::Attribute& attribute, FormatContext& ctx) { + return fmt::format_to(ctx.out(), "{}", Shader::IR::NameOf(attribute)); + } +}; diff --git a/src/shader_recompiler/frontend/ir/basic_block.cpp b/src/shader_recompiler/frontend/ir/basic_block.cpp new file mode 100755 index 000000000..7c08b25ce --- /dev/null +++ b/src/shader_recompiler/frontend/ir/basic_block.cpp @@ -0,0 +1,149 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
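+// IR::Block implementation: instruction insertion into the intrusive
+// instruction list, CFG edge bookkeeping through AddBranch, and the DumpBlock
+// helpers that render a block (with value numbering and type mismatch
+// markers) for debugging.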
+ +#include +#include +#include +#include + +#include "common/bit_cast.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::IR { + +Block::Block(ObjectPool& inst_pool_) : inst_pool{&inst_pool_} {} + +Block::~Block() = default; + +void Block::AppendNewInst(Opcode op, std::initializer_list args) { + PrependNewInst(end(), op, args); +} + +Block::iterator Block::PrependNewInst(iterator insertion_point, Opcode op, + std::initializer_list args, u32 flags) { + Inst* const inst{inst_pool->Create(op, flags)}; + const auto result_it{instructions.insert(insertion_point, *inst)}; + + if (inst->NumArgs() != args.size()) { + throw InvalidArgument("Invalid number of arguments {} in {}", args.size(), op); + } + std::ranges::for_each(args, [inst, index = size_t{0}](const Value& arg) mutable { + inst->SetArg(index, arg); + ++index; + }); + return result_it; +} + +void Block::AddBranch(Block* block) { + if (std::ranges::find(imm_successors, block) != imm_successors.end()) { + throw LogicError("Successor already inserted"); + } + if (std::ranges::find(block->imm_predecessors, this) != block->imm_predecessors.end()) { + throw LogicError("Predecessor already inserted"); + } + imm_successors.push_back(block); + block->imm_predecessors.push_back(this); +} + +static std::string BlockToIndex(const std::map& block_to_index, + Block* block) { + if (const auto it{block_to_index.find(block)}; it != block_to_index.end()) { + return fmt::format("{{Block ${}}}", it->second); + } + return fmt::format("$", reinterpret_cast(block)); +} + +static size_t InstIndex(std::map& inst_to_index, size_t& inst_index, + const Inst* inst) { + const auto [it, is_inserted]{inst_to_index.emplace(inst, inst_index + 1)}; + if (is_inserted) { + ++inst_index; + } + return it->second; +} + +static std::string ArgToIndex(std::map& inst_to_index, size_t& inst_index, + const Value& arg) { + if (arg.IsEmpty()) { + return ""; + } + if (!arg.IsImmediate() || arg.IsIdentity()) { + return fmt::format("%{}", InstIndex(inst_to_index, inst_index, arg.Inst())); + } + switch (arg.Type()) { + case Type::U1: + return fmt::format("#{}", arg.U1() ? 
"true" : "false"); + case Type::U8: + return fmt::format("#{}", arg.U8()); + case Type::U16: + return fmt::format("#{}", arg.U16()); + case Type::U32: + return fmt::format("#{}", arg.U32()); + case Type::U64: + return fmt::format("#{}", arg.U64()); + case Type::F32: + return fmt::format("#{}", arg.F32()); + case Type::Reg: + return fmt::format("{}", arg.Reg()); + case Type::Pred: + return fmt::format("{}", arg.Pred()); + case Type::Attribute: + return fmt::format("{}", arg.Attribute()); + default: + return ""; + } +} + +std::string DumpBlock(const Block& block) { + size_t inst_index{0}; + std::map inst_to_index; + return DumpBlock(block, {}, inst_to_index, inst_index); +} + +std::string DumpBlock(const Block& block, const std::map& block_to_index, + std::map& inst_to_index, size_t& inst_index) { + std::string ret{"Block"}; + if (const auto it{block_to_index.find(&block)}; it != block_to_index.end()) { + ret += fmt::format(" ${}", it->second); + } + ret += '\n'; + for (const Inst& inst : block) { + const Opcode op{inst.GetOpcode()}; + ret += fmt::format("[{:016x}] ", reinterpret_cast(&inst)); + if (TypeOf(op) != Type::Void) { + ret += fmt::format("%{:<5} = {}", InstIndex(inst_to_index, inst_index, &inst), op); + } else { + ret += fmt::format(" {}", op); // '%00000 = ' -> 1 + 5 + 3 = 9 spaces + } + const size_t arg_count{inst.NumArgs()}; + for (size_t arg_index = 0; arg_index < arg_count; ++arg_index) { + const Value arg{inst.Arg(arg_index)}; + const std::string arg_str{ArgToIndex(inst_to_index, inst_index, arg)}; + ret += arg_index != 0 ? ", " : " "; + if (op == Opcode::Phi) { + ret += fmt::format("[ {}, {} ]", arg_str, + BlockToIndex(block_to_index, inst.PhiBlock(arg_index))); + } else { + ret += arg_str; + } + if (op != Opcode::Phi) { + const Type actual_type{arg.Type()}; + const Type expected_type{ArgTypeOf(op, arg_index)}; + if (!AreTypesCompatible(actual_type, expected_type)) { + ret += fmt::format("", actual_type, expected_type); + } + } + } + if (TypeOf(op) != Type::Void) { + ret += fmt::format(" (uses: {})\n", inst.UseCount()); + } else { + ret += '\n'; + } + } + return ret; +} + +} // namespace Shader::IR diff --git a/src/shader_recompiler/frontend/ir/basic_block.h b/src/shader_recompiler/frontend/ir/basic_block.h new file mode 100755 index 000000000..7e134b4c7 --- /dev/null +++ b/src/shader_recompiler/frontend/ir/basic_block.h @@ -0,0 +1,185 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include +#include + +#include + +#include "common/bit_cast.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/condition.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/object_pool.h" + +namespace Shader::IR { + +class Block { +public: + using InstructionList = boost::intrusive::list; + using size_type = InstructionList::size_type; + using iterator = InstructionList::iterator; + using const_iterator = InstructionList::const_iterator; + using reverse_iterator = InstructionList::reverse_iterator; + using const_reverse_iterator = InstructionList::const_reverse_iterator; + + explicit Block(ObjectPool& inst_pool_); + ~Block(); + + Block(const Block&) = delete; + Block& operator=(const Block&) = delete; + + Block(Block&&) = default; + Block& operator=(Block&&) = default; + + /// Appends a new instruction to the end of this basic block. 
+    /// Appends a new instruction to the end of this basic block.
+    void AppendNewInst(Opcode op, std::initializer_list<Value> args);
+
+    /// Prepends a new instruction to this basic block before the insertion point.
+    iterator PrependNewInst(iterator insertion_point, Opcode op,
+                            std::initializer_list<Value> args = {}, u32 flags = 0);
+
+    /// Adds a new branch to this basic block.
+    void AddBranch(Block* block);
+
+    /// Gets a mutable reference to the instruction list for this basic block.
+    [[nodiscard]] InstructionList& Instructions() noexcept {
+        return instructions;
+    }
+    /// Gets an immutable reference to the instruction list for this basic block.
+    [[nodiscard]] const InstructionList& Instructions() const noexcept {
+        return instructions;
+    }
+
+    /// Gets an immutable span to the immediate predecessors.
+    [[nodiscard]] std::span<Block* const> ImmPredecessors() const noexcept {
+        return imm_predecessors;
+    }
+    /// Gets an immutable span to the immediate successors.
+    [[nodiscard]] std::span<Block* const> ImmSuccessors() const noexcept {
+        return imm_successors;
+    }
+
+    /// Intrusively store the host definition of this instruction.
+    template <typename DefinitionType>
+    void SetDefinition(DefinitionType def) {
+        definition = Common::BitCast<u32>(def);
+    }
+
+    /// Return the intrusively stored host definition of this instruction.
+    template <typename DefinitionType>
+    [[nodiscard]] DefinitionType Definition() const noexcept {
+        return Common::BitCast<DefinitionType>(definition);
+    }
+
+    void SetSsaRegValue(IR::Reg reg, const Value& value) noexcept {
+        ssa_reg_values[RegIndex(reg)] = value;
+    }
+    const Value& SsaRegValue(IR::Reg reg) const noexcept {
+        return ssa_reg_values[RegIndex(reg)];
+    }
+
+    void SsaSeal() noexcept {
+        is_ssa_sealed = true;
+    }
+    [[nodiscard]] bool IsSsaSealed() const noexcept {
+        return is_ssa_sealed;
+    }
+
+    [[nodiscard]] bool empty() const {
+        return instructions.empty();
+    }
+    [[nodiscard]] size_type size() const {
+        return instructions.size();
+    }
+
+    [[nodiscard]] Inst& front() {
+        return instructions.front();
+    }
+    [[nodiscard]] const Inst& front() const {
+        return instructions.front();
+    }
+
+    [[nodiscard]] Inst& back() {
+        return instructions.back();
+    }
+    [[nodiscard]] const Inst& back() const {
+        return instructions.back();
+    }
+
+    [[nodiscard]] iterator begin() {
+        return instructions.begin();
+    }
+    [[nodiscard]] const_iterator begin() const {
+        return instructions.begin();
+    }
+    [[nodiscard]] iterator end() {
+        return instructions.end();
+    }
+    [[nodiscard]] const_iterator end() const {
+        return instructions.end();
+    }
+
+    [[nodiscard]] reverse_iterator rbegin() {
+        return instructions.rbegin();
+    }
+    [[nodiscard]] const_reverse_iterator rbegin() const {
+        return instructions.rbegin();
+    }
+    [[nodiscard]] reverse_iterator rend() {
+        return instructions.rend();
+    }
+    [[nodiscard]] const_reverse_iterator rend() const {
+        return instructions.rend();
+    }
+
+    [[nodiscard]] const_iterator cbegin() const {
+        return instructions.cbegin();
+    }
+    [[nodiscard]] const_iterator cend() const {
+        return instructions.cend();
+    }
+
+    [[nodiscard]] const_reverse_iterator crbegin() const {
+        return instructions.crbegin();
+    }
+    [[nodiscard]] const_reverse_iterator crend() const {
+        return instructions.crend();
+    }
+
+private:
+    /// Memory pool for instruction list
+    ObjectPool<Inst>* inst_pool;
+
+    /// List of instructions in this block
+    InstructionList instructions;
+
+    /// Block immediate predecessors
+    std::vector<Block*> imm_predecessors;
+    /// Block immediate successors
+    std::vector<Block*> imm_successors;
+
+    /// Intrusively store the value of a register in the block.
+    std::array<Value, NUM_REGS> ssa_reg_values;
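+    // Editorial note (added comment, not in the original sources): "sealing"
+    // here follows the usual incremental SSA construction scheme (cf. Braun et
+    // al.), where a block is sealed once all of its predecessors are known and
+    // no further phi operands can be added to its phi instructions.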
+    /// Intrusively store if the block is sealed in the SSA pass.
+    bool is_ssa_sealed{false};
+
+    /// Intrusively stored host definition of this block.
+    u32 definition{};
+};
+
+using BlockList = std::vector<Block*>;
+
+[[nodiscard]] std::string DumpBlock(const Block& block);
+
+[[nodiscard]] std::string DumpBlock(const Block& block,
+                                    const std::map<const Block*, size_t>& block_to_index,
+                                    std::map<const Inst*, size_t>& inst_to_index,
+                                    size_t& inst_index);
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/breadth_first_search.h b/src/shader_recompiler/frontend/ir/breadth_first_search.h
new file mode 100755
index 000000000..a52ccbd58
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/breadth_first_search.h
@@ -0,0 +1,56 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <optional>
+#include <queue>
+#include <type_traits>
+
+#include <boost/container/small_vector.hpp>
+
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::IR {
+
+template <typename Pred>
+auto BreadthFirstSearch(const Value& value, Pred&& pred)
+    -> std::invoke_result_t<Pred, const Inst*> {
+    if (value.IsImmediate()) {
+        // Nothing to do with immediates
+        return std::nullopt;
+    }
+    // Breadth-first search visiting the right most arguments first
+    // Small vector has been determined from shaders in Super Smash Bros. Ultimate
+    boost::container::small_vector<const Inst*, 2> visited;
+    std::queue<const Inst*> queue;
+    queue.push(value.InstRecursive());
+
+    while (!queue.empty()) {
+        // Pop one instruction from the queue
+        const Inst* const inst{queue.front()};
+        queue.pop();
+        if (const std::optional result = pred(inst)) {
+            // This is the instruction we were looking for
+            return result;
+        }
+        // Visit the right most arguments first
+        for (size_t arg = inst->NumArgs(); arg--;) {
+            const Value arg_value{inst->Arg(arg)};
+            if (arg_value.IsImmediate()) {
+                continue;
+            }
+            // Queue instruction if it hasn't been visited
+            const Inst* const arg_inst{arg_value.InstRecursive()};
+            if (std::ranges::find(visited, arg_inst) == visited.end()) {
+                visited.push_back(arg_inst);
+                queue.push(arg_inst);
+            }
+        }
+    }
+    // SSA tree has been traversed and the result hasn't been found
+    return std::nullopt;
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/condition.cpp b/src/shader_recompiler/frontend/ir/condition.cpp
new file mode 100755
index 000000000..fc18ea2a2
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/condition.cpp
@@ -0,0 +1,29 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/frontend/ir/condition.h"
+
+namespace Shader::IR {
+
+std::string NameOf(Condition condition) {
+    std::string ret;
+    if (condition.GetFlowTest() != FlowTest::T) {
+        ret = fmt::to_string(condition.GetFlowTest());
+    }
+    const auto [pred, negated]{condition.GetPred()};
+    if (!ret.empty()) {
+        ret += '&';
+    }
+    if (negated) {
+        ret += '!';
+    }
+    ret += fmt::to_string(pred);
+    return ret;
+}
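+
+// Editorial example (added comment, not in the original sources): NameOf emits
+// the flow test only when it is not T, and prefixes '!' for a negated
+// predicate, so Condition{FlowTest::NE, Pred::P3, true} stringifies to
+// "NE&!P3", while Condition{Pred::PT} stringifies to just "PT".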
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/condition.h b/src/shader_recompiler/frontend/ir/condition.h
new file mode 100755
index 000000000..aa8597c60
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/condition.h
@@ -0,0 +1,60 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <compare>
+#include <string>
+
+#include <fmt/format.h>
+
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/flow_test.h"
+#include "shader_recompiler/frontend/ir/pred.h"
+
+namespace Shader::IR {
+
+class Condition {
+public:
+    Condition() noexcept = default;
+
+    explicit Condition(FlowTest flow_test_, Pred pred_, bool pred_negated_ = false) noexcept
+        : flow_test{static_cast<u16>(flow_test_)}, pred{static_cast<u8>(pred_)},
+          pred_negated{pred_negated_ ? u8{1} : u8{0}} {}
+
+    explicit Condition(Pred pred_, bool pred_negated_ = false) noexcept
+        : Condition(FlowTest::T, pred_, pred_negated_) {}
+
+    explicit Condition(bool value) : Condition(Pred::PT, !value) {}
+
+    auto operator<=>(const Condition&) const noexcept = default;
+
+    [[nodiscard]] IR::FlowTest GetFlowTest() const noexcept {
+        return static_cast<IR::FlowTest>(flow_test);
+    }
+
+    [[nodiscard]] std::pair<IR::Pred, bool> GetPred() const noexcept {
+        return {static_cast<IR::Pred>(pred), pred_negated != 0};
+    }
+
+private:
+    u16 flow_test;
+    u8 pred;
+    u8 pred_negated;
+};
+
+std::string NameOf(Condition condition);
+
+} // namespace Shader::IR
+
+template <>
+struct fmt::formatter<Shader::IR::Condition> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::IR::Condition& cond, FormatContext& ctx) {
+        return fmt::format_to(ctx.out(), "{}", Shader::IR::NameOf(cond));
+    }
+};
diff --git a/src/shader_recompiler/frontend/ir/flow_test.cpp b/src/shader_recompiler/frontend/ir/flow_test.cpp
new file mode 100755
index 000000000..6ebb4ad89
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/flow_test.cpp
@@ -0,0 +1,83 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/frontend/ir/flow_test.h"
+
+namespace Shader::IR {
+
+std::string NameOf(FlowTest flow_test) {
+    switch (flow_test) {
+    case FlowTest::F:
+        return "F";
+    case FlowTest::LT:
+        return "LT";
+    case FlowTest::EQ:
+        return "EQ";
+    case FlowTest::LE:
+        return "LE";
+    case FlowTest::GT:
+        return "GT";
+    case FlowTest::NE:
+        return "NE";
+    case FlowTest::GE:
+        return "GE";
+    case FlowTest::NUM:
+        return "NUM";
+    case FlowTest::NaN:
+        return "NAN";
+    case FlowTest::LTU:
+        return "LTU";
+    case FlowTest::EQU:
+        return "EQU";
+    case FlowTest::LEU:
+        return "LEU";
+    case FlowTest::GTU:
+        return "GTU";
+    case FlowTest::NEU:
+        return "NEU";
+    case FlowTest::GEU:
+        return "GEU";
+    case FlowTest::T:
+        return "T";
+    case FlowTest::OFF:
+        return "OFF";
+    case FlowTest::LO:
+        return "LO";
+    case FlowTest::SFF:
+        return "SFF";
+    case FlowTest::LS:
+        return "LS";
+    case FlowTest::HI:
+        return "HI";
+    case FlowTest::SFT:
+        return "SFT";
+    case FlowTest::HS:
+        return "HS";
+    case FlowTest::OFT:
+        return "OFT";
+    case FlowTest::CSM_TA:
+        return "CSM_TA";
+    case FlowTest::CSM_TR:
+        return "CSM_TR";
+    case FlowTest::CSM_MX:
+        return "CSM_MX";
+    case FlowTest::FCSM_TA:
+        return "FCSM_TA";
+    case FlowTest::FCSM_TR:
+        return "FCSM_TR";
+    case FlowTest::FCSM_MX:
+        return "FCSM_MX";
+    case FlowTest::RLE:
+        return "RLE";
+    case FlowTest::RGT:
+        return "RGT";
+    }
+    return fmt::format("<invalid flow test: {}>", static_cast<u64>(flow_test));
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/flow_test.h b/src/shader_recompiler/frontend/ir/flow_test.h
new file mode 100755
index 000000000..09e113773
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/flow_test.h
@@ -0,0 +1,62 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included. + +#pragma once + +#include +#include + +#include "common/common_types.h" + +namespace Shader::IR { + +enum class FlowTest : u64 { + F, + LT, + EQ, + LE, + GT, + NE, + GE, + NUM, + NaN, + LTU, + EQU, + LEU, + GTU, + NEU, + GEU, + T, + OFF, + LO, + SFF, + LS, + HI, + SFT, + HS, + OFT, + CSM_TA, + CSM_TR, + CSM_MX, + FCSM_TA, + FCSM_TR, + FCSM_MX, + RLE, + RGT, +}; + +[[nodiscard]] std::string NameOf(FlowTest flow_test); + +} // namespace Shader::IR + +template <> +struct fmt::formatter { + constexpr auto parse(format_parse_context& ctx) { + return ctx.begin(); + } + template + auto format(const Shader::IR::FlowTest& flow_test, FormatContext& ctx) { + return fmt::format_to(ctx.out(), "{}", Shader::IR::NameOf(flow_test)); + } +}; diff --git a/src/shader_recompiler/frontend/ir/ir_emitter.cpp b/src/shader_recompiler/frontend/ir/ir_emitter.cpp new file mode 100755 index 000000000..13159a68d --- /dev/null +++ b/src/shader_recompiler/frontend/ir/ir_emitter.cpp @@ -0,0 +1,2017 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_cast.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/frontend/ir/value.h" + +namespace Shader::IR { +namespace { +[[noreturn]] void ThrowInvalidType(Type type) { + throw InvalidArgument("Invalid type {}", type); +} + +Value MakeLodClampPair(IREmitter& ir, const F32& bias_lod, const F32& lod_clamp) { + if (!bias_lod.IsEmpty() && !lod_clamp.IsEmpty()) { + return ir.CompositeConstruct(bias_lod, lod_clamp); + } else if (!bias_lod.IsEmpty()) { + return bias_lod; + } else if (!lod_clamp.IsEmpty()) { + return lod_clamp; + } else { + return Value{}; + } +} +} // Anonymous namespace + +U1 IREmitter::Imm1(bool value) const { + return U1{Value{value}}; +} + +U8 IREmitter::Imm8(u8 value) const { + return U8{Value{value}}; +} + +U16 IREmitter::Imm16(u16 value) const { + return U16{Value{value}}; +} + +U32 IREmitter::Imm32(u32 value) const { + return U32{Value{value}}; +} + +U32 IREmitter::Imm32(s32 value) const { + return U32{Value{static_cast(value)}}; +} + +F32 IREmitter::Imm32(f32 value) const { + return F32{Value{value}}; +} + +U64 IREmitter::Imm64(u64 value) const { + return U64{Value{value}}; +} + +U64 IREmitter::Imm64(s64 value) const { + return U64{Value{static_cast(value)}}; +} + +F64 IREmitter::Imm64(f64 value) const { + return F64{Value{value}}; +} + +U1 IREmitter::ConditionRef(const U1& value) { + return Inst(Opcode::ConditionRef, value); +} + +void IREmitter::Reference(const Value& value) { + Inst(Opcode::Reference, value); +} + +void IREmitter::PhiMove(IR::Inst& phi, const Value& value) { + Inst(Opcode::PhiMove, Value{&phi}, value); +} + +void IREmitter::Prologue() { + Inst(Opcode::Prologue); +} + +void IREmitter::Epilogue() { + Inst(Opcode::Epilogue); +} + +void IREmitter::DemoteToHelperInvocation() { + Inst(Opcode::DemoteToHelperInvocation); +} + +void IREmitter::EmitVertex(const U32& stream) { + Inst(Opcode::EmitVertex, stream); +} + +void IREmitter::EndPrimitive(const U32& stream) { + Inst(Opcode::EndPrimitive, stream); +} + +void IREmitter::Barrier() { + Inst(Opcode::Barrier); +} + +void IREmitter::WorkgroupMemoryBarrier() { + Inst(Opcode::WorkgroupMemoryBarrier); +} + +void IREmitter::DeviceMemoryBarrier() { + Inst(Opcode::DeviceMemoryBarrier); +} + +U32 IREmitter::GetReg(IR::Reg reg) { + return Inst(Opcode::GetRegister, reg); +} + +void IREmitter::SetReg(IR::Reg reg, const U32& 
value) { + Inst(Opcode::SetRegister, reg, value); +} + +U1 IREmitter::GetPred(IR::Pred pred, bool is_negated) { + if (pred == Pred::PT) { + return Imm1(!is_negated); + } + const U1 value{Inst(Opcode::GetPred, pred)}; + if (is_negated) { + return Inst(Opcode::LogicalNot, value); + } else { + return value; + } +} + +void IREmitter::SetPred(IR::Pred pred, const U1& value) { + if (pred != IR::Pred::PT) { + Inst(Opcode::SetPred, pred, value); + } +} + +U1 IREmitter::GetGotoVariable(u32 id) { + return Inst(Opcode::GetGotoVariable, id); +} + +void IREmitter::SetGotoVariable(u32 id, const U1& value) { + Inst(Opcode::SetGotoVariable, id, value); +} + +U32 IREmitter::GetIndirectBranchVariable() { + return Inst(Opcode::GetIndirectBranchVariable); +} + +void IREmitter::SetIndirectBranchVariable(const U32& value) { + Inst(Opcode::SetIndirectBranchVariable, value); +} + +U32 IREmitter::GetCbuf(const U32& binding, const U32& byte_offset) { + return Inst(Opcode::GetCbufU32, binding, byte_offset); +} + +Value IREmitter::GetCbuf(const U32& binding, const U32& byte_offset, size_t bitsize, + bool is_signed) { + switch (bitsize) { + case 8: + return Inst(is_signed ? Opcode::GetCbufS8 : Opcode::GetCbufU8, binding, byte_offset); + case 16: + return Inst(is_signed ? Opcode::GetCbufS16 : Opcode::GetCbufU16, binding, byte_offset); + case 32: + return Inst(Opcode::GetCbufU32, binding, byte_offset); + case 64: + return Inst(Opcode::GetCbufU32x2, binding, byte_offset); + default: + throw InvalidArgument("Invalid bit size {}", bitsize); + } +} + +F32 IREmitter::GetFloatCbuf(const U32& binding, const U32& byte_offset) { + return Inst(Opcode::GetCbufF32, binding, byte_offset); +} + +U1 IREmitter::GetZFlag() { + return Inst(Opcode::GetZFlag); +} + +U1 IREmitter::GetSFlag() { + return Inst(Opcode::GetSFlag); +} + +U1 IREmitter::GetCFlag() { + return Inst(Opcode::GetCFlag); +} + +U1 IREmitter::GetOFlag() { + return Inst(Opcode::GetOFlag); +} + +void IREmitter::SetZFlag(const U1& value) { + Inst(Opcode::SetZFlag, value); +} + +void IREmitter::SetSFlag(const U1& value) { + Inst(Opcode::SetSFlag, value); +} + +void IREmitter::SetCFlag(const U1& value) { + Inst(Opcode::SetCFlag, value); +} + +void IREmitter::SetOFlag(const U1& value) { + Inst(Opcode::SetOFlag, value); +} + +static U1 GetFlowTest(IREmitter& ir, FlowTest flow_test) { + switch (flow_test) { + case FlowTest::F: + return ir.Imm1(false); + case FlowTest::LT: + return ir.LogicalXor(ir.LogicalAnd(ir.GetSFlag(), ir.LogicalNot(ir.GetZFlag())), + ir.GetOFlag()); + case FlowTest::EQ: + return ir.LogicalAnd(ir.LogicalNot(ir.GetSFlag()), ir.GetZFlag()); + case FlowTest::LE: + return ir.LogicalXor(ir.GetSFlag(), ir.LogicalOr(ir.GetZFlag(), ir.GetOFlag())); + case FlowTest::GT: + return ir.LogicalAnd(ir.LogicalXor(ir.LogicalNot(ir.GetSFlag()), ir.GetOFlag()), + ir.LogicalNot(ir.GetZFlag())); + case FlowTest::NE: + return ir.LogicalNot(ir.GetZFlag()); + case FlowTest::GE: + return ir.LogicalNot(ir.LogicalXor(ir.GetSFlag(), ir.GetOFlag())); + case FlowTest::NUM: + return ir.LogicalOr(ir.LogicalNot(ir.GetSFlag()), ir.LogicalNot(ir.GetZFlag())); + case FlowTest::NaN: + return ir.LogicalAnd(ir.GetSFlag(), ir.GetZFlag()); + case FlowTest::LTU: + return ir.LogicalXor(ir.GetSFlag(), ir.GetOFlag()); + case FlowTest::EQU: + return ir.GetZFlag(); + case FlowTest::LEU: + return ir.LogicalOr(ir.LogicalXor(ir.GetSFlag(), ir.GetOFlag()), ir.GetZFlag()); + case FlowTest::GTU: + return ir.LogicalXor(ir.LogicalNot(ir.GetSFlag()), + ir.LogicalOr(ir.GetZFlag(), ir.GetOFlag())); + case 
FlowTest::NEU: + return ir.LogicalOr(ir.GetSFlag(), ir.LogicalNot(ir.GetZFlag())); + case FlowTest::GEU: + return ir.LogicalXor(ir.LogicalOr(ir.LogicalNot(ir.GetSFlag()), ir.GetZFlag()), + ir.GetOFlag()); + case FlowTest::T: + return ir.Imm1(true); + case FlowTest::OFF: + return ir.LogicalNot(ir.GetOFlag()); + case FlowTest::LO: + return ir.LogicalNot(ir.GetCFlag()); + case FlowTest::SFF: + return ir.LogicalNot(ir.GetSFlag()); + case FlowTest::LS: + return ir.LogicalOr(ir.GetZFlag(), ir.LogicalNot(ir.GetCFlag())); + case FlowTest::HI: + return ir.LogicalAnd(ir.GetCFlag(), ir.LogicalNot(ir.GetZFlag())); + case FlowTest::SFT: + return ir.GetSFlag(); + case FlowTest::HS: + return ir.GetCFlag(); + case FlowTest::OFT: + return ir.GetOFlag(); + case FlowTest::RLE: + return ir.LogicalOr(ir.GetSFlag(), ir.GetZFlag()); + case FlowTest::RGT: + return ir.LogicalAnd(ir.LogicalNot(ir.GetSFlag()), ir.LogicalNot(ir.GetZFlag())); + case FlowTest::FCSM_TR: + LOG_WARNING(Shader, "(STUBBED) FCSM_TR"); + return ir.Imm1(false); + case FlowTest::CSM_TA: + case FlowTest::CSM_TR: + case FlowTest::CSM_MX: + case FlowTest::FCSM_TA: + case FlowTest::FCSM_MX: + default: + throw NotImplementedException("Flow test {}", flow_test); + } +} + +U1 IREmitter::Condition(IR::Condition cond) { + const FlowTest flow_test{cond.GetFlowTest()}; + const auto [pred, is_negated]{cond.GetPred()}; + if (flow_test == FlowTest::T) { + return GetPred(pred, is_negated); + } + return LogicalAnd(GetPred(pred, is_negated), GetFlowTest(*this, flow_test)); +} + +U1 IREmitter::GetFlowTestResult(FlowTest test) { + return GetFlowTest(*this, test); +} + +F32 IREmitter::GetAttribute(IR::Attribute attribute) { + return GetAttribute(attribute, Imm32(0)); +} + +F32 IREmitter::GetAttribute(IR::Attribute attribute, const U32& vertex) { + return Inst(Opcode::GetAttribute, attribute, vertex); +} + +void IREmitter::SetAttribute(IR::Attribute attribute, const F32& value, const U32& vertex) { + Inst(Opcode::SetAttribute, attribute, value, vertex); +} + +F32 IREmitter::GetAttributeIndexed(const U32& phys_address) { + return GetAttributeIndexed(phys_address, Imm32(0)); +} + +F32 IREmitter::GetAttributeIndexed(const U32& phys_address, const U32& vertex) { + return Inst(Opcode::GetAttributeIndexed, phys_address, vertex); +} + +void IREmitter::SetAttributeIndexed(const U32& phys_address, const F32& value, const U32& vertex) { + Inst(Opcode::SetAttributeIndexed, phys_address, value, vertex); +} + +F32 IREmitter::GetPatch(Patch patch) { + return Inst(Opcode::GetPatch, patch); +} + +void IREmitter::SetPatch(Patch patch, const F32& value) { + Inst(Opcode::SetPatch, patch, value); +} + +void IREmitter::SetFragColor(u32 index, u32 component, const F32& value) { + Inst(Opcode::SetFragColor, Imm32(index), Imm32(component), value); +} + +void IREmitter::SetSampleMask(const U32& value) { + Inst(Opcode::SetSampleMask, value); +} + +void IREmitter::SetFragDepth(const F32& value) { + Inst(Opcode::SetFragDepth, value); +} + +U32 IREmitter::WorkgroupIdX() { + return U32{CompositeExtract(Inst(Opcode::WorkgroupId), 0)}; +} + +U32 IREmitter::WorkgroupIdY() { + return U32{CompositeExtract(Inst(Opcode::WorkgroupId), 1)}; +} + +U32 IREmitter::WorkgroupIdZ() { + return U32{CompositeExtract(Inst(Opcode::WorkgroupId), 2)}; +} + +Value IREmitter::LocalInvocationId() { + return Inst(Opcode::LocalInvocationId); +} + +U32 IREmitter::LocalInvocationIdX() { + return U32{CompositeExtract(Inst(Opcode::LocalInvocationId), 0)}; +} + +U32 IREmitter::LocalInvocationIdY() { + return 
U32{CompositeExtract(Inst(Opcode::LocalInvocationId), 1)}; +} + +U32 IREmitter::LocalInvocationIdZ() { + return U32{CompositeExtract(Inst(Opcode::LocalInvocationId), 2)}; +} + +U32 IREmitter::InvocationId() { + return Inst(Opcode::InvocationId); +} + +U32 IREmitter::SampleId() { + return Inst(Opcode::SampleId); +} + +U1 IREmitter::IsHelperInvocation() { + return Inst(Opcode::IsHelperInvocation); +} + +F32 IREmitter::YDirection() { + return Inst(Opcode::YDirection); +} + +U32 IREmitter::LaneId() { + return Inst(Opcode::LaneId); +} + +U32 IREmitter::LoadGlobalU8(const U64& address) { + return Inst(Opcode::LoadGlobalU8, address); +} + +U32 IREmitter::LoadGlobalS8(const U64& address) { + return Inst(Opcode::LoadGlobalS8, address); +} + +U32 IREmitter::LoadGlobalU16(const U64& address) { + return Inst(Opcode::LoadGlobalU16, address); +} + +U32 IREmitter::LoadGlobalS16(const U64& address) { + return Inst(Opcode::LoadGlobalS16, address); +} + +U32 IREmitter::LoadGlobal32(const U64& address) { + return Inst(Opcode::LoadGlobal32, address); +} + +Value IREmitter::LoadGlobal64(const U64& address) { + return Inst(Opcode::LoadGlobal64, address); +} + +Value IREmitter::LoadGlobal128(const U64& address) { + return Inst(Opcode::LoadGlobal128, address); +} + +void IREmitter::WriteGlobalU8(const U64& address, const U32& value) { + Inst(Opcode::WriteGlobalU8, address, value); +} + +void IREmitter::WriteGlobalS8(const U64& address, const U32& value) { + Inst(Opcode::WriteGlobalS8, address, value); +} + +void IREmitter::WriteGlobalU16(const U64& address, const U32& value) { + Inst(Opcode::WriteGlobalU16, address, value); +} + +void IREmitter::WriteGlobalS16(const U64& address, const U32& value) { + Inst(Opcode::WriteGlobalS16, address, value); +} + +void IREmitter::WriteGlobal32(const U64& address, const U32& value) { + Inst(Opcode::WriteGlobal32, address, value); +} + +void IREmitter::WriteGlobal64(const U64& address, const IR::Value& vector) { + Inst(Opcode::WriteGlobal64, address, vector); +} + +void IREmitter::WriteGlobal128(const U64& address, const IR::Value& vector) { + Inst(Opcode::WriteGlobal128, address, vector); +} + +U32 IREmitter::LoadLocal(const IR::U32& word_offset) { + return Inst(Opcode::LoadLocal, word_offset); +} + +void IREmitter::WriteLocal(const IR::U32& word_offset, const IR::U32& value) { + Inst(Opcode::WriteLocal, word_offset, value); +} + +Value IREmitter::LoadShared(int bit_size, bool is_signed, const IR::U32& offset) { + switch (bit_size) { + case 8: + return Inst(is_signed ? Opcode::LoadSharedS8 : Opcode::LoadSharedU8, offset); + case 16: + return Inst(is_signed ? 
Opcode::LoadSharedS16 : Opcode::LoadSharedU16, offset); + case 32: + return Inst(Opcode::LoadSharedU32, offset); + case 64: + return Inst(Opcode::LoadSharedU64, offset); + case 128: + return Inst(Opcode::LoadSharedU128, offset); + } + throw InvalidArgument("Invalid bit size {}", bit_size); +} + +void IREmitter::WriteShared(int bit_size, const IR::U32& offset, const IR::Value& value) { + switch (bit_size) { + case 8: + Inst(Opcode::WriteSharedU8, offset, value); + break; + case 16: + Inst(Opcode::WriteSharedU16, offset, value); + break; + case 32: + Inst(Opcode::WriteSharedU32, offset, value); + break; + case 64: + Inst(Opcode::WriteSharedU64, offset, value); + break; + case 128: + Inst(Opcode::WriteSharedU128, offset, value); + break; + default: + throw InvalidArgument("Invalid bit size {}", bit_size); + } +} + +U1 IREmitter::GetZeroFromOp(const Value& op) { + return Inst(Opcode::GetZeroFromOp, op); +} + +U1 IREmitter::GetSignFromOp(const Value& op) { + return Inst(Opcode::GetSignFromOp, op); +} + +U1 IREmitter::GetCarryFromOp(const Value& op) { + return Inst(Opcode::GetCarryFromOp, op); +} + +U1 IREmitter::GetOverflowFromOp(const Value& op) { + return Inst(Opcode::GetOverflowFromOp, op); +} + +U1 IREmitter::GetSparseFromOp(const Value& op) { + return Inst(Opcode::GetSparseFromOp, op); +} + +U1 IREmitter::GetInBoundsFromOp(const Value& op) { + return Inst(Opcode::GetInBoundsFromOp, op); +} + +F16F32F64 IREmitter::FPAdd(const F16F32F64& a, const F16F32F64& b, FpControl control) { + if (a.Type() != b.Type()) { + throw InvalidArgument("Mismatching types {} and {}", a.Type(), b.Type()); + } + switch (a.Type()) { + case Type::F16: + return Inst(Opcode::FPAdd16, Flags{control}, a, b); + case Type::F32: + return Inst(Opcode::FPAdd32, Flags{control}, a, b); + case Type::F64: + return Inst(Opcode::FPAdd64, Flags{control}, a, b); + default: + ThrowInvalidType(a.Type()); + } +} + +Value IREmitter::CompositeConstruct(const Value& e1, const Value& e2) { + if (e1.Type() != e2.Type()) { + throw InvalidArgument("Mismatching types {} and {}", e1.Type(), e2.Type()); + } + switch (e1.Type()) { + case Type::U32: + return Inst(Opcode::CompositeConstructU32x2, e1, e2); + case Type::F16: + return Inst(Opcode::CompositeConstructF16x2, e1, e2); + case Type::F32: + return Inst(Opcode::CompositeConstructF32x2, e1, e2); + case Type::F64: + return Inst(Opcode::CompositeConstructF64x2, e1, e2); + default: + ThrowInvalidType(e1.Type()); + } +} + +Value IREmitter::CompositeConstruct(const Value& e1, const Value& e2, const Value& e3) { + if (e1.Type() != e2.Type() || e1.Type() != e3.Type()) { + throw InvalidArgument("Mismatching types {}, {}, and {}", e1.Type(), e2.Type(), e3.Type()); + } + switch (e1.Type()) { + case Type::U32: + return Inst(Opcode::CompositeConstructU32x3, e1, e2, e3); + case Type::F16: + return Inst(Opcode::CompositeConstructF16x3, e1, e2, e3); + case Type::F32: + return Inst(Opcode::CompositeConstructF32x3, e1, e2, e3); + case Type::F64: + return Inst(Opcode::CompositeConstructF64x3, e1, e2, e3); + default: + ThrowInvalidType(e1.Type()); + } +} + +Value IREmitter::CompositeConstruct(const Value& e1, const Value& e2, const Value& e3, + const Value& e4) { + if (e1.Type() != e2.Type() || e1.Type() != e3.Type() || e1.Type() != e4.Type()) { + throw InvalidArgument("Mismatching types {}, {}, {}, and {}", e1.Type(), e2.Type(), + e3.Type(), e4.Type()); + } + switch (e1.Type()) { + case Type::U32: + return Inst(Opcode::CompositeConstructU32x4, e1, e2, e3, e4); + case Type::F16: + return 
Inst(Opcode::CompositeConstructF16x4, e1, e2, e3, e4); + case Type::F32: + return Inst(Opcode::CompositeConstructF32x4, e1, e2, e3, e4); + case Type::F64: + return Inst(Opcode::CompositeConstructF64x4, e1, e2, e3, e4); + default: + ThrowInvalidType(e1.Type()); + } +} + +Value IREmitter::CompositeExtract(const Value& vector, size_t element) { + const auto read{[&](Opcode opcode, size_t limit) -> Value { + if (element >= limit) { + throw InvalidArgument("Out of bounds element {}", element); + } + return Inst(opcode, vector, Value{static_cast(element)}); + }}; + switch (vector.Type()) { + case Type::U32x2: + return read(Opcode::CompositeExtractU32x2, 2); + case Type::U32x3: + return read(Opcode::CompositeExtractU32x3, 3); + case Type::U32x4: + return read(Opcode::CompositeExtractU32x4, 4); + case Type::F16x2: + return read(Opcode::CompositeExtractF16x2, 2); + case Type::F16x3: + return read(Opcode::CompositeExtractF16x3, 3); + case Type::F16x4: + return read(Opcode::CompositeExtractF16x4, 4); + case Type::F32x2: + return read(Opcode::CompositeExtractF32x2, 2); + case Type::F32x3: + return read(Opcode::CompositeExtractF32x3, 3); + case Type::F32x4: + return read(Opcode::CompositeExtractF32x4, 4); + case Type::F64x2: + return read(Opcode::CompositeExtractF64x2, 2); + case Type::F64x3: + return read(Opcode::CompositeExtractF64x3, 3); + case Type::F64x4: + return read(Opcode::CompositeExtractF64x4, 4); + default: + ThrowInvalidType(vector.Type()); + } +} + +Value IREmitter::CompositeInsert(const Value& vector, const Value& object, size_t element) { + const auto insert{[&](Opcode opcode, size_t limit) { + if (element >= limit) { + throw InvalidArgument("Out of bounds element {}", element); + } + return Inst(opcode, vector, object, Value{static_cast(element)}); + }}; + switch (vector.Type()) { + case Type::U32x2: + return insert(Opcode::CompositeInsertU32x2, 2); + case Type::U32x3: + return insert(Opcode::CompositeInsertU32x3, 3); + case Type::U32x4: + return insert(Opcode::CompositeInsertU32x4, 4); + case Type::F16x2: + return insert(Opcode::CompositeInsertF16x2, 2); + case Type::F16x3: + return insert(Opcode::CompositeInsertF16x3, 3); + case Type::F16x4: + return insert(Opcode::CompositeInsertF16x4, 4); + case Type::F32x2: + return insert(Opcode::CompositeInsertF32x2, 2); + case Type::F32x3: + return insert(Opcode::CompositeInsertF32x3, 3); + case Type::F32x4: + return insert(Opcode::CompositeInsertF32x4, 4); + case Type::F64x2: + return insert(Opcode::CompositeInsertF64x2, 2); + case Type::F64x3: + return insert(Opcode::CompositeInsertF64x3, 3); + case Type::F64x4: + return insert(Opcode::CompositeInsertF64x4, 4); + default: + ThrowInvalidType(vector.Type()); + } +} + +Value IREmitter::Select(const U1& condition, const Value& true_value, const Value& false_value) { + if (true_value.Type() != false_value.Type()) { + throw InvalidArgument("Mismatching types {} and {}", true_value.Type(), false_value.Type()); + } + switch (true_value.Type()) { + case Type::U1: + return Inst(Opcode::SelectU1, condition, true_value, false_value); + case Type::U8: + return Inst(Opcode::SelectU8, condition, true_value, false_value); + case Type::U16: + return Inst(Opcode::SelectU16, condition, true_value, false_value); + case Type::U32: + return Inst(Opcode::SelectU32, condition, true_value, false_value); + case Type::U64: + return Inst(Opcode::SelectU64, condition, true_value, false_value); + case Type::F32: + return Inst(Opcode::SelectF32, condition, true_value, false_value); + case Type::F64: + return 
Inst(Opcode::SelectF64, condition, true_value, false_value); + default: + throw InvalidArgument("Invalid type {}", true_value.Type()); + } +} + +template <> +IR::U32 IREmitter::BitCast(const IR::F32& value) { + return Inst(Opcode::BitCastU32F32, value); +} + +template <> +IR::F32 IREmitter::BitCast(const IR::U32& value) { + return Inst(Opcode::BitCastF32U32, value); +} + +template <> +IR::U16 IREmitter::BitCast(const IR::F16& value) { + return Inst(Opcode::BitCastU16F16, value); +} + +template <> +IR::F16 IREmitter::BitCast(const IR::U16& value) { + return Inst(Opcode::BitCastF16U16, value); +} + +template <> +IR::U64 IREmitter::BitCast(const IR::F64& value) { + return Inst(Opcode::BitCastU64F64, value); +} + +template <> +IR::F64 IREmitter::BitCast(const IR::U64& value) { + return Inst(Opcode::BitCastF64U64, value); +} + +U64 IREmitter::PackUint2x32(const Value& vector) { + return Inst(Opcode::PackUint2x32, vector); +} + +Value IREmitter::UnpackUint2x32(const U64& value) { + return Inst(Opcode::UnpackUint2x32, value); +} + +U32 IREmitter::PackFloat2x16(const Value& vector) { + return Inst(Opcode::PackFloat2x16, vector); +} + +Value IREmitter::UnpackFloat2x16(const U32& value) { + return Inst(Opcode::UnpackFloat2x16, value); +} + +U32 IREmitter::PackHalf2x16(const Value& vector) { + return Inst(Opcode::PackHalf2x16, vector); +} + +Value IREmitter::UnpackHalf2x16(const U32& value) { + return Inst(Opcode::UnpackHalf2x16, value); +} + +F64 IREmitter::PackDouble2x32(const Value& vector) { + return Inst(Opcode::PackDouble2x32, vector); +} + +Value IREmitter::UnpackDouble2x32(const F64& value) { + return Inst(Opcode::UnpackDouble2x32, value); +} + +F16F32F64 IREmitter::FPMul(const F16F32F64& a, const F16F32F64& b, FpControl control) { + if (a.Type() != b.Type()) { + throw InvalidArgument("Mismatching types {} and {}", a.Type(), b.Type()); + } + switch (a.Type()) { + case Type::F16: + return Inst(Opcode::FPMul16, Flags{control}, a, b); + case Type::F32: + return Inst(Opcode::FPMul32, Flags{control}, a, b); + case Type::F64: + return Inst(Opcode::FPMul64, Flags{control}, a, b); + default: + ThrowInvalidType(a.Type()); + } +} + +F16F32F64 IREmitter::FPFma(const F16F32F64& a, const F16F32F64& b, const F16F32F64& c, + FpControl control) { + if (a.Type() != b.Type() || a.Type() != c.Type()) { + throw InvalidArgument("Mismatching types {}, {}, and {}", a.Type(), b.Type(), c.Type()); + } + switch (a.Type()) { + case Type::F16: + return Inst(Opcode::FPFma16, Flags{control}, a, b, c); + case Type::F32: + return Inst(Opcode::FPFma32, Flags{control}, a, b, c); + case Type::F64: + return Inst(Opcode::FPFma64, Flags{control}, a, b, c); + default: + ThrowInvalidType(a.Type()); + } +} + +F16F32F64 IREmitter::FPAbs(const F16F32F64& value) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPAbs16, value); + case Type::F32: + return Inst(Opcode::FPAbs32, value); + case Type::F64: + return Inst(Opcode::FPAbs64, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F16F32F64 IREmitter::FPNeg(const F16F32F64& value) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPNeg16, value); + case Type::F32: + return Inst(Opcode::FPNeg32, value); + case Type::F64: + return Inst(Opcode::FPNeg64, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F16F32F64 IREmitter::FPAbsNeg(const F16F32F64& value, bool abs, bool neg) { + F16F32F64 result{value}; + if (abs) { + result = FPAbs(result); + } + if (neg) { + result = FPNeg(result); + } + return result; +} + +F32 
IREmitter::FPCos(const F32& value) { + return Inst(Opcode::FPCos, value); +} + +F32 IREmitter::FPSin(const F32& value) { + return Inst(Opcode::FPSin, value); +} + +F32 IREmitter::FPExp2(const F32& value) { + return Inst(Opcode::FPExp2, value); +} + +F32 IREmitter::FPLog2(const F32& value) { + return Inst(Opcode::FPLog2, value); +} + +F32F64 IREmitter::FPRecip(const F32F64& value) { + switch (value.Type()) { + case Type::F32: + return Inst(Opcode::FPRecip32, value); + case Type::F64: + return Inst(Opcode::FPRecip64, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F32F64 IREmitter::FPRecipSqrt(const F32F64& value) { + switch (value.Type()) { + case Type::F32: + return Inst(Opcode::FPRecipSqrt32, value); + case Type::F64: + return Inst(Opcode::FPRecipSqrt64, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F32 IREmitter::FPSqrt(const F32& value) { + return Inst(Opcode::FPSqrt, value); +} + +F16F32F64 IREmitter::FPSaturate(const F16F32F64& value) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPSaturate16, value); + case Type::F32: + return Inst(Opcode::FPSaturate32, value); + case Type::F64: + return Inst(Opcode::FPSaturate64, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F16F32F64 IREmitter::FPClamp(const F16F32F64& value, const F16F32F64& min_value, + const F16F32F64& max_value) { + if (value.Type() != min_value.Type() || value.Type() != max_value.Type()) { + throw InvalidArgument("Mismatching types {}, {}, and {}", value.Type(), min_value.Type(), + max_value.Type()); + } + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPClamp16, value, min_value, max_value); + case Type::F32: + return Inst(Opcode::FPClamp32, value, min_value, max_value); + case Type::F64: + return Inst(Opcode::FPClamp64, value, min_value, max_value); + default: + ThrowInvalidType(value.Type()); + } +} + +F16F32F64 IREmitter::FPRoundEven(const F16F32F64& value, FpControl control) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPRoundEven16, Flags{control}, value); + case Type::F32: + return Inst(Opcode::FPRoundEven32, Flags{control}, value); + case Type::F64: + return Inst(Opcode::FPRoundEven64, Flags{control}, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F16F32F64 IREmitter::FPFloor(const F16F32F64& value, FpControl control) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPFloor16, Flags{control}, value); + case Type::F32: + return Inst(Opcode::FPFloor32, Flags{control}, value); + case Type::F64: + return Inst(Opcode::FPFloor64, Flags{control}, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F16F32F64 IREmitter::FPCeil(const F16F32F64& value, FpControl control) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPCeil16, Flags{control}, value); + case Type::F32: + return Inst(Opcode::FPCeil32, Flags{control}, value); + case Type::F64: + return Inst(Opcode::FPCeil64, Flags{control}, value); + default: + ThrowInvalidType(value.Type()); + } +} + +F16F32F64 IREmitter::FPTrunc(const F16F32F64& value, FpControl control) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPTrunc16, Flags{control}, value); + case Type::F32: + return Inst(Opcode::FPTrunc32, Flags{control}, value); + case Type::F64: + return Inst(Opcode::FPTrunc64, Flags{control}, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U1 IREmitter::FPEqual(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control, bool ordered) { + if (lhs.Type() != rhs.Type()) { 
+ throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F16: + return Inst(ordered ? Opcode::FPOrdEqual16 : Opcode::FPUnordEqual16, Flags{control}, + lhs, rhs); + case Type::F32: + return Inst(ordered ? Opcode::FPOrdEqual32 : Opcode::FPUnordEqual32, Flags{control}, + lhs, rhs); + case Type::F64: + return Inst(ordered ? Opcode::FPOrdEqual64 : Opcode::FPUnordEqual64, Flags{control}, + lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +U1 IREmitter::FPNotEqual(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control, + bool ordered) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F16: + return Inst(ordered ? Opcode::FPOrdNotEqual16 : Opcode::FPUnordNotEqual16, + Flags{control}, lhs, rhs); + case Type::F32: + return Inst(ordered ? Opcode::FPOrdNotEqual32 : Opcode::FPUnordNotEqual32, + Flags{control}, lhs, rhs); + case Type::F64: + return Inst(ordered ? Opcode::FPOrdNotEqual64 : Opcode::FPUnordNotEqual64, + Flags{control}, lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +U1 IREmitter::FPLessThan(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control, + bool ordered) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F16: + return Inst(ordered ? Opcode::FPOrdLessThan16 : Opcode::FPUnordLessThan16, + Flags{control}, lhs, rhs); + case Type::F32: + return Inst(ordered ? Opcode::FPOrdLessThan32 : Opcode::FPUnordLessThan32, + Flags{control}, lhs, rhs); + case Type::F64: + return Inst(ordered ? Opcode::FPOrdLessThan64 : Opcode::FPUnordLessThan64, + Flags{control}, lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +U1 IREmitter::FPGreaterThan(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control, + bool ordered) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F16: + return Inst(ordered ? Opcode::FPOrdGreaterThan16 : Opcode::FPUnordGreaterThan16, + Flags{control}, lhs, rhs); + case Type::F32: + return Inst(ordered ? Opcode::FPOrdGreaterThan32 : Opcode::FPUnordGreaterThan32, + Flags{control}, lhs, rhs); + case Type::F64: + return Inst(ordered ? Opcode::FPOrdGreaterThan64 : Opcode::FPUnordGreaterThan64, + Flags{control}, lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +U1 IREmitter::FPLessThanEqual(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control, + bool ordered) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F16: + return Inst(ordered ? Opcode::FPOrdLessThanEqual16 : Opcode::FPUnordLessThanEqual16, + Flags{control}, lhs, rhs); + case Type::F32: + return Inst(ordered ? Opcode::FPOrdLessThanEqual32 : Opcode::FPUnordLessThanEqual32, + Flags{control}, lhs, rhs); + case Type::F64: + return Inst(ordered ? 
Opcode::FPOrdLessThanEqual64 : Opcode::FPUnordLessThanEqual64, + Flags{control}, lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +U1 IREmitter::FPGreaterThanEqual(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control, + bool ordered) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F16: + return Inst(ordered ? Opcode::FPOrdGreaterThanEqual16 + : Opcode::FPUnordGreaterThanEqual16, + Flags{control}, lhs, rhs); + case Type::F32: + return Inst(ordered ? Opcode::FPOrdGreaterThanEqual32 + : Opcode::FPUnordGreaterThanEqual32, + Flags{control}, lhs, rhs); + case Type::F64: + return Inst(ordered ? Opcode::FPOrdGreaterThanEqual64 + : Opcode::FPUnordGreaterThanEqual64, + Flags{control}, lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +U1 IREmitter::FPIsNan(const F16F32F64& value) { + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::FPIsNan16, value); + case Type::F32: + return Inst(Opcode::FPIsNan32, value); + case Type::F64: + return Inst(Opcode::FPIsNan64, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U1 IREmitter::FPOrdered(const F16F32F64& lhs, const F16F32F64& rhs) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + return LogicalAnd(LogicalNot(FPIsNan(lhs)), LogicalNot(FPIsNan(rhs))); +} + +U1 IREmitter::FPUnordered(const F16F32F64& lhs, const F16F32F64& rhs) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + return LogicalOr(FPIsNan(lhs), FPIsNan(rhs)); +} + +F32F64 IREmitter::FPMax(const F32F64& lhs, const F32F64& rhs, FpControl control) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F32: + return Inst(Opcode::FPMax32, Flags{control}, lhs, rhs); + case Type::F64: + return Inst(Opcode::FPMax64, Flags{control}, lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +F32F64 IREmitter::FPMin(const F32F64& lhs, const F32F64& rhs, FpControl control) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::F32: + return Inst(Opcode::FPMin32, Flags{control}, lhs, rhs); + case Type::F64: + return Inst(Opcode::FPMin64, Flags{control}, lhs, rhs); + default: + ThrowInvalidType(lhs.Type()); + } +} + +U32U64 IREmitter::IAdd(const U32U64& a, const U32U64& b) { + if (a.Type() != b.Type()) { + throw InvalidArgument("Mismatching types {} and {}", a.Type(), b.Type()); + } + switch (a.Type()) { + case Type::U32: + return Inst(Opcode::IAdd32, a, b); + case Type::U64: + return Inst(Opcode::IAdd64, a, b); + default: + ThrowInvalidType(a.Type()); + } +} + +U32U64 IREmitter::ISub(const U32U64& a, const U32U64& b) { + if (a.Type() != b.Type()) { + throw InvalidArgument("Mismatching types {} and {}", a.Type(), b.Type()); + } + switch (a.Type()) { + case Type::U32: + return Inst(Opcode::ISub32, a, b); + case Type::U64: + return Inst(Opcode::ISub64, a, b); + default: + ThrowInvalidType(a.Type()); + } +} + +U32 IREmitter::IMul(const U32& a, const U32& b) { + return Inst(Opcode::IMul32, a, b); +} + +U32U64 IREmitter::INeg(const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::INeg32, value); + case Type::U64: + return Inst(Opcode::INeg64, value); + default: + 
ThrowInvalidType(value.Type()); + } +} + +U32 IREmitter::IAbs(const U32& value) { + return Inst(Opcode::IAbs32, value); +} + +U32U64 IREmitter::ShiftLeftLogical(const U32U64& base, const U32& shift) { + switch (base.Type()) { + case Type::U32: + return Inst(Opcode::ShiftLeftLogical32, base, shift); + case Type::U64: + return Inst(Opcode::ShiftLeftLogical64, base, shift); + default: + ThrowInvalidType(base.Type()); + } +} + +U32U64 IREmitter::ShiftRightLogical(const U32U64& base, const U32& shift) { + switch (base.Type()) { + case Type::U32: + return Inst(Opcode::ShiftRightLogical32, base, shift); + case Type::U64: + return Inst(Opcode::ShiftRightLogical64, base, shift); + default: + ThrowInvalidType(base.Type()); + } +} + +U32U64 IREmitter::ShiftRightArithmetic(const U32U64& base, const U32& shift) { + switch (base.Type()) { + case Type::U32: + return Inst(Opcode::ShiftRightArithmetic32, base, shift); + case Type::U64: + return Inst(Opcode::ShiftRightArithmetic64, base, shift); + default: + ThrowInvalidType(base.Type()); + } +} + +U32 IREmitter::BitwiseAnd(const U32& a, const U32& b) { + return Inst(Opcode::BitwiseAnd32, a, b); +} + +U32 IREmitter::BitwiseOr(const U32& a, const U32& b) { + return Inst(Opcode::BitwiseOr32, a, b); +} + +U32 IREmitter::BitwiseXor(const U32& a, const U32& b) { + return Inst(Opcode::BitwiseXor32, a, b); +} + +U32 IREmitter::BitFieldInsert(const U32& base, const U32& insert, const U32& offset, + const U32& count) { + return Inst(Opcode::BitFieldInsert, base, insert, offset, count); +} + +U32 IREmitter::BitFieldExtract(const U32& base, const U32& offset, const U32& count, + bool is_signed) { + return Inst(is_signed ? Opcode::BitFieldSExtract : Opcode::BitFieldUExtract, base, offset, + count); +} + +U32 IREmitter::BitReverse(const U32& value) { + return Inst(Opcode::BitReverse32, value); +} + +U32 IREmitter::BitCount(const U32& value) { + return Inst(Opcode::BitCount32, value); +} + +U32 IREmitter::BitwiseNot(const U32& value) { + return Inst(Opcode::BitwiseNot32, value); +} + +U32 IREmitter::FindSMsb(const U32& value) { + return Inst(Opcode::FindSMsb32, value); +} + +U32 IREmitter::FindUMsb(const U32& value) { + return Inst(Opcode::FindUMsb32, value); +} + +U32 IREmitter::SMin(const U32& a, const U32& b) { + return Inst(Opcode::SMin32, a, b); +} + +U32 IREmitter::UMin(const U32& a, const U32& b) { + return Inst(Opcode::UMin32, a, b); +} + +U32 IREmitter::IMin(const U32& a, const U32& b, bool is_signed) { + return is_signed ? SMin(a, b) : UMin(a, b); +} + +U32 IREmitter::SMax(const U32& a, const U32& b) { + return Inst(Opcode::SMax32, a, b); +} + +U32 IREmitter::UMax(const U32& a, const U32& b) { + return Inst(Opcode::UMax32, a, b); +} + +U32 IREmitter::IMax(const U32& a, const U32& b, bool is_signed) { + return is_signed ? SMax(a, b) : UMax(a, b); +} + +U32 IREmitter::SClamp(const U32& value, const U32& min, const U32& max) { + return Inst(Opcode::SClamp32, value, min, max); +} + +U32 IREmitter::UClamp(const U32& value, const U32& min, const U32& max) { + return Inst(Opcode::UClamp32, value, min, max); +} + +U1 IREmitter::ILessThan(const U32& lhs, const U32& rhs, bool is_signed) { + return Inst(is_signed ? 
Opcode::SLessThan : Opcode::ULessThan, lhs, rhs); +} + +U1 IREmitter::IEqual(const U32U64& lhs, const U32U64& rhs) { + if (lhs.Type() != rhs.Type()) { + throw InvalidArgument("Mismatching types {} and {}", lhs.Type(), rhs.Type()); + } + switch (lhs.Type()) { + case Type::U32: + return Inst(Opcode::IEqual, lhs, rhs); + case Type::U64: { + // Manually compare the unpacked values + const Value lhs_vector{UnpackUint2x32(lhs)}; + const Value rhs_vector{UnpackUint2x32(rhs)}; + return LogicalAnd(IEqual(IR::U32{CompositeExtract(lhs_vector, 0)}, + IR::U32{CompositeExtract(rhs_vector, 0)}), + IEqual(IR::U32{CompositeExtract(lhs_vector, 1)}, + IR::U32{CompositeExtract(rhs_vector, 1)})); + } + default: + ThrowInvalidType(lhs.Type()); + } +} + +U1 IREmitter::ILessThanEqual(const U32& lhs, const U32& rhs, bool is_signed) { + return Inst(is_signed ? Opcode::SLessThanEqual : Opcode::ULessThanEqual, lhs, rhs); +} + +U1 IREmitter::IGreaterThan(const U32& lhs, const U32& rhs, bool is_signed) { + return Inst(is_signed ? Opcode::SGreaterThan : Opcode::UGreaterThan, lhs, rhs); +} + +U1 IREmitter::INotEqual(const U32& lhs, const U32& rhs) { + return Inst(Opcode::INotEqual, lhs, rhs); +} + +U1 IREmitter::IGreaterThanEqual(const U32& lhs, const U32& rhs, bool is_signed) { + return Inst(is_signed ? Opcode::SGreaterThanEqual : Opcode::UGreaterThanEqual, lhs, rhs); +} + +U32 IREmitter::SharedAtomicIAdd(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicIAdd32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicSMin(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicSMin32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicUMin(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicUMin32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicIMin(const U32& pointer_offset, const U32& value, bool is_signed) { + return is_signed ? SharedAtomicSMin(pointer_offset, value) + : SharedAtomicUMin(pointer_offset, value); +} + +U32 IREmitter::SharedAtomicSMax(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicSMax32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicUMax(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicUMax32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicIMax(const U32& pointer_offset, const U32& value, bool is_signed) { + return is_signed ? 
SharedAtomicSMax(pointer_offset, value) + : SharedAtomicUMax(pointer_offset, value); +} + +U32 IREmitter::SharedAtomicInc(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicInc32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicDec(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicDec32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicAnd(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicAnd32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicOr(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicOr32, pointer_offset, value); +} + +U32 IREmitter::SharedAtomicXor(const U32& pointer_offset, const U32& value) { + return Inst(Opcode::SharedAtomicXor32, pointer_offset, value); +} + +U32U64 IREmitter::SharedAtomicExchange(const U32& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::SharedAtomicExchange32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::SharedAtomicExchange64, pointer_offset, value); + default: + ThrowInvalidType(pointer_offset.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicIAdd(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicIAdd32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicIAdd64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicSMin(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicSMin32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicSMin64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicUMin(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicUMin32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicUMin64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicIMin(const U64& pointer_offset, const U32U64& value, bool is_signed) { + return is_signed ? GlobalAtomicSMin(pointer_offset, value) + : GlobalAtomicUMin(pointer_offset, value); +} + +U32U64 IREmitter::GlobalAtomicSMax(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicSMax32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicSMax64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicUMax(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicUMax32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicUMax64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicIMax(const U64& pointer_offset, const U32U64& value, bool is_signed) { + return is_signed ? 
GlobalAtomicSMax(pointer_offset, value) + : GlobalAtomicUMax(pointer_offset, value); +} + +U32 IREmitter::GlobalAtomicInc(const U64& pointer_offset, const U32& value) { + return Inst(Opcode::GlobalAtomicInc32, pointer_offset, value); +} + +U32 IREmitter::GlobalAtomicDec(const U64& pointer_offset, const U32& value) { + return Inst(Opcode::GlobalAtomicDec32, pointer_offset, value); +} + +U32U64 IREmitter::GlobalAtomicAnd(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicAnd32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicAnd64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicOr(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicOr32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicOr64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicXor(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicXor32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicXor64, pointer_offset, value); + default: + ThrowInvalidType(value.Type()); + } +} + +U32U64 IREmitter::GlobalAtomicExchange(const U64& pointer_offset, const U32U64& value) { + switch (value.Type()) { + case Type::U32: + return Inst(Opcode::GlobalAtomicExchange32, pointer_offset, value); + case Type::U64: + return Inst(Opcode::GlobalAtomicExchange64, pointer_offset, value); + default: + ThrowInvalidType(pointer_offset.Type()); + } +} + +F32 IREmitter::GlobalAtomicF32Add(const U64& pointer_offset, const Value& value, + const FpControl control) { + return Inst(Opcode::GlobalAtomicAddF32, Flags{control}, pointer_offset, value); +} + +Value IREmitter::GlobalAtomicF16x2Add(const U64& pointer_offset, const Value& value, + const FpControl control) { + return Inst(Opcode::GlobalAtomicAddF16x2, Flags{control}, pointer_offset, value); +} + +Value IREmitter::GlobalAtomicF16x2Min(const U64& pointer_offset, const Value& value, + const FpControl control) { + return Inst(Opcode::GlobalAtomicMinF16x2, Flags{control}, pointer_offset, value); +} + +Value IREmitter::GlobalAtomicF16x2Max(const U64& pointer_offset, const Value& value, + const FpControl control) { + return Inst(Opcode::GlobalAtomicMaxF16x2, Flags{control}, pointer_offset, value); +} + +U1 IREmitter::LogicalOr(const U1& a, const U1& b) { + return Inst(Opcode::LogicalOr, a, b); +} + +U1 IREmitter::LogicalAnd(const U1& a, const U1& b) { + return Inst(Opcode::LogicalAnd, a, b); +} + +U1 IREmitter::LogicalXor(const U1& a, const U1& b) { + return Inst(Opcode::LogicalXor, a, b); +} + +U1 IREmitter::LogicalNot(const U1& value) { + return Inst(Opcode::LogicalNot, value); +} + +U32U64 IREmitter::ConvertFToS(size_t bitsize, const F16F32F64& value) { + switch (bitsize) { + case 16: + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::ConvertS16F16, value); + case Type::F32: + return Inst(Opcode::ConvertS16F32, value); + case Type::F64: + return Inst(Opcode::ConvertS16F64, value); + default: + ThrowInvalidType(value.Type()); + } + case 32: + switch (value.Type()) { + case Type::F16: + return Inst(Opcode::ConvertS32F16, value); + case Type::F32: + return Inst(Opcode::ConvertS32F32, value); + case Type::F64: + return Inst(Opcode::ConvertS32F64, value); + default: + 
+
+U32U64 IREmitter::ConvertFToS(size_t bitsize, const F16F32F64& value) {
+    switch (bitsize) {
+    case 16:
+        switch (value.Type()) {
+        case Type::F16:
+            return Inst<U32>(Opcode::ConvertS16F16, value);
+        case Type::F32:
+            return Inst<U32>(Opcode::ConvertS16F32, value);
+        case Type::F64:
+            return Inst<U32>(Opcode::ConvertS16F64, value);
+        default:
+            ThrowInvalidType(value.Type());
+        }
+    case 32:
+        switch (value.Type()) {
+        case Type::F16:
+            return Inst<U32>(Opcode::ConvertS32F16, value);
+        case Type::F32:
+            return Inst<U32>(Opcode::ConvertS32F32, value);
+        case Type::F64:
+            return Inst<U32>(Opcode::ConvertS32F64, value);
+        default:
+            ThrowInvalidType(value.Type());
+        }
+    case 64:
+        switch (value.Type()) {
+        case Type::F16:
+            return Inst<U64>(Opcode::ConvertS64F16, value);
+        case Type::F32:
+            return Inst<U64>(Opcode::ConvertS64F32, value);
+        case Type::F64:
+            return Inst<U64>(Opcode::ConvertS64F64, value);
+        default:
+            ThrowInvalidType(value.Type());
+        }
+    default:
+        throw InvalidArgument("Invalid destination bitsize {}", bitsize);
+    }
+}
+
+U32U64 IREmitter::ConvertFToU(size_t bitsize, const F16F32F64& value) {
+    switch (bitsize) {
+    case 16:
+        switch (value.Type()) {
+        case Type::F16:
+            return Inst<U32>(Opcode::ConvertU16F16, value);
+        case Type::F32:
+            return Inst<U32>(Opcode::ConvertU16F32, value);
+        case Type::F64:
+            return Inst<U32>(Opcode::ConvertU16F64, value);
+        default:
+            ThrowInvalidType(value.Type());
+        }
+    case 32:
+        switch (value.Type()) {
+        case Type::F16:
+            return Inst<U32>(Opcode::ConvertU32F16, value);
+        case Type::F32:
+            return Inst<U32>(Opcode::ConvertU32F32, value);
+        case Type::F64:
+            return Inst<U32>(Opcode::ConvertU32F64, value);
+        default:
+            ThrowInvalidType(value.Type());
+        }
+    case 64:
+        switch (value.Type()) {
+        case Type::F16:
+            return Inst<U64>(Opcode::ConvertU64F16, value);
+        case Type::F32:
+            return Inst<U64>(Opcode::ConvertU64F32, value);
+        case Type::F64:
+            return Inst<U64>(Opcode::ConvertU64F64, value);
+        default:
+            ThrowInvalidType(value.Type());
+        }
+    default:
+        throw InvalidArgument("Invalid destination bitsize {}", bitsize);
+    }
+}
+
+U32U64 IREmitter::ConvertFToI(size_t bitsize, bool is_signed, const F16F32F64& value) {
+    return is_signed ? ConvertFToS(bitsize, value) : ConvertFToU(bitsize, value);
+}
+
+F16F32F64 IREmitter::ConvertSToF(size_t dest_bitsize, size_t src_bitsize, const Value& value,
+                                 FpControl control) {
+    switch (dest_bitsize) {
+    case 16:
+        switch (src_bitsize) {
+        case 8:
+            return Inst<F16>(Opcode::ConvertF16S8, Flags{control}, value);
+        case 16:
+            return Inst<F16>(Opcode::ConvertF16S16, Flags{control}, value);
+        case 32:
+            return Inst<F16>(Opcode::ConvertF16S32, Flags{control}, value);
+        case 64:
+            return Inst<F16>(Opcode::ConvertF16S64, Flags{control}, value);
+        }
+        break;
+    case 32:
+        switch (src_bitsize) {
+        case 8:
+            return Inst<F32>(Opcode::ConvertF32S8, Flags{control}, value);
+        case 16:
+            return Inst<F32>(Opcode::ConvertF32S16, Flags{control}, value);
+        case 32:
+            return Inst<F32>(Opcode::ConvertF32S32, Flags{control}, value);
+        case 64:
+            return Inst<F32>(Opcode::ConvertF32S64, Flags{control}, value);
+        }
+        break;
+    case 64:
+        switch (src_bitsize) {
+        case 8:
+            return Inst<F64>(Opcode::ConvertF64S8, Flags{control}, value);
+        case 16:
+            return Inst<F64>(Opcode::ConvertF64S16, Flags{control}, value);
+        case 32:
+            return Inst<F64>(Opcode::ConvertF64S32, Flags{control}, value);
+        case 64:
+            return Inst<F64>(Opcode::ConvertF64S64, Flags{control}, value);
+        }
+        break;
+    }
+    throw InvalidArgument("Invalid bit size combination dst={} src={}", dest_bitsize, src_bitsize);
+}
+
+F16F32F64 IREmitter::ConvertUToF(size_t dest_bitsize, size_t src_bitsize, const Value& value,
+                                 FpControl control) {
+    switch (dest_bitsize) {
+    case 16:
+        switch (src_bitsize) {
+        case 8:
+            return Inst<F16>(Opcode::ConvertF16U8, Flags{control}, value);
+        case 16:
+            return Inst<F16>(Opcode::ConvertF16U16, Flags{control}, value);
+        case 32:
+            return Inst<F16>(Opcode::ConvertF16U32, Flags{control}, value);
+        case 64:
+            return Inst<F16>(Opcode::ConvertF16U64, Flags{control}, value);
+        }
+        break;
+    case 32:
+        switch (src_bitsize) {
+        case 8:
+            return Inst<F32>(Opcode::ConvertF32U8, Flags{control}, value);
+        case 16:
+            return Inst<F32>(Opcode::ConvertF32U16, Flags{control}, value);
+        case 32:
+            return Inst<F32>(Opcode::ConvertF32U32, Flags{control}, value);
+        case 64:
+            return Inst<F32>(Opcode::ConvertF32U64, Flags{control}, value);
+        }
+        break;
+    case 64:
+        switch (src_bitsize) {
+        case 8:
+            return Inst<F64>(Opcode::ConvertF64U8, Flags{control}, value);
+        case 16:
+            return Inst<F64>(Opcode::ConvertF64U16, Flags{control}, value);
+        case 32:
+            return Inst<F64>(Opcode::ConvertF64U32, Flags{control}, value);
+        case 64:
+            return Inst<F64>(Opcode::ConvertF64U64, Flags{control}, value);
+        }
+        break;
+    }
+    throw InvalidArgument("Invalid bit size combination dst={} src={}", dest_bitsize, src_bitsize);
+}
+
+F16F32F64 IREmitter::ConvertIToF(size_t dest_bitsize, size_t src_bitsize, bool is_signed,
+                                 const Value& value, FpControl control) {
+    return is_signed ? ConvertSToF(dest_bitsize, src_bitsize, value, control)
+                     : ConvertUToF(dest_bitsize, src_bitsize, value, control);
+}
+
+U32U64 IREmitter::UConvert(size_t result_bitsize, const U32U64& value) {
+    switch (result_bitsize) {
+    case 32:
+        switch (value.Type()) {
+        case Type::U32:
+            // Nothing to do
+            return value;
+        case Type::U64:
+            return Inst<U32>(Opcode::ConvertU32U64, value);
+        default:
+            break;
+        }
+        break;
+    case 64:
+        switch (value.Type()) {
+        case Type::U32:
+            return Inst<U64>(Opcode::ConvertU64U32, value);
+        case Type::U64:
+            // Nothing to do
+            return value;
+        default:
+            break;
+        }
+    }
+    throw NotImplementedException("Conversion from {} to {} bits", value.Type(), result_bitsize);
+}
+
+F16F32F64 IREmitter::FPConvert(size_t result_bitsize, const F16F32F64& value, FpControl control) {
+    switch (result_bitsize) {
+    case 16:
+        switch (value.Type()) {
+        case Type::F16:
+            // Nothing to do
+            return value;
+        case Type::F32:
+            return Inst<F16>(Opcode::ConvertF16F32, Flags{control}, value);
+        case Type::F64:
+            throw LogicError("Illegal conversion from F64 to F16");
+        default:
+            break;
+        }
+        break;
+    case 32:
+        switch (value.Type()) {
+        case Type::F16:
+            return Inst<F32>(Opcode::ConvertF32F16, Flags{control}, value);
+        case Type::F32:
+            // Nothing to do
+            return value;
+        case Type::F64:
+            return Inst<F32>(Opcode::ConvertF32F64, Flags{control}, value);
+        default:
+            break;
+        }
+        break;
+    case 64:
+        switch (value.Type()) {
+        case Type::F16:
+            throw LogicError("Illegal conversion from F16 to F64");
+        case Type::F32:
+            return Inst<F64>(Opcode::ConvertF64F32, Flags{control}, value);
+        case Type::F64:
+            // Nothing to do
+            return value;
+        default:
+            break;
+        }
+        break;
+    }
+    throw NotImplementedException("Conversion from {} to {} bits", value.Type(), result_bitsize);
+}
+
+Value IREmitter::ImageSampleImplicitLod(const Value& handle, const Value& coords, const F32& bias,
+                                        const Value& offset, const F32& lod_clamp,
+                                        TextureInstInfo info) {
+    const Value bias_lc{MakeLodClampPair(*this, bias, lod_clamp)};
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageSampleImplicitLod
+                                         : Opcode::BindlessImageSampleImplicitLod};
+    return Inst(op, Flags{info}, handle, coords, bias_lc, offset);
+}
+
+Value IREmitter::ImageSampleExplicitLod(const Value& handle, const Value& coords, const F32& lod,
+                                        const Value& offset, TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageSampleExplicitLod
+                                         : Opcode::BindlessImageSampleExplicitLod};
+    return Inst(op, Flags{info}, handle, coords, lod, offset);
+}
+
+F32 IREmitter::ImageSampleDrefImplicitLod(const Value& handle, const Value& coords, const F32& dref,
+                                          const F32& bias, const Value& offset,
+                                          const F32& lod_clamp, TextureInstInfo info) {
+    const Value bias_lc{MakeLodClampPair(*this, bias, lod_clamp)};
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageSampleDrefImplicitLod
+                                         : Opcode::BindlessImageSampleDrefImplicitLod};
+    return Inst<F32>(op, Flags{info}, handle, coords, dref, bias_lc, offset);
+}
+
+F32 IREmitter::ImageSampleDrefExplicitLod(const Value& handle, const Value& coords, const F32& dref,
+                                          const F32& lod, const Value& offset,
+                                          TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageSampleDrefExplicitLod
+                                         : Opcode::BindlessImageSampleDrefExplicitLod};
+    return Inst<F32>(op, Flags{info}, handle, coords, dref, lod, offset);
+}
+
+Value IREmitter::ImageGather(const Value& handle, const Value& coords, const Value& offset,
+                             const Value& offset2, TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageGather : Opcode::BindlessImageGather};
+    return Inst(op, Flags{info}, handle, coords, offset, offset2);
+}
+
+Value IREmitter::ImageGatherDref(const Value& handle, const Value& coords, const Value& offset,
+                                 const Value& offset2, const F32& dref, TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageGatherDref
+                                         : Opcode::BindlessImageGatherDref};
+    return Inst(op, Flags{info}, handle, coords, offset, offset2, dref);
+}
+
+Value IREmitter::ImageFetch(const Value& handle, const Value& coords, const Value& offset,
+                            const U32& lod, const U32& multisampling, TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageFetch : Opcode::BindlessImageFetch};
+    return Inst(op, Flags{info}, handle, coords, offset, lod, multisampling);
+}
+
+Value IREmitter::ImageQueryDimension(const Value& handle, const IR::U32& lod) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageQueryDimensions
+                                         : Opcode::BindlessImageQueryDimensions};
+    return Inst(op, handle, lod);
+}
+
+Value IREmitter::ImageQueryLod(const Value& handle, const Value& coords, TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageQueryLod
+                                         : Opcode::BindlessImageQueryLod};
+    return Inst(op, Flags{info}, handle, coords);
+}
+
+Value IREmitter::ImageGradient(const Value& handle, const Value& coords, const Value& derivates,
+                               const Value& offset, const F32& lod_clamp, TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageGradient
+                                         : Opcode::BindlessImageGradient};
+    return Inst(op, Flags{info}, handle, coords, derivates, offset, lod_clamp);
+}
+
+Value IREmitter::ImageRead(const Value& handle, const Value& coords, TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageRead : Opcode::BindlessImageRead};
+    return Inst(op, Flags{info}, handle, coords);
+}
+
+void IREmitter::ImageWrite(const Value& handle, const Value& coords, const Value& color,
+                           TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageWrite : Opcode::BindlessImageWrite};
+    Inst(op, Flags{info}, handle, coords, color);
+}
+
+Value IREmitter::ImageAtomicIAdd(const Value& handle, const Value& coords, const Value& value,
+                                 TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicIAdd32
+                                         : Opcode::BindlessImageAtomicIAdd32};
+    return Inst(op, Flags{info}, handle, coords, value);
+}
+
+Value IREmitter::ImageAtomicSMin(const Value& handle, const Value& coords, const Value& value,
+                                 TextureInstInfo info) {
+    const Opcode op{handle.IsImmediate() ? 
Opcode::BoundImageAtomicSMin32 + : Opcode::BindlessImageAtomicSMin32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicUMin(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicUMin32 + : Opcode::BindlessImageAtomicUMin32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicIMin(const Value& handle, const Value& coords, const Value& value, + bool is_signed, TextureInstInfo info) { + return is_signed ? ImageAtomicSMin(handle, coords, value, info) + : ImageAtomicUMin(handle, coords, value, info); +} + +Value IREmitter::ImageAtomicSMax(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicSMax32 + : Opcode::BindlessImageAtomicSMax32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicUMax(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicUMax32 + : Opcode::BindlessImageAtomicUMax32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicIMax(const Value& handle, const Value& coords, const Value& value, + bool is_signed, TextureInstInfo info) { + return is_signed ? ImageAtomicSMax(handle, coords, value, info) + : ImageAtomicUMax(handle, coords, value, info); +} + +Value IREmitter::ImageAtomicInc(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicInc32 + : Opcode::BindlessImageAtomicInc32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicDec(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicDec32 + : Opcode::BindlessImageAtomicDec32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicAnd(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicAnd32 + : Opcode::BindlessImageAtomicAnd32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicOr(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicOr32 + : Opcode::BindlessImageAtomicOr32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicXor(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? Opcode::BoundImageAtomicXor32 + : Opcode::BindlessImageAtomicXor32}; + return Inst(op, Flags{info}, handle, coords, value); +} + +Value IREmitter::ImageAtomicExchange(const Value& handle, const Value& coords, const Value& value, + TextureInstInfo info) { + const Opcode op{handle.IsImmediate() ? 
Opcode::BoundImageAtomicExchange32
+                                         : Opcode::BindlessImageAtomicExchange32};
+    return Inst(op, Flags{info}, handle, coords, value);
+}
+
+U1 IREmitter::VoteAll(const U1& value) {
+    return Inst<U1>(Opcode::VoteAll, value);
+}
+
+U1 IREmitter::VoteAny(const U1& value) {
+    return Inst<U1>(Opcode::VoteAny, value);
+}
+
+U1 IREmitter::VoteEqual(const U1& value) {
+    return Inst<U1>(Opcode::VoteEqual, value);
+}
+
+U32 IREmitter::SubgroupBallot(const U1& value) {
+    return Inst<U32>(Opcode::SubgroupBallot, value);
+}
+
+U32 IREmitter::SubgroupEqMask() {
+    return Inst<U32>(Opcode::SubgroupEqMask);
+}
+
+U32 IREmitter::SubgroupLtMask() {
+    return Inst<U32>(Opcode::SubgroupLtMask);
+}
+
+U32 IREmitter::SubgroupLeMask() {
+    return Inst<U32>(Opcode::SubgroupLeMask);
+}
+
+U32 IREmitter::SubgroupGtMask() {
+    return Inst<U32>(Opcode::SubgroupGtMask);
+}
+
+U32 IREmitter::SubgroupGeMask() {
+    return Inst<U32>(Opcode::SubgroupGeMask);
+}
+
+U32 IREmitter::ShuffleIndex(const IR::U32& value, const IR::U32& index, const IR::U32& clamp,
+                            const IR::U32& seg_mask) {
+    return Inst<U32>(Opcode::ShuffleIndex, value, index, clamp, seg_mask);
+}
+
+U32 IREmitter::ShuffleUp(const IR::U32& value, const IR::U32& index, const IR::U32& clamp,
+                         const IR::U32& seg_mask) {
+    return Inst<U32>(Opcode::ShuffleUp, value, index, clamp, seg_mask);
+}
+
+U32 IREmitter::ShuffleDown(const IR::U32& value, const IR::U32& index, const IR::U32& clamp,
+                           const IR::U32& seg_mask) {
+    return Inst<U32>(Opcode::ShuffleDown, value, index, clamp, seg_mask);
+}
+
+U32 IREmitter::ShuffleButterfly(const IR::U32& value, const IR::U32& index, const IR::U32& clamp,
+                                const IR::U32& seg_mask) {
+    return Inst<U32>(Opcode::ShuffleButterfly, value, index, clamp, seg_mask);
+}
+
+F32 IREmitter::FSwizzleAdd(const F32& a, const F32& b, const U32& swizzle, FpControl control) {
+    return Inst<F32>(Opcode::FSwizzleAdd, Flags{control}, a, b, swizzle);
+}
+
+F32 IREmitter::DPdxFine(const F32& a) {
+    return Inst<F32>(Opcode::DPdxFine, a);
+}
+
+F32 IREmitter::DPdyFine(const F32& a) {
+    return Inst<F32>(Opcode::DPdyFine, a);
+}
+
+F32 IREmitter::DPdxCoarse(const F32& a) {
+    return Inst<F32>(Opcode::DPdxCoarse, a);
+}
+
+F32 IREmitter::DPdyCoarse(const F32& a) {
+    return Inst<F32>(Opcode::DPdyCoarse, a);
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/ir_emitter.h b/src/shader_recompiler/frontend/ir/ir_emitter.h
new file mode 100755
index 000000000..53f7b3b06
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/ir_emitter.h
@@ -0,0 +1,413 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
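+
+// A minimal usage sketch (illustrative only; "block" and the register choices
+// are assumptions, not part of this patch): the emitter is constructed over a
+// Block and each helper below appends a typed instruction at its insertion
+// point, e.g. reading two registers, adding them and writing the result back:
+//
+//     IR::IREmitter ir{*block};
+//     const IR::U32 lhs{ir.GetReg(IR::Reg::R0)};
+//     const IR::U32 rhs{ir.GetReg(IR::Reg::R1)};
+//     ir.SetReg(IR::Reg::R2, ir.IAdd(lhs, rhs));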
+
+#pragma once
+
+#include <cstring>
+#include <type_traits>
+
+#include "shader_recompiler/frontend/ir/attribute.h"
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::IR {
+
+class IREmitter {
+public:
+    explicit IREmitter(Block& block_) : block{&block_}, insertion_point{block->end()} {}
+    explicit IREmitter(Block& block_, Block::iterator insertion_point_)
+        : block{&block_}, insertion_point{insertion_point_} {}
+
+    Block* block;
+
+    [[nodiscard]] U1 Imm1(bool value) const;
+    [[nodiscard]] U8 Imm8(u8 value) const;
+    [[nodiscard]] U16 Imm16(u16 value) const;
+    [[nodiscard]] U32 Imm32(u32 value) const;
+    [[nodiscard]] U32 Imm32(s32 value) const;
+    [[nodiscard]] F32 Imm32(f32 value) const;
+    [[nodiscard]] U64 Imm64(u64 value) const;
+    [[nodiscard]] U64 Imm64(s64 value) const;
+    [[nodiscard]] F64 Imm64(f64 value) const;
+
+    U1 ConditionRef(const U1& value);
+    void Reference(const Value& value);
+
+    void PhiMove(IR::Inst& phi, const Value& value);
+
+    void Prologue();
+    void Epilogue();
+    void DemoteToHelperInvocation();
+    void EmitVertex(const U32& stream);
+    void EndPrimitive(const U32& stream);
+
+    [[nodiscard]] U32 GetReg(IR::Reg reg);
+    void SetReg(IR::Reg reg, const U32& value);
+
+    [[nodiscard]] U1 GetPred(IR::Pred pred, bool is_negated = false);
+    void SetPred(IR::Pred pred, const U1& value);
+
+    [[nodiscard]] U1 GetGotoVariable(u32 id);
+    void SetGotoVariable(u32 id, const U1& value);
+
+    [[nodiscard]] U32 GetIndirectBranchVariable();
+    void SetIndirectBranchVariable(const U32& value);
+
+    [[nodiscard]] U32 GetCbuf(const U32& binding, const U32& byte_offset);
+    [[nodiscard]] Value GetCbuf(const U32& binding, const U32& byte_offset, size_t bitsize,
+                                bool is_signed);
+    [[nodiscard]] F32 GetFloatCbuf(const U32& binding, const U32& byte_offset);
+
+    [[nodiscard]] U1 GetZFlag();
+    [[nodiscard]] U1 GetSFlag();
+    [[nodiscard]] U1 GetCFlag();
+    [[nodiscard]] U1 GetOFlag();
+
+    void SetZFlag(const U1& value);
+    void SetSFlag(const U1& value);
+    void SetCFlag(const U1& value);
+    void SetOFlag(const U1& value);
+
+    [[nodiscard]] U1 Condition(IR::Condition cond);
+    [[nodiscard]] U1 GetFlowTestResult(FlowTest test);
+
+    [[nodiscard]] F32 GetAttribute(IR::Attribute attribute);
+    [[nodiscard]] F32 GetAttribute(IR::Attribute attribute, const U32& vertex);
+    void SetAttribute(IR::Attribute attribute, const F32& value, const U32& vertex);
+
+    [[nodiscard]] F32 GetAttributeIndexed(const U32& phys_address);
+    [[nodiscard]] F32 GetAttributeIndexed(const U32& phys_address, const U32& vertex);
+    void SetAttributeIndexed(const U32& phys_address, const F32& value, const U32& vertex);
+
+    [[nodiscard]] F32 GetPatch(Patch patch);
+    void SetPatch(Patch patch, const F32& value);
+
+    void SetFragColor(u32 index, u32 component, const F32& value);
+    void SetSampleMask(const U32& value);
+    void SetFragDepth(const F32& value);
+
+    [[nodiscard]] U32 WorkgroupIdX();
+    [[nodiscard]] U32 WorkgroupIdY();
+    [[nodiscard]] U32 WorkgroupIdZ();
+
+    [[nodiscard]] Value LocalInvocationId();
+    [[nodiscard]] U32 LocalInvocationIdX();
+    [[nodiscard]] U32 LocalInvocationIdY();
+    [[nodiscard]] U32 LocalInvocationIdZ();
+
+    [[nodiscard]] U32 InvocationId();
+    [[nodiscard]] U32 SampleId();
+    [[nodiscard]] U1 IsHelperInvocation();
+    [[nodiscard]] F32 YDirection();
+
+    [[nodiscard]] U32 LaneId();
+
+    [[nodiscard]] U32 LoadGlobalU8(const U64& address);
+    [[nodiscard]] U32 LoadGlobalS8(const U64& address);
+    [[nodiscard]] U32 LoadGlobalU16(const U64& address);
+    [[nodiscard]] U32 LoadGlobalS16(const U64& address);
+    [[nodiscard]] U32 LoadGlobal32(const U64& address);
+    [[nodiscard]] Value LoadGlobal64(const U64& address);
+    [[nodiscard]] Value LoadGlobal128(const U64& address);
+
+    void WriteGlobalU8(const U64& address, const U32& value);
+    void WriteGlobalS8(const U64& address, const U32& value);
+    void WriteGlobalU16(const U64& address, const U32& value);
+    void WriteGlobalS16(const U64& address, const U32& value);
+    void WriteGlobal32(const U64& address, const U32& value);
+    void WriteGlobal64(const U64& address, const IR::Value& vector);
+    void WriteGlobal128(const U64& address, const IR::Value& vector);
+
+    [[nodiscard]] U32 LoadLocal(const U32& word_offset);
+    void WriteLocal(const U32& word_offset, const U32& value);
+
+    [[nodiscard]] Value LoadShared(int bit_size, bool is_signed, const U32& offset);
+    void WriteShared(int bit_size, const U32& offset, const Value& value);
+
+    [[nodiscard]] U1 GetZeroFromOp(const Value& op);
+    [[nodiscard]] U1 GetSignFromOp(const Value& op);
+    [[nodiscard]] U1 GetCarryFromOp(const Value& op);
+    [[nodiscard]] U1 GetOverflowFromOp(const Value& op);
+    [[nodiscard]] U1 GetSparseFromOp(const Value& op);
+    [[nodiscard]] U1 GetInBoundsFromOp(const Value& op);
+
+    [[nodiscard]] Value CompositeConstruct(const Value& e1, const Value& e2);
+    [[nodiscard]] Value CompositeConstruct(const Value& e1, const Value& e2, const Value& e3);
+    [[nodiscard]] Value CompositeConstruct(const Value& e1, const Value& e2, const Value& e3,
+                                           const Value& e4);
+    [[nodiscard]] Value CompositeExtract(const Value& vector, size_t element);
+    [[nodiscard]] Value CompositeInsert(const Value& vector, const Value& object, size_t element);
+
+    [[nodiscard]] Value Select(const U1& condition, const Value& true_value,
+                               const Value& false_value);
+
+    void Barrier();
+    void WorkgroupMemoryBarrier();
+    void DeviceMemoryBarrier();
+
+    template <typename Dest, typename Source>
+    [[nodiscard]] Dest BitCast(const Source& value);
+
+    [[nodiscard]] U64 PackUint2x32(const Value& vector);
+    [[nodiscard]] Value UnpackUint2x32(const U64& value);
+
+    [[nodiscard]] U32 PackFloat2x16(const Value& vector);
+    [[nodiscard]] Value UnpackFloat2x16(const U32& value);
+
+    [[nodiscard]] U32 PackHalf2x16(const Value& vector);
+    [[nodiscard]] Value UnpackHalf2x16(const U32& value);
+
+    [[nodiscard]] F64 PackDouble2x32(const Value& vector);
+    [[nodiscard]] Value UnpackDouble2x32(const F64& value);
+
+    [[nodiscard]] F16F32F64 FPAdd(const F16F32F64& a, const F16F32F64& b, FpControl control = {});
+    [[nodiscard]] F16F32F64 FPMul(const F16F32F64& a, const F16F32F64& b, FpControl control = {});
+    [[nodiscard]] F16F32F64 FPFma(const F16F32F64& a, const F16F32F64& b, const F16F32F64& c,
+                                  FpControl control = {});
+
+    [[nodiscard]] F16F32F64 FPAbs(const F16F32F64& value);
+    [[nodiscard]] F16F32F64 FPNeg(const F16F32F64& value);
+    [[nodiscard]] F16F32F64 FPAbsNeg(const F16F32F64& value, bool abs, bool neg);
+
+    [[nodiscard]] F32 FPCos(const F32& value);
+    [[nodiscard]] F32 FPSin(const F32& value);
+    [[nodiscard]] F32 FPExp2(const F32& value);
+    [[nodiscard]] F32 FPLog2(const F32& value);
+    [[nodiscard]] F32F64 FPRecip(const F32F64& value);
+    [[nodiscard]] F32F64 FPRecipSqrt(const F32F64& value);
+    [[nodiscard]] F32 FPSqrt(const F32& value);
+    [[nodiscard]] F16F32F64 FPSaturate(const F16F32F64& value);
+    [[nodiscard]] F16F32F64 FPClamp(const F16F32F64& value, const F16F32F64& min_value,
+                                    const F16F32F64& max_value);
+    [[nodiscard]] F16F32F64 FPRoundEven(const F16F32F64& value, 
FpControl control = {}); + [[nodiscard]] F16F32F64 FPFloor(const F16F32F64& value, FpControl control = {}); + [[nodiscard]] F16F32F64 FPCeil(const F16F32F64& value, FpControl control = {}); + [[nodiscard]] F16F32F64 FPTrunc(const F16F32F64& value, FpControl control = {}); + + [[nodiscard]] U1 FPEqual(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control = {}, + bool ordered = true); + [[nodiscard]] U1 FPNotEqual(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control = {}, + bool ordered = true); + [[nodiscard]] U1 FPLessThan(const F16F32F64& lhs, const F16F32F64& rhs, FpControl control = {}, + bool ordered = true); + [[nodiscard]] U1 FPGreaterThan(const F16F32F64& lhs, const F16F32F64& rhs, + FpControl control = {}, bool ordered = true); + [[nodiscard]] U1 FPLessThanEqual(const F16F32F64& lhs, const F16F32F64& rhs, + FpControl control = {}, bool ordered = true); + [[nodiscard]] U1 FPGreaterThanEqual(const F16F32F64& lhs, const F16F32F64& rhs, + FpControl control = {}, bool ordered = true); + [[nodiscard]] U1 FPIsNan(const F16F32F64& value); + [[nodiscard]] U1 FPOrdered(const F16F32F64& lhs, const F16F32F64& rhs); + [[nodiscard]] U1 FPUnordered(const F16F32F64& lhs, const F16F32F64& rhs); + [[nodiscard]] F32F64 FPMax(const F32F64& lhs, const F32F64& rhs, FpControl control = {}); + [[nodiscard]] F32F64 FPMin(const F32F64& lhs, const F32F64& rhs, FpControl control = {}); + + [[nodiscard]] U32U64 IAdd(const U32U64& a, const U32U64& b); + [[nodiscard]] U32U64 ISub(const U32U64& a, const U32U64& b); + [[nodiscard]] U32 IMul(const U32& a, const U32& b); + [[nodiscard]] U32U64 INeg(const U32U64& value); + [[nodiscard]] U32 IAbs(const U32& value); + [[nodiscard]] U32U64 ShiftLeftLogical(const U32U64& base, const U32& shift); + [[nodiscard]] U32U64 ShiftRightLogical(const U32U64& base, const U32& shift); + [[nodiscard]] U32U64 ShiftRightArithmetic(const U32U64& base, const U32& shift); + [[nodiscard]] U32 BitwiseAnd(const U32& a, const U32& b); + [[nodiscard]] U32 BitwiseOr(const U32& a, const U32& b); + [[nodiscard]] U32 BitwiseXor(const U32& a, const U32& b); + [[nodiscard]] U32 BitFieldInsert(const U32& base, const U32& insert, const U32& offset, + const U32& count); + [[nodiscard]] U32 BitFieldExtract(const U32& base, const U32& offset, const U32& count, + bool is_signed = false); + [[nodiscard]] U32 BitReverse(const U32& value); + [[nodiscard]] U32 BitCount(const U32& value); + [[nodiscard]] U32 BitwiseNot(const U32& value); + + [[nodiscard]] U32 FindSMsb(const U32& value); + [[nodiscard]] U32 FindUMsb(const U32& value); + [[nodiscard]] U32 SMin(const U32& a, const U32& b); + [[nodiscard]] U32 UMin(const U32& a, const U32& b); + [[nodiscard]] U32 IMin(const U32& a, const U32& b, bool is_signed); + [[nodiscard]] U32 SMax(const U32& a, const U32& b); + [[nodiscard]] U32 UMax(const U32& a, const U32& b); + [[nodiscard]] U32 IMax(const U32& a, const U32& b, bool is_signed); + [[nodiscard]] U32 SClamp(const U32& value, const U32& min, const U32& max); + [[nodiscard]] U32 UClamp(const U32& value, const U32& min, const U32& max); + + [[nodiscard]] U1 ILessThan(const U32& lhs, const U32& rhs, bool is_signed); + [[nodiscard]] U1 IEqual(const U32U64& lhs, const U32U64& rhs); + [[nodiscard]] U1 ILessThanEqual(const U32& lhs, const U32& rhs, bool is_signed); + [[nodiscard]] U1 IGreaterThan(const U32& lhs, const U32& rhs, bool is_signed); + [[nodiscard]] U1 INotEqual(const U32& lhs, const U32& rhs); + [[nodiscard]] U1 IGreaterThanEqual(const U32& lhs, const U32& rhs, bool is_signed); + + 
[[nodiscard]] U32 SharedAtomicIAdd(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicSMin(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicUMin(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicIMin(const U32& pointer_offset, const U32& value, bool is_signed); + [[nodiscard]] U32 SharedAtomicSMax(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicUMax(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicIMax(const U32& pointer_offset, const U32& value, bool is_signed); + [[nodiscard]] U32 SharedAtomicInc(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicDec(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicAnd(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicOr(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32 SharedAtomicXor(const U32& pointer_offset, const U32& value); + [[nodiscard]] U32U64 SharedAtomicExchange(const U32& pointer_offset, const U32U64& value); + + [[nodiscard]] U32U64 GlobalAtomicIAdd(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicSMin(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicUMin(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicIMin(const U64& pointer_offset, const U32U64& value, + bool is_signed); + [[nodiscard]] U32U64 GlobalAtomicSMax(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicUMax(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicIMax(const U64& pointer_offset, const U32U64& value, + bool is_signed); + [[nodiscard]] U32 GlobalAtomicInc(const U64& pointer_offset, const U32& value); + [[nodiscard]] U32 GlobalAtomicDec(const U64& pointer_offset, const U32& value); + [[nodiscard]] U32U64 GlobalAtomicAnd(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicOr(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicXor(const U64& pointer_offset, const U32U64& value); + [[nodiscard]] U32U64 GlobalAtomicExchange(const U64& pointer_offset, const U32U64& value); + + [[nodiscard]] F32 GlobalAtomicF32Add(const U64& pointer_offset, const Value& value, + const FpControl control = {}); + [[nodiscard]] Value GlobalAtomicF16x2Add(const U64& pointer_offset, const Value& value, + const FpControl control = {}); + [[nodiscard]] Value GlobalAtomicF16x2Min(const U64& pointer_offset, const Value& value, + const FpControl control = {}); + [[nodiscard]] Value GlobalAtomicF16x2Max(const U64& pointer_offset, const Value& value, + const FpControl control = {}); + + [[nodiscard]] U1 LogicalOr(const U1& a, const U1& b); + [[nodiscard]] U1 LogicalAnd(const U1& a, const U1& b); + [[nodiscard]] U1 LogicalXor(const U1& a, const U1& b); + [[nodiscard]] U1 LogicalNot(const U1& value); + + [[nodiscard]] U32U64 ConvertFToS(size_t bitsize, const F16F32F64& value); + [[nodiscard]] U32U64 ConvertFToU(size_t bitsize, const F16F32F64& value); + [[nodiscard]] U32U64 ConvertFToI(size_t bitsize, bool is_signed, const F16F32F64& value); + [[nodiscard]] F16F32F64 ConvertSToF(size_t dest_bitsize, size_t src_bitsize, const Value& value, + FpControl control = {}); + [[nodiscard]] F16F32F64 ConvertUToF(size_t dest_bitsize, size_t src_bitsize, const Value& value, + FpControl control = {}); + [[nodiscard]] F16F32F64 ConvertIToF(size_t 
dest_bitsize, size_t src_bitsize, bool is_signed,
+                                        const Value& value, FpControl control = {});
+
+    [[nodiscard]] U32U64 UConvert(size_t result_bitsize, const U32U64& value);
+    [[nodiscard]] F16F32F64 FPConvert(size_t result_bitsize, const F16F32F64& value,
+                                      FpControl control = {});
+
+    [[nodiscard]] Value ImageSampleImplicitLod(const Value& handle, const Value& coords,
+                                               const F32& bias, const Value& offset,
+                                               const F32& lod_clamp, TextureInstInfo info);
+    [[nodiscard]] Value ImageSampleExplicitLod(const Value& handle, const Value& coords,
+                                               const F32& lod, const Value& offset,
+                                               TextureInstInfo info);
+    [[nodiscard]] F32 ImageSampleDrefImplicitLod(const Value& handle, const Value& coords,
+                                                 const F32& dref, const F32& bias,
+                                                 const Value& offset, const F32& lod_clamp,
+                                                 TextureInstInfo info);
+    [[nodiscard]] F32 ImageSampleDrefExplicitLod(const Value& handle, const Value& coords,
+                                                 const F32& dref, const F32& lod,
+                                                 const Value& offset, TextureInstInfo info);
+    [[nodiscard]] Value ImageQueryDimension(const Value& handle, const IR::U32& lod);
+
+    [[nodiscard]] Value ImageQueryLod(const Value& handle, const Value& coords,
+                                      TextureInstInfo info);
+    [[nodiscard]] Value ImageGather(const Value& handle, const Value& coords, const Value& offset,
+                                    const Value& offset2, TextureInstInfo info);
+    [[nodiscard]] Value ImageGatherDref(const Value& handle, const Value& coords,
+                                        const Value& offset, const Value& offset2, const F32& dref,
+                                        TextureInstInfo info);
+    [[nodiscard]] Value ImageFetch(const Value& handle, const Value& coords, const Value& offset,
+                                   const U32& lod, const U32& multisampling, TextureInstInfo info);
+    [[nodiscard]] Value ImageGradient(const Value& handle, const Value& coords,
+                                      const Value& derivates, const Value& offset,
+                                      const F32& lod_clamp, TextureInstInfo info);
+    [[nodiscard]] Value ImageRead(const Value& handle, const Value& coords, TextureInstInfo info);
+    void ImageWrite(const Value& handle, const Value& coords, const Value& color,
+                    TextureInstInfo info);
+
+    [[nodiscard]] Value ImageAtomicIAdd(const Value& handle, const Value& coords,
+                                        const Value& value, TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicSMin(const Value& handle, const Value& coords,
+                                        const Value& value, TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicUMin(const Value& handle, const Value& coords,
+                                        const Value& value, TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicIMin(const Value& handle, const Value& coords,
+                                        const Value& value, bool is_signed, TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicSMax(const Value& handle, const Value& coords,
+                                        const Value& value, TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicUMax(const Value& handle, const Value& coords,
+                                        const Value& value, TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicIMax(const Value& handle, const Value& coords,
+                                        const Value& value, bool is_signed, TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicInc(const Value& handle, const Value& coords, const Value& value,
+                                       TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicDec(const Value& handle, const Value& coords, const Value& value,
+                                       TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicAnd(const Value& handle, const Value& coords, const Value& value,
+                                       TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicOr(const Value& handle, const Value& coords, const Value& value,
+                                      TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicXor(const Value& handle, const Value& coords, const Value& value,
+                                       TextureInstInfo info);
+    [[nodiscard]] Value ImageAtomicExchange(const Value& handle, const Value& coords,
+                                            const Value& value, TextureInstInfo info);
+    [[nodiscard]] U1 VoteAll(const U1& value);
+    [[nodiscard]] U1 VoteAny(const U1& value);
+    [[nodiscard]] U1 VoteEqual(const U1& value);
+    [[nodiscard]] U32 SubgroupBallot(const U1& value);
+    [[nodiscard]] U32 SubgroupEqMask();
+    [[nodiscard]] U32 SubgroupLtMask();
+    [[nodiscard]] U32 SubgroupLeMask();
+    [[nodiscard]] U32 SubgroupGtMask();
+    [[nodiscard]] U32 SubgroupGeMask();
+    [[nodiscard]] U32 ShuffleIndex(const IR::U32& value, const IR::U32& index, const IR::U32& clamp,
+                                   const IR::U32& seg_mask);
+    [[nodiscard]] U32 ShuffleUp(const IR::U32& value, const IR::U32& index, const IR::U32& clamp,
+                                const IR::U32& seg_mask);
+    [[nodiscard]] U32 ShuffleDown(const IR::U32& value, const IR::U32& index, const IR::U32& clamp,
+                                  const IR::U32& seg_mask);
+    [[nodiscard]] U32 ShuffleButterfly(const IR::U32& value, const IR::U32& index,
+                                       const IR::U32& clamp, const IR::U32& seg_mask);
+    [[nodiscard]] F32 FSwizzleAdd(const F32& a, const F32& b, const U32& swizzle,
+                                  FpControl control = {});
+
+    [[nodiscard]] F32 DPdxFine(const F32& a);
+
+    [[nodiscard]] F32 DPdyFine(const F32& a);
+
+    [[nodiscard]] F32 DPdxCoarse(const F32& a);
+
+    [[nodiscard]] F32 DPdyCoarse(const F32& a);
+
+private:
+    IR::Block::iterator insertion_point;
+
+    template <typename T = Value, typename... Args>
+    T Inst(Opcode op, Args... args) {
+        auto it{block->PrependNewInst(insertion_point, op, {Value{args}...})};
+        return T{Value{&*it}};
+    }
+
+    template <typename T>
+    requires(sizeof(T) <= sizeof(u32) && std::is_trivially_copyable_v<T>) struct Flags {
+        Flags() = default;
+        Flags(T proxy_) : proxy{proxy_} {}
+
+        T proxy;
+    };
+
+    template <typename T = Value, typename FlagType, typename... Args>
+    T Inst(Opcode op, Flags<FlagType> flags, Args... args) {
+        u32 raw_flags{};
+        std::memcpy(&raw_flags, &flags.proxy, sizeof(flags.proxy));
+        auto it{block->PrependNewInst(insertion_point, op, {Value{args}...}, raw_flags)};
+        return T{Value{&*it}};
+    }
+};
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/microinstruction.cpp b/src/shader_recompiler/frontend/ir/microinstruction.cpp
new file mode 100755
index 000000000..3dfa5a880
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/microinstruction.cpp
@@ -0,0 +1,411 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
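+
+// A sketch of how the pseudo-op bookkeeping in this file is consumed (assumed
+// caller code, not part of this patch): a backend emitting an integer add can
+// ask whether anything observes the carry bit of "inst" and, if so, bind the
+// carry-out to that pseudo-instruction.
+//
+//     if (IR::Inst* const carry{inst.GetAssociatedPseudoOperation(IR::Opcode::GetCarryFromOp)}) {
+//         // Emit the add with a carry-out and define "carry" from it.
+//     }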
+
+#include <algorithm>
+#include <memory>
+
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/ir/type.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::IR {
+namespace {
+void CheckPseudoInstruction(IR::Inst* inst, IR::Opcode opcode) {
+    if (inst && inst->GetOpcode() != opcode) {
+        throw LogicError("Invalid pseudo-instruction");
+    }
+}
+
+void SetPseudoInstruction(IR::Inst*& dest_inst, IR::Inst* pseudo_inst) {
+    if (dest_inst) {
+        throw LogicError("Only one of each type of pseudo-op allowed");
+    }
+    dest_inst = pseudo_inst;
+}
+
+void RemovePseudoInstruction(IR::Inst*& inst, IR::Opcode expected_opcode) {
+    if (inst->GetOpcode() != expected_opcode) {
+        throw LogicError("Undoing use of invalid pseudo-op");
+    }
+    inst = nullptr;
+}
+
+void AllocAssociatedInsts(std::unique_ptr<AssociatedInsts>& associated_insts) {
+    if (!associated_insts) {
+        associated_insts = std::make_unique<AssociatedInsts>();
+    }
+}
+} // Anonymous namespace
+
+Inst::Inst(IR::Opcode op_, u32 flags_) noexcept : op{op_}, flags{flags_} {
+    if (op == Opcode::Phi) {
+        std::construct_at(&phi_args);
+    } else {
+        std::construct_at(&args);
+    }
+}
+
+Inst::~Inst() {
+    if (op == Opcode::Phi) {
+        std::destroy_at(&phi_args);
+    } else {
+        std::destroy_at(&args);
+    }
+}
+
+bool Inst::MayHaveSideEffects() const noexcept {
+    switch (op) {
+    case Opcode::ConditionRef:
+    case Opcode::Reference:
+    case Opcode::PhiMove:
+    case Opcode::Prologue:
+    case Opcode::Epilogue:
+    case Opcode::Join:
+    case Opcode::DemoteToHelperInvocation:
+    case Opcode::Barrier:
+    case Opcode::WorkgroupMemoryBarrier:
+    case Opcode::DeviceMemoryBarrier:
+    case Opcode::EmitVertex:
+    case Opcode::EndPrimitive:
+    case Opcode::SetAttribute:
+    case Opcode::SetAttributeIndexed:
+    case Opcode::SetPatch:
+    case Opcode::SetFragColor:
+    case Opcode::SetSampleMask:
+    case Opcode::SetFragDepth:
+    case Opcode::WriteGlobalU8:
+    case Opcode::WriteGlobalS8:
+    case Opcode::WriteGlobalU16:
+    case Opcode::WriteGlobalS16:
+    case Opcode::WriteGlobal32:
+    case Opcode::WriteGlobal64:
+    case Opcode::WriteGlobal128:
+    case Opcode::WriteStorageU8:
+    case Opcode::WriteStorageS8:
+    case Opcode::WriteStorageU16:
+    case Opcode::WriteStorageS16:
+    case Opcode::WriteStorage32:
+    case Opcode::WriteStorage64:
+    case Opcode::WriteStorage128:
+    case Opcode::WriteLocal:
+    case Opcode::WriteSharedU8:
+    case Opcode::WriteSharedU16:
+    case Opcode::WriteSharedU32:
+    case Opcode::WriteSharedU64:
+    case Opcode::WriteSharedU128:
+    case Opcode::SharedAtomicIAdd32:
+    case Opcode::SharedAtomicSMin32:
+    case Opcode::SharedAtomicUMin32:
+    case Opcode::SharedAtomicSMax32:
+    case Opcode::SharedAtomicUMax32:
+    case Opcode::SharedAtomicInc32:
+    case Opcode::SharedAtomicDec32:
+    case Opcode::SharedAtomicAnd32:
+    case Opcode::SharedAtomicOr32:
+    case Opcode::SharedAtomicXor32:
+    case Opcode::SharedAtomicExchange32:
+    case Opcode::SharedAtomicExchange64:
+    case Opcode::GlobalAtomicIAdd32:
+    case Opcode::GlobalAtomicSMin32:
+    case Opcode::GlobalAtomicUMin32:
+    case Opcode::GlobalAtomicSMax32:
+    case Opcode::GlobalAtomicUMax32:
+    case Opcode::GlobalAtomicInc32:
+    case Opcode::GlobalAtomicDec32:
+    case Opcode::GlobalAtomicAnd32:
+    case Opcode::GlobalAtomicOr32:
+    case Opcode::GlobalAtomicXor32:
+    case Opcode::GlobalAtomicExchange32:
+    case Opcode::GlobalAtomicIAdd64:
+    case Opcode::GlobalAtomicSMin64:
+    case Opcode::GlobalAtomicUMin64:
+    case Opcode::GlobalAtomicSMax64:
+    case Opcode::GlobalAtomicUMax64:
+    case Opcode::GlobalAtomicAnd64:
+    case Opcode::GlobalAtomicOr64:
+    case Opcode::GlobalAtomicXor64:
+    case 
Opcode::GlobalAtomicExchange64: + case Opcode::GlobalAtomicAddF32: + case Opcode::GlobalAtomicAddF16x2: + case Opcode::GlobalAtomicAddF32x2: + case Opcode::GlobalAtomicMinF16x2: + case Opcode::GlobalAtomicMinF32x2: + case Opcode::GlobalAtomicMaxF16x2: + case Opcode::GlobalAtomicMaxF32x2: + case Opcode::StorageAtomicIAdd32: + case Opcode::StorageAtomicSMin32: + case Opcode::StorageAtomicUMin32: + case Opcode::StorageAtomicSMax32: + case Opcode::StorageAtomicUMax32: + case Opcode::StorageAtomicInc32: + case Opcode::StorageAtomicDec32: + case Opcode::StorageAtomicAnd32: + case Opcode::StorageAtomicOr32: + case Opcode::StorageAtomicXor32: + case Opcode::StorageAtomicExchange32: + case Opcode::StorageAtomicIAdd64: + case Opcode::StorageAtomicSMin64: + case Opcode::StorageAtomicUMin64: + case Opcode::StorageAtomicSMax64: + case Opcode::StorageAtomicUMax64: + case Opcode::StorageAtomicAnd64: + case Opcode::StorageAtomicOr64: + case Opcode::StorageAtomicXor64: + case Opcode::StorageAtomicExchange64: + case Opcode::StorageAtomicAddF32: + case Opcode::StorageAtomicAddF16x2: + case Opcode::StorageAtomicAddF32x2: + case Opcode::StorageAtomicMinF16x2: + case Opcode::StorageAtomicMinF32x2: + case Opcode::StorageAtomicMaxF16x2: + case Opcode::StorageAtomicMaxF32x2: + case Opcode::BindlessImageWrite: + case Opcode::BoundImageWrite: + case Opcode::ImageWrite: + case IR::Opcode::BindlessImageAtomicIAdd32: + case IR::Opcode::BindlessImageAtomicSMin32: + case IR::Opcode::BindlessImageAtomicUMin32: + case IR::Opcode::BindlessImageAtomicSMax32: + case IR::Opcode::BindlessImageAtomicUMax32: + case IR::Opcode::BindlessImageAtomicInc32: + case IR::Opcode::BindlessImageAtomicDec32: + case IR::Opcode::BindlessImageAtomicAnd32: + case IR::Opcode::BindlessImageAtomicOr32: + case IR::Opcode::BindlessImageAtomicXor32: + case IR::Opcode::BindlessImageAtomicExchange32: + case IR::Opcode::BoundImageAtomicIAdd32: + case IR::Opcode::BoundImageAtomicSMin32: + case IR::Opcode::BoundImageAtomicUMin32: + case IR::Opcode::BoundImageAtomicSMax32: + case IR::Opcode::BoundImageAtomicUMax32: + case IR::Opcode::BoundImageAtomicInc32: + case IR::Opcode::BoundImageAtomicDec32: + case IR::Opcode::BoundImageAtomicAnd32: + case IR::Opcode::BoundImageAtomicOr32: + case IR::Opcode::BoundImageAtomicXor32: + case IR::Opcode::BoundImageAtomicExchange32: + case IR::Opcode::ImageAtomicIAdd32: + case IR::Opcode::ImageAtomicSMin32: + case IR::Opcode::ImageAtomicUMin32: + case IR::Opcode::ImageAtomicSMax32: + case IR::Opcode::ImageAtomicUMax32: + case IR::Opcode::ImageAtomicInc32: + case IR::Opcode::ImageAtomicDec32: + case IR::Opcode::ImageAtomicAnd32: + case IR::Opcode::ImageAtomicOr32: + case IR::Opcode::ImageAtomicXor32: + case IR::Opcode::ImageAtomicExchange32: + return true; + default: + return false; + } +} + +bool Inst::IsPseudoInstruction() const noexcept { + switch (op) { + case Opcode::GetZeroFromOp: + case Opcode::GetSignFromOp: + case Opcode::GetCarryFromOp: + case Opcode::GetOverflowFromOp: + case Opcode::GetSparseFromOp: + case Opcode::GetInBoundsFromOp: + return true; + default: + return false; + } +} + +bool Inst::AreAllArgsImmediates() const { + if (op == Opcode::Phi) { + throw LogicError("Testing for all arguments are immediates on phi instruction"); + } + return std::all_of(args.begin(), args.begin() + NumArgs(), + [](const IR::Value& value) { return value.IsImmediate(); }); +} + +Inst* Inst::GetAssociatedPseudoOperation(IR::Opcode opcode) { + if (!associated_insts) { + return nullptr; + } + switch (opcode) { + case 
Opcode::GetZeroFromOp:
+        CheckPseudoInstruction(associated_insts->zero_inst, Opcode::GetZeroFromOp);
+        return associated_insts->zero_inst;
+    case Opcode::GetSignFromOp:
+        CheckPseudoInstruction(associated_insts->sign_inst, Opcode::GetSignFromOp);
+        return associated_insts->sign_inst;
+    case Opcode::GetCarryFromOp:
+        CheckPseudoInstruction(associated_insts->carry_inst, Opcode::GetCarryFromOp);
+        return associated_insts->carry_inst;
+    case Opcode::GetOverflowFromOp:
+        CheckPseudoInstruction(associated_insts->overflow_inst, Opcode::GetOverflowFromOp);
+        return associated_insts->overflow_inst;
+    case Opcode::GetSparseFromOp:
+        CheckPseudoInstruction(associated_insts->sparse_inst, Opcode::GetSparseFromOp);
+        return associated_insts->sparse_inst;
+    case Opcode::GetInBoundsFromOp:
+        CheckPseudoInstruction(associated_insts->in_bounds_inst, Opcode::GetInBoundsFromOp);
+        return associated_insts->in_bounds_inst;
+    default:
+        throw InvalidArgument("{} is not a pseudo-instruction", opcode);
+    }
+}
+
+IR::Type Inst::Type() const {
+    return TypeOf(op);
+}
+
+void Inst::SetArg(size_t index, Value value) {
+    if (index >= NumArgs()) {
+        throw InvalidArgument("Out of bounds argument index {} in opcode {}", index, op);
+    }
+    const IR::Value arg{Arg(index)};
+    if (!arg.IsImmediate()) {
+        UndoUse(arg);
+    }
+    if (!value.IsImmediate()) {
+        Use(value);
+    }
+    if (op == Opcode::Phi) {
+        phi_args[index].second = value;
+    } else {
+        args[index] = value;
+    }
+}
+
+Block* Inst::PhiBlock(size_t index) const {
+    if (op != Opcode::Phi) {
+        throw LogicError("{} is not a Phi instruction", op);
+    }
+    if (index >= phi_args.size()) {
+        throw InvalidArgument("Out of bounds argument index {} in phi instruction", index);
+    }
+    return phi_args[index].first;
+}
+
+void Inst::AddPhiOperand(Block* predecessor, const Value& value) {
+    if (!value.IsImmediate()) {
+        Use(value);
+    }
+    phi_args.emplace_back(predecessor, value);
+}
+
+void Inst::Invalidate() {
+    ClearArgs();
+    ReplaceOpcode(Opcode::Void);
+}
+
+void Inst::ClearArgs() {
+    if (op == Opcode::Phi) {
+        for (auto& pair : phi_args) {
+            IR::Value& value{pair.second};
+            if (!value.IsImmediate()) {
+                UndoUse(value);
+            }
+        }
+        phi_args.clear();
+    } else {
+        for (auto& value : args) {
+            if (!value.IsImmediate()) {
+                UndoUse(value);
+            }
+        }
+        // Reset arguments to null
+        // std::memset was measured to be faster on MSVC than std::ranges::fill
+        std::memset(reinterpret_cast<char*>(&args), 0, sizeof(args));
+    }
+}
+
+void Inst::ReplaceUsesWith(Value replacement) {
+    Invalidate();
+    ReplaceOpcode(Opcode::Identity);
+    if (!replacement.IsImmediate()) {
+        Use(replacement);
+    }
+    args[0] = replacement;
+}
+
+void Inst::ReplaceOpcode(IR::Opcode opcode) {
+    if (opcode == IR::Opcode::Phi) {
+        throw LogicError("Cannot transition into Phi");
+    }
+    if (op == Opcode::Phi) {
+        // Transition out of phi arguments into non-phi
+        std::destroy_at(&phi_args);
+        std::construct_at(&args);
+    }
+    op = opcode;
+}
+
+void Inst::Use(const Value& value) {
+    Inst* const inst{value.Inst()};
+    ++inst->use_count;
+
+    std::unique_ptr<AssociatedInsts>& assoc_inst{inst->associated_insts};
+    switch (op) {
+    case Opcode::GetZeroFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        SetPseudoInstruction(assoc_inst->zero_inst, this);
+        break;
+    case Opcode::GetSignFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        SetPseudoInstruction(assoc_inst->sign_inst, this);
+        break;
+    case Opcode::GetCarryFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        SetPseudoInstruction(assoc_inst->carry_inst, this);
+        break;
+    case Opcode::GetOverflowFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        SetPseudoInstruction(assoc_inst->overflow_inst, this);
+        break;
+    case Opcode::GetSparseFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        SetPseudoInstruction(assoc_inst->sparse_inst, this);
+        break;
+    case Opcode::GetInBoundsFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        SetPseudoInstruction(assoc_inst->in_bounds_inst, this);
+        break;
+    default:
+        break;
+    }
+}
+
+void Inst::UndoUse(const Value& value) {
+    Inst* const inst{value.Inst()};
+    --inst->use_count;
+
+    std::unique_ptr<AssociatedInsts>& assoc_inst{inst->associated_insts};
+    switch (op) {
+    case Opcode::GetZeroFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        RemovePseudoInstruction(assoc_inst->zero_inst, Opcode::GetZeroFromOp);
+        break;
+    case Opcode::GetSignFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        RemovePseudoInstruction(assoc_inst->sign_inst, Opcode::GetSignFromOp);
+        break;
+    case Opcode::GetCarryFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        RemovePseudoInstruction(assoc_inst->carry_inst, Opcode::GetCarryFromOp);
+        break;
+    case Opcode::GetOverflowFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        RemovePseudoInstruction(assoc_inst->overflow_inst, Opcode::GetOverflowFromOp);
+        break;
+    case Opcode::GetSparseFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        RemovePseudoInstruction(assoc_inst->sparse_inst, Opcode::GetSparseFromOp);
+        break;
+    case Opcode::GetInBoundsFromOp:
+        AllocAssociatedInsts(assoc_inst);
+        RemovePseudoInstruction(assoc_inst->in_bounds_inst, Opcode::GetInBoundsFromOp);
+        break;
+    default:
+        break;
+    }
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/modifiers.h b/src/shader_recompiler/frontend/ir/modifiers.h
new file mode 100755
index 000000000..77cda1f8a
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/modifiers.h
@@ -0,0 +1,49 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
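+
+// Usage sketch (illustrative; "ir", "a" and "b" are assumed to be in scope):
+// FpControl below is the per-instruction modifier word that IREmitter packs
+// through its Flags wrapper and std::memcpy into an instruction's u32 flags,
+// which is why the static_assert below caps it at sizeof(u32).
+//
+//     IR::FpControl control{.rounding = IR::FpRounding::RZ};
+//     const IR::F32 sum{ir.FPAdd(a, b, control)};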
+
+#pragma once
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/shader_info.h"
+
+namespace Shader::IR {
+
+enum class FmzMode : u8 {
+    DontCare, // Not specified for this instruction
+    FTZ,      // Flush denorms to zero, NAN is propagated (D3D11, NVN, GL, VK)
+    FMZ,      // Flush denorms to zero, x * 0 == 0 (D3D9)
+    None,     // Denorms are not flushed, NAN is propagated (nouveau)
+};
+
+enum class FpRounding : u8 {
+    DontCare, // Not specified for this instruction
+    RN,       // Round to nearest even
+    RM,       // Round towards negative infinity
+    RP,       // Round towards positive infinity
+    RZ,       // Round towards zero
+};
+
+struct FpControl {
+    bool no_contraction{false};
+    FpRounding rounding{FpRounding::DontCare};
+    FmzMode fmz_mode{FmzMode::DontCare};
+};
+static_assert(sizeof(FpControl) <= sizeof(u32));
+
+union TextureInstInfo {
+    u32 raw;
+    BitField<0, 16, u32> descriptor_index;
+    BitField<16, 3, TextureType> type;
+    BitField<19, 1, u32> is_depth;
+    BitField<20, 1, u32> has_bias;
+    BitField<21, 1, u32> has_lod_clamp;
+    BitField<22, 1, u32> relaxed_precision;
+    BitField<23, 2, u32> gather_component;
+    BitField<25, 2, u32> num_derivates;
+    BitField<27, 3, ImageFormat> image_format;
+};
+static_assert(sizeof(TextureInstInfo) <= sizeof(u32));
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/opcodes.cpp b/src/shader_recompiler/frontend/ir/opcodes.cpp
new file mode 100755
index 000000000..24d024ad7
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/opcodes.cpp
@@ -0,0 +1,15 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/frontend/ir/opcodes.h"
+
+namespace Shader::IR {
+
+std::string_view NameOf(Opcode op) {
+    return Detail::META_TABLE[static_cast<size_t>(op)].name;
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/opcodes.h b/src/shader_recompiler/frontend/ir/opcodes.h
new file mode 100755
index 000000000..56b001902
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/opcodes.h
@@ -0,0 +1,109 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <algorithm>
+#include <array>
+#include <string_view>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/frontend/ir/type.h"
+
+namespace Shader::IR {
+
+enum class Opcode {
+#define OPCODE(name, ...) name,
+#include "opcodes.inc"
+#undef OPCODE
+};
+
+namespace Detail {
+struct OpcodeMeta {
+    std::string_view name;
+    Type type;
+    std::array<Type, 5> arg_types;
+};
+
+// using enum Type;
+constexpr Type Void{Type::Void};
+constexpr Type Opaque{Type::Opaque};
+constexpr Type Reg{Type::Reg};
+constexpr Type Pred{Type::Pred};
+constexpr Type Attribute{Type::Attribute};
+constexpr Type Patch{Type::Patch};
+constexpr Type U1{Type::U1};
+constexpr Type U8{Type::U8};
+constexpr Type U16{Type::U16};
+constexpr Type U32{Type::U32};
+constexpr Type U64{Type::U64};
+constexpr Type F16{Type::F16};
+constexpr Type F32{Type::F32};
+constexpr Type F64{Type::F64};
+constexpr Type U32x2{Type::U32x2};
+constexpr Type U32x3{Type::U32x3};
+constexpr Type U32x4{Type::U32x4};
+constexpr Type F16x2{Type::F16x2};
+constexpr Type F16x3{Type::F16x3};
+constexpr Type F16x4{Type::F16x4};
+constexpr Type F32x2{Type::F32x2};
+constexpr Type F32x3{Type::F32x3};
+constexpr Type F32x4{Type::F32x4};
+constexpr Type F64x2{Type::F64x2};
+constexpr Type F64x3{Type::F64x3};
+constexpr Type F64x4{Type::F64x4};
+
+constexpr OpcodeMeta META_TABLE[]{
+#define OPCODE(name_token, type_token, ...) \
+    { \
+        .name{#name_token}, \
+        .type = type_token, \
+        .arg_types{__VA_ARGS__}, \
+    },
+#include "opcodes.inc"
+#undef OPCODE
+};
+constexpr size_t CalculateNumArgsOf(Opcode op) {
+    const auto& arg_types{META_TABLE[static_cast<size_t>(op)].arg_types};
+    return std::distance(arg_types.begin(), std::ranges::find(arg_types, Type::Void));
+}
+
+constexpr u8 NUM_ARGS[]{
+#define OPCODE(name_token, type_token, ...) static_cast<u8>(CalculateNumArgsOf(Opcode::name_token)),
+#include "opcodes.inc"
+#undef OPCODE
+};
+} // namespace Detail
+
+/// Get return type of an opcode
+[[nodiscard]] inline Type TypeOf(Opcode op) noexcept {
+    return Detail::META_TABLE[static_cast<size_t>(op)].type;
+}
+
+/// Get the number of arguments an opcode accepts
+[[nodiscard]] inline size_t NumArgsOf(Opcode op) noexcept {
+    return static_cast<size_t>(Detail::NUM_ARGS[static_cast<size_t>(op)]);
+}
+
+/// Get the required type of an argument of an opcode
+[[nodiscard]] inline Type ArgTypeOf(Opcode op, size_t arg_index) noexcept {
+    return Detail::META_TABLE[static_cast<size_t>(op)].arg_types[arg_index];
+}
+
+/// Get the name of an opcode
+[[nodiscard]] std::string_view NameOf(Opcode op);
+
+} // namespace Shader::IR
+
+template <>
+struct fmt::formatter<Shader::IR::Opcode> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::IR::Opcode& op, FormatContext& ctx) {
+        return format_to(ctx.out(), "{}", Shader::IR::NameOf(op));
+    }
+};
diff --git a/src/shader_recompiler/frontend/ir/opcodes.inc b/src/shader_recompiler/frontend/ir/opcodes.inc
new file mode 100755
index 000000000..d91098c80
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/opcodes.inc
@@ -0,0 +1,550 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+// opcode name, return type, arg1 type, arg2 type, arg3 type, arg4 type, ...
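+// As a sketch of the expansion (illustrative, derived from opcodes.h above):
+// "OPCODE(IAdd32, U32, U32, U32, )" becomes the enumerator Opcode::IAdd32 and
+// a META_TABLE row equivalent to
+//     { .name{"IAdd32"}, .type = U32, .arg_types{U32, U32} },
+// where the remaining arg_types entries stay Type::Void, which is how
+// CalculateNumArgsOf() counts two arguments for this opcode.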
+OPCODE(Phi, Opaque, ) +OPCODE(Identity, Opaque, Opaque, ) +OPCODE(Void, Void, ) +OPCODE(ConditionRef, U1, U1, ) +OPCODE(Reference, Void, Opaque, ) +OPCODE(PhiMove, Void, Opaque, Opaque, ) + +// Special operations +OPCODE(Prologue, Void, ) +OPCODE(Epilogue, Void, ) +OPCODE(Join, Void, ) +OPCODE(DemoteToHelperInvocation, Void, ) +OPCODE(EmitVertex, Void, U32, ) +OPCODE(EndPrimitive, Void, U32, ) + +// Barriers +OPCODE(Barrier, Void, ) +OPCODE(WorkgroupMemoryBarrier, Void, ) +OPCODE(DeviceMemoryBarrier, Void, ) + +// Context getters/setters +OPCODE(GetRegister, U32, Reg, ) +OPCODE(SetRegister, Void, Reg, U32, ) +OPCODE(GetPred, U1, Pred, ) +OPCODE(SetPred, Void, Pred, U1, ) +OPCODE(GetGotoVariable, U1, U32, ) +OPCODE(SetGotoVariable, Void, U32, U1, ) +OPCODE(GetIndirectBranchVariable, U32, ) +OPCODE(SetIndirectBranchVariable, Void, U32, ) +OPCODE(GetCbufU8, U32, U32, U32, ) +OPCODE(GetCbufS8, U32, U32, U32, ) +OPCODE(GetCbufU16, U32, U32, U32, ) +OPCODE(GetCbufS16, U32, U32, U32, ) +OPCODE(GetCbufU32, U32, U32, U32, ) +OPCODE(GetCbufF32, F32, U32, U32, ) +OPCODE(GetCbufU32x2, U32x2, U32, U32, ) +OPCODE(GetAttribute, F32, Attribute, U32, ) +OPCODE(SetAttribute, Void, Attribute, F32, U32, ) +OPCODE(GetAttributeIndexed, F32, U32, U32, ) +OPCODE(SetAttributeIndexed, Void, U32, F32, U32, ) +OPCODE(GetPatch, F32, Patch, ) +OPCODE(SetPatch, Void, Patch, F32, ) +OPCODE(SetFragColor, Void, U32, U32, F32, ) +OPCODE(SetSampleMask, Void, U32, ) +OPCODE(SetFragDepth, Void, F32, ) +OPCODE(GetZFlag, U1, Void, ) +OPCODE(GetSFlag, U1, Void, ) +OPCODE(GetCFlag, U1, Void, ) +OPCODE(GetOFlag, U1, Void, ) +OPCODE(SetZFlag, Void, U1, ) +OPCODE(SetSFlag, Void, U1, ) +OPCODE(SetCFlag, Void, U1, ) +OPCODE(SetOFlag, Void, U1, ) +OPCODE(WorkgroupId, U32x3, ) +OPCODE(LocalInvocationId, U32x3, ) +OPCODE(InvocationId, U32, ) +OPCODE(SampleId, U32, ) +OPCODE(IsHelperInvocation, U1, ) +OPCODE(YDirection, F32, ) + +// Undefined +OPCODE(UndefU1, U1, ) +OPCODE(UndefU8, U8, ) +OPCODE(UndefU16, U16, ) +OPCODE(UndefU32, U32, ) +OPCODE(UndefU64, U64, ) + +// Memory operations +OPCODE(LoadGlobalU8, U32, Opaque, ) +OPCODE(LoadGlobalS8, U32, Opaque, ) +OPCODE(LoadGlobalU16, U32, Opaque, ) +OPCODE(LoadGlobalS16, U32, Opaque, ) +OPCODE(LoadGlobal32, U32, Opaque, ) +OPCODE(LoadGlobal64, U32x2, Opaque, ) +OPCODE(LoadGlobal128, U32x4, Opaque, ) +OPCODE(WriteGlobalU8, Void, Opaque, U32, ) +OPCODE(WriteGlobalS8, Void, Opaque, U32, ) +OPCODE(WriteGlobalU16, Void, Opaque, U32, ) +OPCODE(WriteGlobalS16, Void, Opaque, U32, ) +OPCODE(WriteGlobal32, Void, Opaque, U32, ) +OPCODE(WriteGlobal64, Void, Opaque, U32x2, ) +OPCODE(WriteGlobal128, Void, Opaque, U32x4, ) + +// Storage buffer operations +OPCODE(LoadStorageU8, U32, U32, U32, ) +OPCODE(LoadStorageS8, U32, U32, U32, ) +OPCODE(LoadStorageU16, U32, U32, U32, ) +OPCODE(LoadStorageS16, U32, U32, U32, ) +OPCODE(LoadStorage32, U32, U32, U32, ) +OPCODE(LoadStorage64, U32x2, U32, U32, ) +OPCODE(LoadStorage128, U32x4, U32, U32, ) +OPCODE(WriteStorageU8, Void, U32, U32, U32, ) +OPCODE(WriteStorageS8, Void, U32, U32, U32, ) +OPCODE(WriteStorageU16, Void, U32, U32, U32, ) +OPCODE(WriteStorageS16, Void, U32, U32, U32, ) +OPCODE(WriteStorage32, Void, U32, U32, U32, ) +OPCODE(WriteStorage64, Void, U32, U32, U32x2, ) +OPCODE(WriteStorage128, Void, U32, U32, U32x4, ) + +// Local memory operations +OPCODE(LoadLocal, U32, U32, ) +OPCODE(WriteLocal, Void, U32, U32, ) + +// Shared memory operations +OPCODE(LoadSharedU8, U32, U32, ) +OPCODE(LoadSharedS8, U32, U32, ) +OPCODE(LoadSharedU16, U32, U32, ) 
+OPCODE(LoadSharedS16, U32, U32, ) +OPCODE(LoadSharedU32, U32, U32, ) +OPCODE(LoadSharedU64, U32x2, U32, ) +OPCODE(LoadSharedU128, U32x4, U32, ) +OPCODE(WriteSharedU8, Void, U32, U32, ) +OPCODE(WriteSharedU16, Void, U32, U32, ) +OPCODE(WriteSharedU32, Void, U32, U32, ) +OPCODE(WriteSharedU64, Void, U32, U32x2, ) +OPCODE(WriteSharedU128, Void, U32, U32x4, ) + +// Vector utility +OPCODE(CompositeConstructU32x2, U32x2, U32, U32, ) +OPCODE(CompositeConstructU32x3, U32x3, U32, U32, U32, ) +OPCODE(CompositeConstructU32x4, U32x4, U32, U32, U32, U32, ) +OPCODE(CompositeExtractU32x2, U32, U32x2, U32, ) +OPCODE(CompositeExtractU32x3, U32, U32x3, U32, ) +OPCODE(CompositeExtractU32x4, U32, U32x4, U32, ) +OPCODE(CompositeInsertU32x2, U32x2, U32x2, U32, U32, ) +OPCODE(CompositeInsertU32x3, U32x3, U32x3, U32, U32, ) +OPCODE(CompositeInsertU32x4, U32x4, U32x4, U32, U32, ) +OPCODE(CompositeConstructF16x2, F16x2, F16, F16, ) +OPCODE(CompositeConstructF16x3, F16x3, F16, F16, F16, ) +OPCODE(CompositeConstructF16x4, F16x4, F16, F16, F16, F16, ) +OPCODE(CompositeExtractF16x2, F16, F16x2, U32, ) +OPCODE(CompositeExtractF16x3, F16, F16x3, U32, ) +OPCODE(CompositeExtractF16x4, F16, F16x4, U32, ) +OPCODE(CompositeInsertF16x2, F16x2, F16x2, F16, U32, ) +OPCODE(CompositeInsertF16x3, F16x3, F16x3, F16, U32, ) +OPCODE(CompositeInsertF16x4, F16x4, F16x4, F16, U32, ) +OPCODE(CompositeConstructF32x2, F32x2, F32, F32, ) +OPCODE(CompositeConstructF32x3, F32x3, F32, F32, F32, ) +OPCODE(CompositeConstructF32x4, F32x4, F32, F32, F32, F32, ) +OPCODE(CompositeExtractF32x2, F32, F32x2, U32, ) +OPCODE(CompositeExtractF32x3, F32, F32x3, U32, ) +OPCODE(CompositeExtractF32x4, F32, F32x4, U32, ) +OPCODE(CompositeInsertF32x2, F32x2, F32x2, F32, U32, ) +OPCODE(CompositeInsertF32x3, F32x3, F32x3, F32, U32, ) +OPCODE(CompositeInsertF32x4, F32x4, F32x4, F32, U32, ) +OPCODE(CompositeConstructF64x2, F64x2, F64, F64, ) +OPCODE(CompositeConstructF64x3, F64x3, F64, F64, F64, ) +OPCODE(CompositeConstructF64x4, F64x4, F64, F64, F64, F64, ) +OPCODE(CompositeExtractF64x2, F64, F64x2, U32, ) +OPCODE(CompositeExtractF64x3, F64, F64x3, U32, ) +OPCODE(CompositeExtractF64x4, F64, F64x4, U32, ) +OPCODE(CompositeInsertF64x2, F64x2, F64x2, F64, U32, ) +OPCODE(CompositeInsertF64x3, F64x3, F64x3, F64, U32, ) +OPCODE(CompositeInsertF64x4, F64x4, F64x4, F64, U32, ) + +// Select operations +OPCODE(SelectU1, U1, U1, U1, U1, ) +OPCODE(SelectU8, U8, U1, U8, U8, ) +OPCODE(SelectU16, U16, U1, U16, U16, ) +OPCODE(SelectU32, U32, U1, U32, U32, ) +OPCODE(SelectU64, U64, U1, U64, U64, ) +OPCODE(SelectF16, F16, U1, F16, F16, ) +OPCODE(SelectF32, F32, U1, F32, F32, ) +OPCODE(SelectF64, F64, U1, F64, F64, ) + +// Bitwise conversions +OPCODE(BitCastU16F16, U16, F16, ) +OPCODE(BitCastU32F32, U32, F32, ) +OPCODE(BitCastU64F64, U64, F64, ) +OPCODE(BitCastF16U16, F16, U16, ) +OPCODE(BitCastF32U32, F32, U32, ) +OPCODE(BitCastF64U64, F64, U64, ) +OPCODE(PackUint2x32, U64, U32x2, ) +OPCODE(UnpackUint2x32, U32x2, U64, ) +OPCODE(PackFloat2x16, U32, F16x2, ) +OPCODE(UnpackFloat2x16, F16x2, U32, ) +OPCODE(PackHalf2x16, U32, F32x2, ) +OPCODE(UnpackHalf2x16, F32x2, U32, ) +OPCODE(PackDouble2x32, F64, U32x2, ) +OPCODE(UnpackDouble2x32, U32x2, F64, ) + +// Pseudo-operation, handled specially at final emit +OPCODE(GetZeroFromOp, U1, Opaque, ) +OPCODE(GetSignFromOp, U1, Opaque, ) +OPCODE(GetCarryFromOp, U1, Opaque, ) +OPCODE(GetOverflowFromOp, U1, Opaque, ) +OPCODE(GetSparseFromOp, U1, Opaque, ) +OPCODE(GetInBoundsFromOp, U1, Opaque, ) + +// Floating-point operations +OPCODE(FPAbs16, F16, 
F16, ) +OPCODE(FPAbs32, F32, F32, ) +OPCODE(FPAbs64, F64, F64, ) +OPCODE(FPAdd16, F16, F16, F16, ) +OPCODE(FPAdd32, F32, F32, F32, ) +OPCODE(FPAdd64, F64, F64, F64, ) +OPCODE(FPFma16, F16, F16, F16, F16, ) +OPCODE(FPFma32, F32, F32, F32, F32, ) +OPCODE(FPFma64, F64, F64, F64, F64, ) +OPCODE(FPMax32, F32, F32, F32, ) +OPCODE(FPMax64, F64, F64, F64, ) +OPCODE(FPMin32, F32, F32, F32, ) +OPCODE(FPMin64, F64, F64, F64, ) +OPCODE(FPMul16, F16, F16, F16, ) +OPCODE(FPMul32, F32, F32, F32, ) +OPCODE(FPMul64, F64, F64, F64, ) +OPCODE(FPNeg16, F16, F16, ) +OPCODE(FPNeg32, F32, F32, ) +OPCODE(FPNeg64, F64, F64, ) +OPCODE(FPRecip32, F32, F32, ) +OPCODE(FPRecip64, F64, F64, ) +OPCODE(FPRecipSqrt32, F32, F32, ) +OPCODE(FPRecipSqrt64, F64, F64, ) +OPCODE(FPSqrt, F32, F32, ) +OPCODE(FPSin, F32, F32, ) +OPCODE(FPExp2, F32, F32, ) +OPCODE(FPCos, F32, F32, ) +OPCODE(FPLog2, F32, F32, ) +OPCODE(FPSaturate16, F16, F16, ) +OPCODE(FPSaturate32, F32, F32, ) +OPCODE(FPSaturate64, F64, F64, ) +OPCODE(FPClamp16, F16, F16, F16, F16, ) +OPCODE(FPClamp32, F32, F32, F32, F32, ) +OPCODE(FPClamp64, F64, F64, F64, F64, ) +OPCODE(FPRoundEven16, F16, F16, ) +OPCODE(FPRoundEven32, F32, F32, ) +OPCODE(FPRoundEven64, F64, F64, ) +OPCODE(FPFloor16, F16, F16, ) +OPCODE(FPFloor32, F32, F32, ) +OPCODE(FPFloor64, F64, F64, ) +OPCODE(FPCeil16, F16, F16, ) +OPCODE(FPCeil32, F32, F32, ) +OPCODE(FPCeil64, F64, F64, ) +OPCODE(FPTrunc16, F16, F16, ) +OPCODE(FPTrunc32, F32, F32, ) +OPCODE(FPTrunc64, F64, F64, ) + +OPCODE(FPOrdEqual16, U1, F16, F16, ) +OPCODE(FPOrdEqual32, U1, F32, F32, ) +OPCODE(FPOrdEqual64, U1, F64, F64, ) +OPCODE(FPUnordEqual16, U1, F16, F16, ) +OPCODE(FPUnordEqual32, U1, F32, F32, ) +OPCODE(FPUnordEqual64, U1, F64, F64, ) +OPCODE(FPOrdNotEqual16, U1, F16, F16, ) +OPCODE(FPOrdNotEqual32, U1, F32, F32, ) +OPCODE(FPOrdNotEqual64, U1, F64, F64, ) +OPCODE(FPUnordNotEqual16, U1, F16, F16, ) +OPCODE(FPUnordNotEqual32, U1, F32, F32, ) +OPCODE(FPUnordNotEqual64, U1, F64, F64, ) +OPCODE(FPOrdLessThan16, U1, F16, F16, ) +OPCODE(FPOrdLessThan32, U1, F32, F32, ) +OPCODE(FPOrdLessThan64, U1, F64, F64, ) +OPCODE(FPUnordLessThan16, U1, F16, F16, ) +OPCODE(FPUnordLessThan32, U1, F32, F32, ) +OPCODE(FPUnordLessThan64, U1, F64, F64, ) +OPCODE(FPOrdGreaterThan16, U1, F16, F16, ) +OPCODE(FPOrdGreaterThan32, U1, F32, F32, ) +OPCODE(FPOrdGreaterThan64, U1, F64, F64, ) +OPCODE(FPUnordGreaterThan16, U1, F16, F16, ) +OPCODE(FPUnordGreaterThan32, U1, F32, F32, ) +OPCODE(FPUnordGreaterThan64, U1, F64, F64, ) +OPCODE(FPOrdLessThanEqual16, U1, F16, F16, ) +OPCODE(FPOrdLessThanEqual32, U1, F32, F32, ) +OPCODE(FPOrdLessThanEqual64, U1, F64, F64, ) +OPCODE(FPUnordLessThanEqual16, U1, F16, F16, ) +OPCODE(FPUnordLessThanEqual32, U1, F32, F32, ) +OPCODE(FPUnordLessThanEqual64, U1, F64, F64, ) +OPCODE(FPOrdGreaterThanEqual16, U1, F16, F16, ) +OPCODE(FPOrdGreaterThanEqual32, U1, F32, F32, ) +OPCODE(FPOrdGreaterThanEqual64, U1, F64, F64, ) +OPCODE(FPUnordGreaterThanEqual16, U1, F16, F16, ) +OPCODE(FPUnordGreaterThanEqual32, U1, F32, F32, ) +OPCODE(FPUnordGreaterThanEqual64, U1, F64, F64, ) +OPCODE(FPIsNan16, U1, F16, ) +OPCODE(FPIsNan32, U1, F32, ) +OPCODE(FPIsNan64, U1, F64, ) + +// Integer operations +OPCODE(IAdd32, U32, U32, U32, ) +OPCODE(IAdd64, U64, U64, U64, ) +OPCODE(ISub32, U32, U32, U32, ) +OPCODE(ISub64, U64, U64, U64, ) +OPCODE(IMul32, U32, U32, U32, ) +OPCODE(INeg32, U32, U32, ) +OPCODE(INeg64, U64, U64, ) +OPCODE(IAbs32, U32, U32, ) +OPCODE(ShiftLeftLogical32, U32, U32, U32, ) +OPCODE(ShiftLeftLogical64, U64, U64, U32, ) 
+OPCODE(ShiftRightLogical32, U32, U32, U32, ) +OPCODE(ShiftRightLogical64, U64, U64, U32, ) +OPCODE(ShiftRightArithmetic32, U32, U32, U32, ) +OPCODE(ShiftRightArithmetic64, U64, U64, U32, ) +OPCODE(BitwiseAnd32, U32, U32, U32, ) +OPCODE(BitwiseOr32, U32, U32, U32, ) +OPCODE(BitwiseXor32, U32, U32, U32, ) +OPCODE(BitFieldInsert, U32, U32, U32, U32, U32, ) +OPCODE(BitFieldSExtract, U32, U32, U32, U32, ) +OPCODE(BitFieldUExtract, U32, U32, U32, U32, ) +OPCODE(BitReverse32, U32, U32, ) +OPCODE(BitCount32, U32, U32, ) +OPCODE(BitwiseNot32, U32, U32, ) + +OPCODE(FindSMsb32, U32, U32, ) +OPCODE(FindUMsb32, U32, U32, ) +OPCODE(SMin32, U32, U32, U32, ) +OPCODE(UMin32, U32, U32, U32, ) +OPCODE(SMax32, U32, U32, U32, ) +OPCODE(UMax32, U32, U32, U32, ) +OPCODE(SClamp32, U32, U32, U32, U32, ) +OPCODE(UClamp32, U32, U32, U32, U32, ) +OPCODE(SLessThan, U1, U32, U32, ) +OPCODE(ULessThan, U1, U32, U32, ) +OPCODE(IEqual, U1, U32, U32, ) +OPCODE(SLessThanEqual, U1, U32, U32, ) +OPCODE(ULessThanEqual, U1, U32, U32, ) +OPCODE(SGreaterThan, U1, U32, U32, ) +OPCODE(UGreaterThan, U1, U32, U32, ) +OPCODE(INotEqual, U1, U32, U32, ) +OPCODE(SGreaterThanEqual, U1, U32, U32, ) +OPCODE(UGreaterThanEqual, U1, U32, U32, ) + +// Atomic operations +OPCODE(SharedAtomicIAdd32, U32, U32, U32, ) +OPCODE(SharedAtomicSMin32, U32, U32, U32, ) +OPCODE(SharedAtomicUMin32, U32, U32, U32, ) +OPCODE(SharedAtomicSMax32, U32, U32, U32, ) +OPCODE(SharedAtomicUMax32, U32, U32, U32, ) +OPCODE(SharedAtomicInc32, U32, U32, U32, ) +OPCODE(SharedAtomicDec32, U32, U32, U32, ) +OPCODE(SharedAtomicAnd32, U32, U32, U32, ) +OPCODE(SharedAtomicOr32, U32, U32, U32, ) +OPCODE(SharedAtomicXor32, U32, U32, U32, ) +OPCODE(SharedAtomicExchange32, U32, U32, U32, ) +OPCODE(SharedAtomicExchange64, U64, U32, U64, ) + +OPCODE(GlobalAtomicIAdd32, U32, U64, U32, ) +OPCODE(GlobalAtomicSMin32, U32, U64, U32, ) +OPCODE(GlobalAtomicUMin32, U32, U64, U32, ) +OPCODE(GlobalAtomicSMax32, U32, U64, U32, ) +OPCODE(GlobalAtomicUMax32, U32, U64, U32, ) +OPCODE(GlobalAtomicInc32, U32, U64, U32, ) +OPCODE(GlobalAtomicDec32, U32, U64, U32, ) +OPCODE(GlobalAtomicAnd32, U32, U64, U32, ) +OPCODE(GlobalAtomicOr32, U32, U64, U32, ) +OPCODE(GlobalAtomicXor32, U32, U64, U32, ) +OPCODE(GlobalAtomicExchange32, U32, U64, U32, ) +OPCODE(GlobalAtomicIAdd64, U64, U64, U64, ) +OPCODE(GlobalAtomicSMin64, U64, U64, U64, ) +OPCODE(GlobalAtomicUMin64, U64, U64, U64, ) +OPCODE(GlobalAtomicSMax64, U64, U64, U64, ) +OPCODE(GlobalAtomicUMax64, U64, U64, U64, ) +OPCODE(GlobalAtomicAnd64, U64, U64, U64, ) +OPCODE(GlobalAtomicOr64, U64, U64, U64, ) +OPCODE(GlobalAtomicXor64, U64, U64, U64, ) +OPCODE(GlobalAtomicExchange64, U64, U64, U64, ) +OPCODE(GlobalAtomicAddF32, F32, U64, F32, ) +OPCODE(GlobalAtomicAddF16x2, U32, U64, F16x2, ) +OPCODE(GlobalAtomicAddF32x2, U32, U64, F32x2, ) +OPCODE(GlobalAtomicMinF16x2, U32, U64, F16x2, ) +OPCODE(GlobalAtomicMinF32x2, U32, U64, F32x2, ) +OPCODE(GlobalAtomicMaxF16x2, U32, U64, F16x2, ) +OPCODE(GlobalAtomicMaxF32x2, U32, U64, F32x2, ) + +OPCODE(StorageAtomicIAdd32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicSMin32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicUMin32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicSMax32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicUMax32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicInc32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicDec32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicAnd32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicOr32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicXor32, U32, U32, U32, U32, ) +OPCODE(StorageAtomicExchange32, 
U32, U32, U32, U32, ) +OPCODE(StorageAtomicIAdd64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicSMin64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicUMin64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicSMax64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicUMax64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicAnd64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicOr64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicXor64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicExchange64, U64, U32, U32, U64, ) +OPCODE(StorageAtomicAddF32, F32, U32, U32, F32, ) +OPCODE(StorageAtomicAddF16x2, U32, U32, U32, F16x2, ) +OPCODE(StorageAtomicAddF32x2, U32, U32, U32, F32x2, ) +OPCODE(StorageAtomicMinF16x2, U32, U32, U32, F16x2, ) +OPCODE(StorageAtomicMinF32x2, U32, U32, U32, F32x2, ) +OPCODE(StorageAtomicMaxF16x2, U32, U32, U32, F16x2, ) +OPCODE(StorageAtomicMaxF32x2, U32, U32, U32, F32x2, ) + +// Logical operations +OPCODE(LogicalOr, U1, U1, U1, ) +OPCODE(LogicalAnd, U1, U1, U1, ) +OPCODE(LogicalXor, U1, U1, U1, ) +OPCODE(LogicalNot, U1, U1, ) + +// Conversion operations +OPCODE(ConvertS16F16, U32, F16, ) +OPCODE(ConvertS16F32, U32, F32, ) +OPCODE(ConvertS16F64, U32, F64, ) +OPCODE(ConvertS32F16, U32, F16, ) +OPCODE(ConvertS32F32, U32, F32, ) +OPCODE(ConvertS32F64, U32, F64, ) +OPCODE(ConvertS64F16, U64, F16, ) +OPCODE(ConvertS64F32, U64, F32, ) +OPCODE(ConvertS64F64, U64, F64, ) +OPCODE(ConvertU16F16, U32, F16, ) +OPCODE(ConvertU16F32, U32, F32, ) +OPCODE(ConvertU16F64, U32, F64, ) +OPCODE(ConvertU32F16, U32, F16, ) +OPCODE(ConvertU32F32, U32, F32, ) +OPCODE(ConvertU32F64, U32, F64, ) +OPCODE(ConvertU64F16, U64, F16, ) +OPCODE(ConvertU64F32, U64, F32, ) +OPCODE(ConvertU64F64, U64, F64, ) +OPCODE(ConvertU64U32, U64, U32, ) +OPCODE(ConvertU32U64, U32, U64, ) +OPCODE(ConvertF16F32, F16, F32, ) +OPCODE(ConvertF32F16, F32, F16, ) +OPCODE(ConvertF32F64, F32, F64, ) +OPCODE(ConvertF64F32, F64, F32, ) +OPCODE(ConvertF16S8, F16, U32, ) +OPCODE(ConvertF16S16, F16, U32, ) +OPCODE(ConvertF16S32, F16, U32, ) +OPCODE(ConvertF16S64, F16, U64, ) +OPCODE(ConvertF16U8, F16, U32, ) +OPCODE(ConvertF16U16, F16, U32, ) +OPCODE(ConvertF16U32, F16, U32, ) +OPCODE(ConvertF16U64, F16, U64, ) +OPCODE(ConvertF32S8, F32, U32, ) +OPCODE(ConvertF32S16, F32, U32, ) +OPCODE(ConvertF32S32, F32, U32, ) +OPCODE(ConvertF32S64, F32, U64, ) +OPCODE(ConvertF32U8, F32, U32, ) +OPCODE(ConvertF32U16, F32, U32, ) +OPCODE(ConvertF32U32, F32, U32, ) +OPCODE(ConvertF32U64, F32, U64, ) +OPCODE(ConvertF64S8, F64, U32, ) +OPCODE(ConvertF64S16, F64, U32, ) +OPCODE(ConvertF64S32, F64, U32, ) +OPCODE(ConvertF64S64, F64, U64, ) +OPCODE(ConvertF64U8, F64, U32, ) +OPCODE(ConvertF64U16, F64, U32, ) +OPCODE(ConvertF64U32, F64, U32, ) +OPCODE(ConvertF64U64, F64, U64, ) + +// Image operations +OPCODE(BindlessImageSampleImplicitLod, F32x4, U32, Opaque, Opaque, Opaque, ) +OPCODE(BindlessImageSampleExplicitLod, F32x4, U32, Opaque, Opaque, Opaque, ) +OPCODE(BindlessImageSampleDrefImplicitLod, F32, U32, Opaque, F32, Opaque, Opaque, ) +OPCODE(BindlessImageSampleDrefExplicitLod, F32, U32, Opaque, F32, Opaque, Opaque, ) +OPCODE(BindlessImageGather, F32x4, U32, Opaque, Opaque, Opaque, ) +OPCODE(BindlessImageGatherDref, F32x4, U32, Opaque, Opaque, Opaque, F32, ) +OPCODE(BindlessImageFetch, F32x4, U32, Opaque, Opaque, U32, Opaque, ) +OPCODE(BindlessImageQueryDimensions, U32x4, U32, U32, ) +OPCODE(BindlessImageQueryLod, F32x4, U32, Opaque, ) +OPCODE(BindlessImageGradient, F32x4, U32, Opaque, Opaque, Opaque, Opaque, ) +OPCODE(BindlessImageRead, U32x4, U32, Opaque, ) +OPCODE(BindlessImageWrite, Void, 
U32, Opaque, U32x4, ) + +OPCODE(BoundImageSampleImplicitLod, F32x4, U32, Opaque, Opaque, Opaque, ) +OPCODE(BoundImageSampleExplicitLod, F32x4, U32, Opaque, Opaque, Opaque, ) +OPCODE(BoundImageSampleDrefImplicitLod, F32, U32, Opaque, F32, Opaque, Opaque, ) +OPCODE(BoundImageSampleDrefExplicitLod, F32, U32, Opaque, F32, Opaque, Opaque, ) +OPCODE(BoundImageGather, F32x4, U32, Opaque, Opaque, Opaque, ) +OPCODE(BoundImageGatherDref, F32x4, U32, Opaque, Opaque, Opaque, F32, ) +OPCODE(BoundImageFetch, F32x4, U32, Opaque, Opaque, U32, Opaque, ) +OPCODE(BoundImageQueryDimensions, U32x4, U32, U32, ) +OPCODE(BoundImageQueryLod, F32x4, U32, Opaque, ) +OPCODE(BoundImageGradient, F32x4, U32, Opaque, Opaque, Opaque, Opaque, ) +OPCODE(BoundImageRead, U32x4, U32, Opaque, ) +OPCODE(BoundImageWrite, Void, U32, Opaque, U32x4, ) + +OPCODE(ImageSampleImplicitLod, F32x4, Opaque, Opaque, Opaque, Opaque, ) +OPCODE(ImageSampleExplicitLod, F32x4, Opaque, Opaque, Opaque, Opaque, ) +OPCODE(ImageSampleDrefImplicitLod, F32, Opaque, Opaque, F32, Opaque, Opaque, ) +OPCODE(ImageSampleDrefExplicitLod, F32, Opaque, Opaque, F32, Opaque, Opaque, ) +OPCODE(ImageGather, F32x4, Opaque, Opaque, Opaque, Opaque, ) +OPCODE(ImageGatherDref, F32x4, Opaque, Opaque, Opaque, Opaque, F32, ) +OPCODE(ImageFetch, F32x4, Opaque, Opaque, Opaque, U32, Opaque, ) +OPCODE(ImageQueryDimensions, U32x4, Opaque, U32, ) +OPCODE(ImageQueryLod, F32x4, Opaque, Opaque, ) +OPCODE(ImageGradient, F32x4, Opaque, Opaque, Opaque, Opaque, Opaque, ) +OPCODE(ImageRead, U32x4, Opaque, Opaque, ) +OPCODE(ImageWrite, Void, Opaque, Opaque, U32x4, ) + +// Atomic Image operations + +OPCODE(BindlessImageAtomicIAdd32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicSMin32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicUMin32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicSMax32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicUMax32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicInc32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicDec32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicAnd32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicOr32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicXor32, U32, U32, Opaque, U32, ) +OPCODE(BindlessImageAtomicExchange32, U32, U32, Opaque, U32, ) + +OPCODE(BoundImageAtomicIAdd32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicSMin32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicUMin32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicSMax32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicUMax32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicInc32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicDec32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicAnd32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicOr32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicXor32, U32, U32, Opaque, U32, ) +OPCODE(BoundImageAtomicExchange32, U32, U32, Opaque, U32, ) + +OPCODE(ImageAtomicIAdd32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicSMin32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicUMin32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicSMax32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicUMax32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicInc32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicDec32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicAnd32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicOr32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicXor32, U32, Opaque, Opaque, U32, ) +OPCODE(ImageAtomicExchange32, U32, Opaque, Opaque, U32, ) + +// Warp operations +OPCODE(LaneId, 
U32, )
+OPCODE(VoteAll, U1, U1, )
+OPCODE(VoteAny, U1, U1, )
+OPCODE(VoteEqual, U1, U1, )
+OPCODE(SubgroupBallot, U32, U1, )
+OPCODE(SubgroupEqMask, U32, )
+OPCODE(SubgroupLtMask, U32, )
+OPCODE(SubgroupLeMask, U32, )
+OPCODE(SubgroupGtMask, U32, )
+OPCODE(SubgroupGeMask, U32, )
+OPCODE(ShuffleIndex, U32, U32, U32, U32, U32, )
+OPCODE(ShuffleUp, U32, U32, U32, U32, U32, )
+OPCODE(ShuffleDown, U32, U32, U32, U32, U32, )
+OPCODE(ShuffleButterfly, U32, U32, U32, U32, U32, )
+OPCODE(FSwizzleAdd, F32, F32, F32, U32, )
+OPCODE(DPdxFine, F32, F32, )
+OPCODE(DPdyFine, F32, F32, )
+OPCODE(DPdxCoarse, F32, F32, )
+OPCODE(DPdyCoarse, F32, F32, )
diff --git a/src/shader_recompiler/frontend/ir/patch.cpp b/src/shader_recompiler/frontend/ir/patch.cpp
new file mode 100755
index 000000000..4c956a970
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/patch.cpp
@@ -0,0 +1,28 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/ir/patch.h"
+
+namespace Shader::IR {
+
+bool IsGeneric(Patch patch) noexcept {
+    return patch >= Patch::Component0 && patch <= Patch::Component119;
+}
+
+u32 GenericPatchIndex(Patch patch) {
+    if (!IsGeneric(patch)) {
+        throw InvalidArgument("Patch {} is not generic", patch);
+    }
+    return (static_cast<u32>(patch) - static_cast<u32>(Patch::Component0)) / 4;
+}
+
+u32 GenericPatchElement(Patch patch) {
+    if (!IsGeneric(patch)) {
+        throw InvalidArgument("Patch {} is not generic", patch);
+    }
+    return (static_cast<u32>(patch) - static_cast<u32>(Patch::Component0)) % 4;
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/patch.h b/src/shader_recompiler/frontend/ir/patch.h
new file mode 100755
index 000000000..6d66ff0d6
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/patch.h
@@ -0,0 +1,149 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
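+//
+// Patch memory is addressed as six tessellation levels, two padding slots,
+// and 120 generic components, i.e. 30 user vec4 patch attributes. Worked
+// example of the helpers declared below (values follow from the enum layout):
+//
+//   IsGeneric(Patch::Component5);           // true
+//   GenericPatchIndex(Patch::Component5);   // (13 - 8) / 4 == 1 -> patch 1
+//   GenericPatchElement(Patch::Component5); // (13 - 8) % 4 == 1 -> element y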
+
+#pragma once
+
+#include "common/common_types.h"
+
+namespace Shader::IR {
+
+enum class Patch : u64 {
+    TessellationLodLeft,
+    TessellationLodTop,
+    TessellationLodRight,
+    TessellationLodBottom,
+    TessellationLodInteriorU,
+    TessellationLodInteriorV,
+    ComponentPadding0,
+    ComponentPadding1,
+    Component0,
+    Component1,
+    Component2,
+    Component3,
+    Component4,
+    Component5,
+    Component6,
+    Component7,
+    Component8,
+    Component9,
+    Component10,
+    Component11,
+    Component12,
+    Component13,
+    Component14,
+    Component15,
+    Component16,
+    Component17,
+    Component18,
+    Component19,
+    Component20,
+    Component21,
+    Component22,
+    Component23,
+    Component24,
+    Component25,
+    Component26,
+    Component27,
+    Component28,
+    Component29,
+    Component30,
+    Component31,
+    Component32,
+    Component33,
+    Component34,
+    Component35,
+    Component36,
+    Component37,
+    Component38,
+    Component39,
+    Component40,
+    Component41,
+    Component42,
+    Component43,
+    Component44,
+    Component45,
+    Component46,
+    Component47,
+    Component48,
+    Component49,
+    Component50,
+    Component51,
+    Component52,
+    Component53,
+    Component54,
+    Component55,
+    Component56,
+    Component57,
+    Component58,
+    Component59,
+    Component60,
+    Component61,
+    Component62,
+    Component63,
+    Component64,
+    Component65,
+    Component66,
+    Component67,
+    Component68,
+    Component69,
+    Component70,
+    Component71,
+    Component72,
+    Component73,
+    Component74,
+    Component75,
+    Component76,
+    Component77,
+    Component78,
+    Component79,
+    Component80,
+    Component81,
+    Component82,
+    Component83,
+    Component84,
+    Component85,
+    Component86,
+    Component87,
+    Component88,
+    Component89,
+    Component90,
+    Component91,
+    Component92,
+    Component93,
+    Component94,
+    Component95,
+    Component96,
+    Component97,
+    Component98,
+    Component99,
+    Component100,
+    Component101,
+    Component102,
+    Component103,
+    Component104,
+    Component105,
+    Component106,
+    Component107,
+    Component108,
+    Component109,
+    Component110,
+    Component111,
+    Component112,
+    Component113,
+    Component114,
+    Component115,
+    Component116,
+    Component117,
+    Component118,
+    Component119,
+};
+static_assert(static_cast<u64>(Patch::Component119) == 127);
+
+[[nodiscard]] bool IsGeneric(Patch patch) noexcept;
+
+[[nodiscard]] u32 GenericPatchIndex(Patch patch);
+
+[[nodiscard]] u32 GenericPatchElement(Patch patch);
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/post_order.cpp b/src/shader_recompiler/frontend/ir/post_order.cpp
new file mode 100755
index 000000000..16bc44101
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/post_order.cpp
@@ -0,0 +1,46 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
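+//
+// Iterative post-order traversal of the block graph. Each block is pushed
+// back onto the stack before its unvisited successor, so it is emitted only
+// once every successor has been emitted. Hypothetical use, assuming a
+// Program whose syntax_list starts with its entry block:
+//
+//   const IR::BlockList order{IR::PostOrder(program.syntax_list.front())};
+//   // order.back() is the entry block; walking it in reverse yields RPO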
+
+#include <algorithm>
+
+#include <boost/container/flat_set.hpp>
+#include <boost/container/small_vector.hpp>
+
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/post_order.h"
+
+namespace Shader::IR {
+
+BlockList PostOrder(const AbstractSyntaxNode& root) {
+    boost::container::small_vector<Block*, 16> block_stack;
+    boost::container::flat_set<Block*> visited;
+    BlockList post_order_blocks;
+
+    if (root.type != AbstractSyntaxNode::Type::Block) {
+        throw LogicError("First node in abstract syntax list root is not a block");
+    }
+    Block* const first_block{root.data.block};
+    visited.insert(first_block);
+    block_stack.push_back(first_block);
+
+    while (!block_stack.empty()) {
+        Block* const block{block_stack.back()};
+        const auto visit{[&](Block* branch) {
+            if (!visited.insert(branch).second) {
+                return false;
+            }
+            // Calling push_back twice is faster than insert on MSVC
+            block_stack.push_back(block);
+            block_stack.push_back(branch);
+            return true;
+        }};
+        block_stack.pop_back();
+        if (std::ranges::none_of(block->ImmSuccessors(), visit)) {
+            post_order_blocks.push_back(block);
+        }
+    }
+    return post_order_blocks;
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/post_order.h b/src/shader_recompiler/frontend/ir/post_order.h
new file mode 100755
index 000000000..07bfbadc3
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/post_order.h
@@ -0,0 +1,14 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include "shader_recompiler/frontend/ir/abstract_syntax_list.h"
+#include "shader_recompiler/frontend/ir/basic_block.h"
+
+namespace Shader::IR {
+
+BlockList PostOrder(const AbstractSyntaxNode& root);
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/pred.h b/src/shader_recompiler/frontend/ir/pred.h
new file mode 100755
index 000000000..4e7f32423
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/pred.h
@@ -0,0 +1,44 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <fmt/format.h>
+
+#include "common/common_types.h"
+
+namespace Shader::IR {
+
+enum class Pred : u64 {
+    P0,
+    P1,
+    P2,
+    P3,
+    P4,
+    P5,
+    P6,
+    PT,
+};
+
+constexpr size_t NUM_USER_PREDS = 7;
+constexpr size_t NUM_PREDS = 8;
+
+[[nodiscard]] constexpr size_t PredIndex(Pred pred) noexcept {
+    return static_cast<size_t>(pred);
+}
+
+} // namespace Shader::IR
+
+template <>
+struct fmt::formatter<Shader::IR::Pred> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::IR::Pred& pred, FormatContext& ctx) {
+        if (pred == Shader::IR::Pred::PT) {
+            return fmt::format_to(ctx.out(), "PT");
+        } else {
+            return fmt::format_to(ctx.out(), "P{}", static_cast<int>(pred));
+        }
+    }
+};
diff --git a/src/shader_recompiler/frontend/ir/program.cpp b/src/shader_recompiler/frontend/ir/program.cpp
new file mode 100755
index 000000000..3fc06f855
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/program.cpp
@@ -0,0 +1,32 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
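+//
+// Textual dump of a whole IR program, one block after another, sharing a
+// single numbering for blocks and instructions. Hypothetical debugging use,
+// assuming a fully built Program named program:
+//
+//   fmt::print("{}", IR::DumpProgram(program));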
+
+#include <map>
+#include <string>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/program.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::IR {
+
+std::string DumpProgram(const Program& program) {
+    size_t index{0};
+    std::map<const IR::Inst*, size_t> inst_to_index;
+    std::map<const IR::Block*, size_t> block_to_index;
+
+    for (const IR::Block* const block : program.blocks) {
+        block_to_index.emplace(block, index);
+        ++index;
+    }
+    std::string ret;
+    for (const auto& block : program.blocks) {
+        ret += IR::DumpBlock(*block, block_to_index, inst_to_index, index) + '\n';
+    }
+    return ret;
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/program.h b/src/shader_recompiler/frontend/ir/program.h
new file mode 100755
index 000000000..ebcaa8bc2
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/program.h
@@ -0,0 +1,35 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <array>
+#include <string>
+
+#include "shader_recompiler/frontend/ir/abstract_syntax_list.h"
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/program_header.h"
+#include "shader_recompiler/shader_info.h"
+#include "shader_recompiler/stage.h"
+
+namespace Shader::IR {
+
+struct Program {
+    AbstractSyntaxList syntax_list;
+    BlockList blocks;
+    BlockList post_order_blocks;
+    Info info;
+    Stage stage{};
+    std::array<u32, 3> workgroup_size{};
+    OutputTopology output_topology{};
+    u32 output_vertices{};
+    u32 invocations{};
+    u32 local_memory_size{};
+    u32 shared_memory_size{};
+    bool is_geometry_passthrough{};
+};
+
+[[nodiscard]] std::string DumpProgram(const Program& program);
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/reg.h b/src/shader_recompiler/frontend/ir/reg.h
new file mode 100755
index 000000000..a4b635792
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/reg.h
@@ -0,0 +1,332 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
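+//
+// Maxwell register file: R0-R254 are general purpose and RZ always reads as
+// zero. The operators below keep RZ sticky under arithmetic and throw on
+// out-of-range results. For illustration:
+//
+//   Reg::R4 + 2;           // Reg::R6
+//   Reg::RZ + 10;          // Reg::RZ (offsets from RZ stay RZ)
+//   IsAligned(Reg::R8, 4); // true, R8 can base a 128-bit access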
+
+#pragma once
+
+#include <fmt/format.h>
+
+#include "common/common_types.h"
+#include "shader_recompiler/exception.h"
+
+namespace Shader::IR {
+
+enum class Reg : u64 {
+    R0,
+    R1,
+    R2,
+    R3,
+    R4,
+    R5,
+    R6,
+    R7,
+    R8,
+    R9,
+    R10,
+    R11,
+    R12,
+    R13,
+    R14,
+    R15,
+    R16,
+    R17,
+    R18,
+    R19,
+    R20,
+    R21,
+    R22,
+    R23,
+    R24,
+    R25,
+    R26,
+    R27,
+    R28,
+    R29,
+    R30,
+    R31,
+    R32,
+    R33,
+    R34,
+    R35,
+    R36,
+    R37,
+    R38,
+    R39,
+    R40,
+    R41,
+    R42,
+    R43,
+    R44,
+    R45,
+    R46,
+    R47,
+    R48,
+    R49,
+    R50,
+    R51,
+    R52,
+    R53,
+    R54,
+    R55,
+    R56,
+    R57,
+    R58,
+    R59,
+    R60,
+    R61,
+    R62,
+    R63,
+    R64,
+    R65,
+    R66,
+    R67,
+    R68,
+    R69,
+    R70,
+    R71,
+    R72,
+    R73,
+    R74,
+    R75,
+    R76,
+    R77,
+    R78,
+    R79,
+    R80,
+    R81,
+    R82,
+    R83,
+    R84,
+    R85,
+    R86,
+    R87,
+    R88,
+    R89,
+    R90,
+    R91,
+    R92,
+    R93,
+    R94,
+    R95,
+    R96,
+    R97,
+    R98,
+    R99,
+    R100,
+    R101,
+    R102,
+    R103,
+    R104,
+    R105,
+    R106,
+    R107,
+    R108,
+    R109,
+    R110,
+    R111,
+    R112,
+    R113,
+    R114,
+    R115,
+    R116,
+    R117,
+    R118,
+    R119,
+    R120,
+    R121,
+    R122,
+    R123,
+    R124,
+    R125,
+    R126,
+    R127,
+    R128,
+    R129,
+    R130,
+    R131,
+    R132,
+    R133,
+    R134,
+    R135,
+    R136,
+    R137,
+    R138,
+    R139,
+    R140,
+    R141,
+    R142,
+    R143,
+    R144,
+    R145,
+    R146,
+    R147,
+    R148,
+    R149,
+    R150,
+    R151,
+    R152,
+    R153,
+    R154,
+    R155,
+    R156,
+    R157,
+    R158,
+    R159,
+    R160,
+    R161,
+    R162,
+    R163,
+    R164,
+    R165,
+    R166,
+    R167,
+    R168,
+    R169,
+    R170,
+    R171,
+    R172,
+    R173,
+    R174,
+    R175,
+    R176,
+    R177,
+    R178,
+    R179,
+    R180,
+    R181,
+    R182,
+    R183,
+    R184,
+    R185,
+    R186,
+    R187,
+    R188,
+    R189,
+    R190,
+    R191,
+    R192,
+    R193,
+    R194,
+    R195,
+    R196,
+    R197,
+    R198,
+    R199,
+    R200,
+    R201,
+    R202,
+    R203,
+    R204,
+    R205,
+    R206,
+    R207,
+    R208,
+    R209,
+    R210,
+    R211,
+    R212,
+    R213,
+    R214,
+    R215,
+    R216,
+    R217,
+    R218,
+    R219,
+    R220,
+    R221,
+    R222,
+    R223,
+    R224,
+    R225,
+    R226,
+    R227,
+    R228,
+    R229,
+    R230,
+    R231,
+    R232,
+    R233,
+    R234,
+    R235,
+    R236,
+    R237,
+    R238,
+    R239,
+    R240,
+    R241,
+    R242,
+    R243,
+    R244,
+    R245,
+    R246,
+    R247,
+    R248,
+    R249,
+    R250,
+    R251,
+    R252,
+    R253,
+    R254,
+    RZ,
+};
+static_assert(static_cast<size_t>(Reg::RZ) == 255);
+
+constexpr size_t NUM_USER_REGS = 255;
+constexpr size_t NUM_REGS = 256;
+
+[[nodiscard]] constexpr Reg operator+(Reg reg, int num) {
+    if (reg == Reg::RZ) {
+        // Adding or subtracting registers from RZ yields RZ
+        return Reg::RZ;
+    }
+    const int result{static_cast<int>(reg) + num};
+    if (result >= static_cast<int>(Reg::RZ)) {
+        throw LogicError("Overflow on register arithmetic");
+    }
+    if (result < 0) {
+        throw LogicError("Underflow on register arithmetic");
+    }
+    return static_cast<Reg>(result);
+}
+
+[[nodiscard]] constexpr Reg operator-(Reg reg, int num) {
+    return reg + (-num);
+}
+
+constexpr Reg operator++(Reg& reg) {
+    reg = reg + 1;
+    return reg;
+}
+
+constexpr Reg operator++(Reg& reg, int) {
+    const Reg copy{reg};
+    reg = reg + 1;
+    return copy;
+}
+
+[[nodiscard]] constexpr size_t RegIndex(Reg reg) noexcept {
+    return static_cast<size_t>(reg);
+}
+
+[[nodiscard]] constexpr bool IsAligned(Reg reg, size_t align) {
+    return RegIndex(reg) % align == 0 || reg == Reg::RZ;
+}
+
+} // namespace Shader::IR
+
+template <>
+struct fmt::formatter<Shader::IR::Reg> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::IR::Reg& reg, FormatContext& ctx) {
+        if (reg == Shader::IR::Reg::RZ) {
+            return fmt::format_to(ctx.out(), "RZ");
+        } else if (static_cast<int>(reg) >= 0 && static_cast<int>(reg) < 255) {
+            return fmt::format_to(ctx.out(), "R{}", static_cast<int>(reg));
+        } else {
+            throw Shader::LogicError("Invalid register with raw value {}", static_cast<int>(reg));
+        }
+    }
+};
diff --git a/src/shader_recompiler/frontend/ir/type.cpp b/src/shader_recompiler/frontend/ir/type.cpp
new file mode 100755
index 000000000..f28341bfe
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/type.cpp
@@ -0,0 +1,38 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <array>
+#include <string>
+
+#include "shader_recompiler/frontend/ir/type.h"
+
+namespace Shader::IR {
+
+std::string NameOf(Type type) {
+    static constexpr std::array names{
+        "Opaque", "Reg",   "Pred",  "Attribute", "Patch", "U1",    "U8",    "U16",   "U32",
+        "U64",    "F16",   "F32",   "F64",       "U32x2", "U32x3", "U32x4", "F16x2", "F16x3",
+        "F16x4",  "F32x2", "F32x3", "F32x4",     "F64x2", "F64x3", "F64x4",
+    };
+    const size_t bits{static_cast<size_t>(type)};
+    if (bits == 0) {
+        return "Void";
+    }
+    std::string result;
+    for (size_t i = 0; i < names.size(); i++) {
+        if ((bits & (size_t{1} << i)) != 0) {
+            if (!result.empty()) {
+                result += '|';
+            }
+            result += names[i];
+        }
+    }
+    return result;
+}
+
+bool AreTypesCompatible(Type lhs, Type rhs) noexcept {
+    return lhs == rhs || lhs == Type::Opaque || rhs == Type::Opaque;
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/type.h b/src/shader_recompiler/frontend/ir/type.h
new file mode 100755
index 000000000..294b230c4
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/type.h
@@ -0,0 +1,61 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <string>
+
+#include <fmt/format.h>
+
+#include "common/common_funcs.h"
+#include "shader_recompiler/exception.h"
+
+namespace Shader::IR {
+
+enum class Type {
+    Void = 0,
+    Opaque = 1 << 0,
+    Reg = 1 << 1,
+    Pred = 1 << 2,
+    Attribute = 1 << 3,
+    Patch = 1 << 4,
+    U1 = 1 << 5,
+    U8 = 1 << 6,
+    U16 = 1 << 7,
+    U32 = 1 << 8,
+    U64 = 1 << 9,
+    F16 = 1 << 10,
+    F32 = 1 << 11,
+    F64 = 1 << 12,
+    U32x2 = 1 << 13,
+    U32x3 = 1 << 14,
+    U32x4 = 1 << 15,
+    F16x2 = 1 << 16,
+    F16x3 = 1 << 17,
+    F16x4 = 1 << 18,
+    F32x2 = 1 << 19,
+    F32x3 = 1 << 20,
+    F32x4 = 1 << 21,
+    F64x2 = 1 << 22,
+    F64x3 = 1 << 23,
+    F64x4 = 1 << 24,
+};
+DECLARE_ENUM_FLAG_OPERATORS(Type)
+
+[[nodiscard]] std::string NameOf(Type type);
+
+[[nodiscard]] bool AreTypesCompatible(Type lhs, Type rhs) noexcept;
+
+} // namespace Shader::IR
+
+template <>
+struct fmt::formatter<Shader::IR::Type> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::IR::Type& type, FormatContext& ctx) {
+        return fmt::format_to(ctx.out(), "{}", NameOf(type));
+    }
+};
diff --git a/src/shader_recompiler/frontend/ir/value.cpp b/src/shader_recompiler/frontend/ir/value.cpp
new file mode 100755
index 000000000..d365ea1bc
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/value.cpp
@@ -0,0 +1,99 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
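+//
+// Out-of-line Value helpers. Note the two indirections resolved by Type():
+// phi nodes store their result type in the instruction's flags word, and
+// Identity values forward to their argument. Sketch, illustration only:
+//
+//   IR::Value v{phi_inst};  // hypothetical phi-producing instruction
+//   v.Type();               // reads phi_inst->Flags<IR::Type>()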
+
+#include "shader_recompiler/frontend/ir/opcodes.h"
+#include "shader_recompiler/frontend/ir/value.h"
+
+namespace Shader::IR {
+
+Value::Value(IR::Inst* value) noexcept : type{Type::Opaque}, inst{value} {}
+
+Value::Value(IR::Reg value) noexcept : type{Type::Reg}, reg{value} {}
+
+Value::Value(IR::Pred value) noexcept : type{Type::Pred}, pred{value} {}
+
+Value::Value(IR::Attribute value) noexcept : type{Type::Attribute}, attribute{value} {}
+
+Value::Value(IR::Patch value) noexcept : type{Type::Patch}, patch{value} {}
+
+Value::Value(bool value) noexcept : type{Type::U1}, imm_u1{value} {}
+
+Value::Value(u8 value) noexcept : type{Type::U8}, imm_u8{value} {}
+
+Value::Value(u16 value) noexcept : type{Type::U16}, imm_u16{value} {}
+
+Value::Value(u32 value) noexcept : type{Type::U32}, imm_u32{value} {}
+
+Value::Value(f32 value) noexcept : type{Type::F32}, imm_f32{value} {}
+
+Value::Value(u64 value) noexcept : type{Type::U64}, imm_u64{value} {}
+
+Value::Value(f64 value) noexcept : type{Type::F64}, imm_f64{value} {}
+
+IR::Type Value::Type() const noexcept {
+    if (IsPhi()) {
+        // The type of a phi node is stored in its flags
+        return inst->Flags<IR::Type>();
+    }
+    if (IsIdentity()) {
+        return inst->Arg(0).Type();
+    }
+    if (type == Type::Opaque) {
+        return inst->Type();
+    }
+    return type;
+}
+
+bool Value::operator==(const Value& other) const {
+    if (type != other.type) {
+        return false;
+    }
+    switch (type) {
+    case Type::Void:
+        return true;
+    case Type::Opaque:
+        return inst == other.inst;
+    case Type::Reg:
+        return reg == other.reg;
+    case Type::Pred:
+        return pred == other.pred;
+    case Type::Attribute:
+        return attribute == other.attribute;
+    case Type::Patch:
+        return patch == other.patch;
+    case Type::U1:
+        return imm_u1 == other.imm_u1;
+    case Type::U8:
+        return imm_u8 == other.imm_u8;
+    case Type::U16:
+    case Type::F16:
+        return imm_u16 == other.imm_u16;
+    case Type::U32:
+    case Type::F32:
+        return imm_u32 == other.imm_u32;
+    case Type::U64:
+    case Type::F64:
+        return imm_u64 == other.imm_u64;
+    case Type::U32x2:
+    case Type::U32x3:
+    case Type::U32x4:
+    case Type::F16x2:
+    case Type::F16x3:
+    case Type::F16x4:
+    case Type::F32x2:
+    case Type::F32x3:
+    case Type::F32x4:
+    case Type::F64x2:
+    case Type::F64x3:
+    case Type::F64x4:
+        break;
+    }
+    throw LogicError("Invalid type {}", type);
+}
+
+bool Value::operator!=(const Value& other) const {
+    return !operator==(other);
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/ir/value.h b/src/shader_recompiler/frontend/ir/value.h
new file mode 100755
index 000000000..0c6bf684d
--- /dev/null
+++ b/src/shader_recompiler/frontend/ir/value.h
@@ -0,0 +1,398 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
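+//
+// Value is a tagged union over immediates and instruction references;
+// TypedValue<mask> restricts it to a set of IR types at compile time.
+// Widening to a larger mask is implicit, everything else is checked on
+// construction. Sketch, illustration only:
+//
+//   IR::U32 word{value};  // throws InvalidArgument unless value is a U32
+//   IR::U32U64 any{word}; // implicit: U32 is a subset of U32|U64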
+
+#pragma once
+
+#include <array>
+#include <cstring>
+#include <memory>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include <boost/container/small_vector.hpp>
+#include <boost/intrusive/list.hpp>
+
+#include "common/assert.h"
+#include "common/bit_cast.h"
+#include "common/common_types.h"
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/ir/attribute.h"
+#include "shader_recompiler/frontend/ir/opcodes.h"
+#include "shader_recompiler/frontend/ir/patch.h"
+#include "shader_recompiler/frontend/ir/pred.h"
+#include "shader_recompiler/frontend/ir/reg.h"
+#include "shader_recompiler/frontend/ir/type.h"
+
+namespace Shader::IR {
+
+class Block;
+class Inst;
+
+struct AssociatedInsts;
+
+class Value {
+public:
+    Value() noexcept = default;
+    explicit Value(IR::Inst* value) noexcept;
+    explicit Value(IR::Reg value) noexcept;
+    explicit Value(IR::Pred value) noexcept;
+    explicit Value(IR::Attribute value) noexcept;
+    explicit Value(IR::Patch value) noexcept;
+    explicit Value(bool value) noexcept;
+    explicit Value(u8 value) noexcept;
+    explicit Value(u16 value) noexcept;
+    explicit Value(u32 value) noexcept;
+    explicit Value(f32 value) noexcept;
+    explicit Value(u64 value) noexcept;
+    explicit Value(f64 value) noexcept;
+
+    [[nodiscard]] bool IsIdentity() const noexcept;
+    [[nodiscard]] bool IsPhi() const noexcept;
+    [[nodiscard]] bool IsEmpty() const noexcept;
+    [[nodiscard]] bool IsImmediate() const noexcept;
+    [[nodiscard]] IR::Type Type() const noexcept;
+
+    [[nodiscard]] IR::Inst* Inst() const;
+    [[nodiscard]] IR::Inst* InstRecursive() const;
+    [[nodiscard]] IR::Value Resolve() const;
+    [[nodiscard]] IR::Reg Reg() const;
+    [[nodiscard]] IR::Pred Pred() const;
+    [[nodiscard]] IR::Attribute Attribute() const;
+    [[nodiscard]] IR::Patch Patch() const;
+    [[nodiscard]] bool U1() const;
+    [[nodiscard]] u8 U8() const;
+    [[nodiscard]] u16 U16() const;
+    [[nodiscard]] u32 U32() const;
+    [[nodiscard]] f32 F32() const;
+    [[nodiscard]] u64 U64() const;
+    [[nodiscard]] f64 F64() const;
+
+    [[nodiscard]] bool operator==(const Value& other) const;
+    [[nodiscard]] bool operator!=(const Value& other) const;
+
+private:
+    IR::Type type{};
+    union {
+        IR::Inst* inst{};
+        IR::Reg reg;
+        IR::Pred pred;
+        IR::Attribute attribute;
+        IR::Patch patch;
+        bool imm_u1;
+        u8 imm_u8;
+        u16 imm_u16;
+        u32 imm_u32;
+        f32 imm_f32;
+        u64 imm_u64;
+        f64 imm_f64;
+    };
+};
+static_assert(static_cast<size_t>(IR::Type::Void) == 0, "memset relies on IR::Type being zero");
+static_assert(std::is_trivially_copyable_v<Value>);
+
+template <IR::Type type_>
+class TypedValue : public Value {
+public:
+    TypedValue() = default;
+
+    template <IR::Type other_type>
+    requires((other_type & type_) != IR::Type::Void) explicit(false)
+        TypedValue(const TypedValue<other_type>& value)
+        : Value(value) {}
+
+    explicit TypedValue(const Value& value) : Value(value) {
+        if ((value.Type() & type_) == IR::Type::Void) {
+            throw InvalidArgument("Incompatible types {} and {}", type_, value.Type());
+        }
+    }
+
+    explicit TypedValue(IR::Inst* inst_) : TypedValue(Value(inst_)) {}
+};
+
+class Inst : public boost::intrusive::list_base_hook<> {
+public:
+    explicit Inst(IR::Opcode op_, u32 flags_) noexcept;
+    ~Inst();
+
+    Inst& operator=(const Inst&) = delete;
+    Inst(const Inst&) = delete;
+
+    Inst& operator=(Inst&&) = delete;
+    Inst(Inst&&) = delete;
+
+    /// Get the number of uses this instruction has.
+    [[nodiscard]] int UseCount() const noexcept {
+        return use_count;
+    }
+
+    /// Determines whether this instruction has uses or not.
+    [[nodiscard]] bool HasUses() const noexcept {
+        return use_count > 0;
+    }
+
+    /// Get the opcode this microinstruction represents.
+    [[nodiscard]] IR::Opcode GetOpcode() const noexcept {
+        return op;
+    }
+
+    /// Determines if there is a pseudo-operation associated with this instruction.
+    [[nodiscard]] bool HasAssociatedPseudoOperation() const noexcept {
+        return associated_insts != nullptr;
+    }
+
+    /// Determines whether or not this instruction may have side effects.
+    [[nodiscard]] bool MayHaveSideEffects() const noexcept;
+
+    /// Determines whether or not this instruction is a pseudo-instruction.
+    /// Pseudo-instructions depend on their parent instructions for their semantics.
+    [[nodiscard]] bool IsPseudoInstruction() const noexcept;
+
+    /// Determines if all arguments of this instruction are immediates.
+    [[nodiscard]] bool AreAllArgsImmediates() const;
+
+    /// Gets a pseudo-operation associated with this instruction
+    [[nodiscard]] Inst* GetAssociatedPseudoOperation(IR::Opcode opcode);
+
+    /// Get the type this instruction returns.
+    [[nodiscard]] IR::Type Type() const;
+
+    /// Get the number of arguments this instruction has.
+    [[nodiscard]] size_t NumArgs() const {
+        return op == IR::Opcode::Phi ? phi_args.size() : NumArgsOf(op);
+    }
+
+    /// Get the value of a given argument index.
+    [[nodiscard]] Value Arg(size_t index) const noexcept {
+        if (op == IR::Opcode::Phi) {
+            return phi_args[index].second;
+        } else {
+            return args[index];
+        }
+    }
+
+    /// Set the value of a given argument index.
+    void SetArg(size_t index, Value value);
+
+    /// Get a pointer to the block of a phi argument.
+    [[nodiscard]] Block* PhiBlock(size_t index) const;
+    /// Add phi operand to a phi instruction.
+    void AddPhiOperand(Block* predecessor, const Value& value);
+
+    void Invalidate();
+    void ClearArgs();
+
+    void ReplaceUsesWith(Value replacement);
+
+    void ReplaceOpcode(IR::Opcode opcode);
+
+    template <typename FlagsType>
+    requires(sizeof(FlagsType) <= sizeof(u32) && std::is_trivially_copyable_v<FlagsType>)
+        [[nodiscard]] FlagsType Flags() const noexcept {
+        FlagsType ret;
+        std::memcpy(reinterpret_cast<char*>(&ret), &flags, sizeof(ret));
+        return ret;
+    }
+
+    template <typename FlagsType>
+    requires(sizeof(FlagsType) <= sizeof(u32) && std::is_trivially_copyable_v<FlagsType>)
+        void SetFlags(FlagsType value) noexcept {
+        std::memcpy(&flags, &value, sizeof(value));
+    }
+
+    /// Intrusively store the host definition of this instruction.
+    template <typename DefinitionType>
+    void SetDefinition(DefinitionType def) {
+        definition = Common::BitCast<u32>(def);
+    }
+
+    /// Return the intrusively stored host definition of this instruction.
+    template <typename DefinitionType>
+    [[nodiscard]] DefinitionType Definition() const noexcept {
+        return Common::BitCast<DefinitionType>(definition);
+    }
+
+    /// Destructively remove one reference count from the instruction
+    /// Useful for register allocation
+    void DestructiveRemoveUsage() {
+        --use_count;
+    }
+
+    /// Destructively add usages to the instruction
+    /// Useful for register allocation
+    void DestructiveAddUsage(int count) {
+        use_count += count;
+    }
+
+private:
+    struct NonTriviallyDummy {
+        NonTriviallyDummy() noexcept {}
+    };
+
+    void Use(const Value& value);
+    void UndoUse(const Value& value);
+
+    IR::Opcode op{};
+    int use_count{};
+    u32 flags{};
+    u32 definition{};
+    union {
+        NonTriviallyDummy dummy{};
+        boost::container::small_vector<std::pair<Block*, Value>, 2> phi_args;
+        std::array<Value, 5> args;
+    };
+    std::unique_ptr<AssociatedInsts> associated_insts;
+};
+static_assert(sizeof(Inst) <= 128, "Inst size unintentionally increased");
+
+struct AssociatedInsts {
+    union {
+        Inst* in_bounds_inst;
+        Inst* sparse_inst;
+        Inst* zero_inst{};
+    };
+    Inst* sign_inst{};
+    Inst* carry_inst{};
+    Inst* overflow_inst{};
+};
+
+using U1 = TypedValue<Type::U1>;
+using U8 = TypedValue<Type::U8>;
+using U16 = TypedValue<Type::U16>;
+using U32 = TypedValue<Type::U32>;
+using U64 = TypedValue<Type::U64>;
+using F16 = TypedValue<Type::F16>;
+using F32 = TypedValue<Type::F32>;
+using F64 = TypedValue<Type::F64>;
+using U32U64 = TypedValue<Type::U32 | Type::U64>;
+using F32F64 = TypedValue<Type::F32 | Type::F64>;
+using U16U32U64 = TypedValue<Type::U16 | Type::U32 | Type::U64>;
+using F16F32F64 = TypedValue<Type::F16 | Type::F32 | Type::F64>;
+using UAny = TypedValue<Type::U8 | Type::U16 | Type::U32 | Type::U64>;
+
+inline bool Value::IsIdentity() const noexcept {
+    return type == Type::Opaque && inst->GetOpcode() == Opcode::Identity;
+}
+
+inline bool Value::IsPhi() const noexcept {
+    return type == Type::Opaque && inst->GetOpcode() == Opcode::Phi;
+}
+
+inline bool Value::IsEmpty() const noexcept {
+    return type == Type::Void;
+}
+
+inline bool Value::IsImmediate() const noexcept {
+    IR::Type current_type{type};
+    const IR::Inst* current_inst{inst};
+    while (current_type == Type::Opaque && current_inst->GetOpcode() == Opcode::Identity) {
+        const Value& arg{current_inst->Arg(0)};
+        current_type = arg.type;
+        current_inst = arg.inst;
+    }
+    return current_type != Type::Opaque;
+}
+
+inline IR::Inst* Value::Inst() const {
+    DEBUG_ASSERT(type == Type::Opaque);
+    return inst;
+}
+
+inline IR::Inst* Value::InstRecursive() const {
+    DEBUG_ASSERT(type == Type::Opaque);
+    if (IsIdentity()) {
+        return inst->Arg(0).InstRecursive();
+    }
+    return inst;
+}
+
+inline IR::Value Value::Resolve() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).Resolve();
+    }
+    return *this;
+}
+
+inline IR::Reg Value::Reg() const {
+    DEBUG_ASSERT(type == Type::Reg);
+    return reg;
+}
+
+inline IR::Pred Value::Pred() const {
+    DEBUG_ASSERT(type == Type::Pred);
+    return pred;
+}
+
+inline IR::Attribute Value::Attribute() const {
+    DEBUG_ASSERT(type == Type::Attribute);
+    return attribute;
+}
+
+inline IR::Patch Value::Patch() const {
+    DEBUG_ASSERT(type == Type::Patch);
+    return patch;
+}
+
+inline bool Value::U1() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).U1();
+    }
+    DEBUG_ASSERT(type == Type::U1);
+    return imm_u1;
+}
+
+inline u8 Value::U8() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).U8();
+    }
+    DEBUG_ASSERT(type == Type::U8);
+    return imm_u8;
+}
+
+inline u16 Value::U16() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).U16();
+    }
+    DEBUG_ASSERT(type == Type::U16);
+    return imm_u16;
+}
+
+inline u32 Value::U32() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).U32();
+    }
+    DEBUG_ASSERT(type == Type::U32);
+    return imm_u32;
+}
+
+inline f32 Value::F32() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).F32();
+    }
+    DEBUG_ASSERT(type == Type::F32);
+    return imm_f32;
+}
+
+inline u64 Value::U64() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).U64();
+    }
+    DEBUG_ASSERT(type == Type::U64);
+    return imm_u64;
+}
+
+inline f64 Value::F64() const {
+    if (IsIdentity()) {
+        return inst->Arg(0).F64();
+    }
+    DEBUG_ASSERT(type == Type::F64);
+    return imm_f64;
+}
+
+[[nodiscard]] inline bool IsPhi(const Inst& inst) {
+    return inst.GetOpcode() == Opcode::Phi;
+}
+
+} // namespace Shader::IR
diff --git a/src/shader_recompiler/frontend/maxwell/control_flow.cpp b/src/shader_recompiler/frontend/maxwell/control_flow.cpp
new file mode 100755
index 000000000..e7abea82f
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/control_flow.cpp
@@ -0,0 +1,643 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <algorithm>
+#include <array>
+#include <optional>
+#include <ranges>
+#include <string>
+#include <vector>
+
+#include <fmt/format.h>
+
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/maxwell/control_flow.h"
+#include "shader_recompiler/frontend/maxwell/decode.h"
+#include "shader_recompiler/frontend/maxwell/indirect_branch_table_track.h"
+#include "shader_recompiler/frontend/maxwell/location.h"
+
+namespace Shader::Maxwell::Flow {
+namespace {
+struct Compare {
+    bool operator()(const Block& lhs, Location rhs) const noexcept {
+        return lhs.begin < rhs;
+    }
+
+    bool operator()(Location lhs, const Block& rhs) const noexcept {
+        return lhs < rhs.begin;
+    }
+
+    bool operator()(const Block& lhs, const Block& rhs) const noexcept {
+        return lhs.begin < rhs.begin;
+    }
+};
+
+u32 BranchOffset(Location pc, Instruction inst) {
+    return pc.Offset() + static_cast<u32>(inst.branch.Offset()) + 8u;
+}
+
+void Split(Block* old_block, Block* new_block, Location pc) {
+    if (pc <= old_block->begin || pc >= old_block->end) {
+        throw InvalidArgument("Invalid address to split={}", pc);
+    }
+    *new_block = Block{};
+    new_block->begin = pc;
+    new_block->end = old_block->end;
+    new_block->end_class = old_block->end_class;
+    new_block->cond = old_block->cond;
+    new_block->stack = old_block->stack;
+    new_block->branch_true = old_block->branch_true;
+    new_block->branch_false = old_block->branch_false;
+    new_block->function_call = old_block->function_call;
+    new_block->return_block = old_block->return_block;
+    new_block->branch_reg = old_block->branch_reg;
+    new_block->branch_offset = old_block->branch_offset;
+    new_block->indirect_branches = std::move(old_block->indirect_branches);
+
+    const Location old_begin{old_block->begin};
+    Stack old_stack{std::move(old_block->stack)};
+    *old_block = Block{};
+    old_block->begin = old_begin;
+    old_block->end = pc;
+    old_block->end_class = EndClass::Branch;
+    old_block->cond = IR::Condition(true);
+    old_block->stack = old_stack;
+    old_block->branch_true = new_block;
+    old_block->branch_false = nullptr;
+}
+
+Token OpcodeToken(Opcode opcode) {
+    switch (opcode) {
+    case Opcode::PBK:
+    case Opcode::BRK:
+        return Token::PBK;
+    case Opcode::PCNT:
+    case Opcode::CONT:
+        return Token::PCNT;
+    case Opcode::PEXIT:
+    case Opcode::EXIT:
+        return Token::PEXIT;
+    case Opcode::PLONGJMP:
+    case Opcode::LONGJMP:
+        return Token::PLONGJMP;
+    case Opcode::PRET:
+    case Opcode::RET:
+    case Opcode::CAL:
+        return Token::PRET;
+    case Opcode::SSY:
+    case Opcode::SYNC:
+        return Token::SSY;
+    default:
+        throw InvalidArgument("{}", opcode);
+    }
+}
+
+bool IsAbsoluteJump(Opcode opcode) {
+    switch (opcode) {
+    case Opcode::JCAL:
+    case Opcode::JMP:
+    case Opcode::JMX:
+        return true;
+    default:
+        return false;
+    }
+}
+
+bool HasFlowTest(Opcode opcode) {
+    switch (opcode) {
+    case Opcode::BRA:
+    case Opcode::BRX:
+    case Opcode::EXIT:
+    case Opcode::JMP:
+    case Opcode::JMX:
+    case Opcode::KIL:
+    case Opcode::BRK:
+    case Opcode::CONT:
+    case Opcode::LONGJMP:
+    case Opcode::RET:
+    case Opcode::SYNC:
+        return true;
+    case Opcode::CAL:
+    case Opcode::JCAL:
+        return false;
+    default:
+        throw InvalidArgument("Invalid branch {}", opcode);
+    }
+}
+
+std::string NameOf(const Block& block) {
+    if (block.begin.IsVirtual()) {
+        return fmt::format("\"Virtual {}\"", block.begin);
+    } else {
+        return fmt::format("\"{}\"", block.begin);
+    }
+}
+} // Anonymous namespace
+
+void Stack::Push(Token token, Location target) {
+    entries.push_back({
+        .token = token,
+        .target{target},
+    });
+}
+
+std::pair<Location, Stack> Stack::Pop(Token token) const {
+    const std::optional<Location> pc{Peek(token)};
+    if (!pc) {
+        throw LogicError("Token could not be found");
+    }
+    return {*pc, Remove(token)};
+}
+
+std::optional<Location> Stack::Peek(Token token) const {
+    const auto reverse_entries{entries | std::views::reverse};
+    const auto it{std::ranges::find(reverse_entries, token, &StackEntry::token)};
+    if (it == reverse_entries.end()) {
+        return std::nullopt;
+    }
+    return it->target;
+}
+
+Stack Stack::Remove(Token token) const {
+    const auto reverse_entries{entries | std::views::reverse};
+    const auto it{std::ranges::find(reverse_entries, token, &StackEntry::token)};
+    const auto pos{std::distance(reverse_entries.begin(), it)};
+    Stack result;
+    result.entries.insert(result.entries.end(), entries.begin(), entries.end() - pos - 1);
+    return result;
+}
+
+bool Block::Contains(Location pc) const noexcept {
+    return pc >= begin && pc < end;
+}
+
+Function::Function(ObjectPool<Block>& block_pool, Location start_address)
+    : entrypoint{start_address} {
+    Label& label{labels.emplace_back()};
+    label.address = start_address;
+    label.block = block_pool.Create(Block{});
+    label.block->begin = start_address;
+    label.block->end = start_address;
+    label.block->end_class = EndClass::Branch;
+    label.block->cond = IR::Condition(true);
+    label.block->branch_true = nullptr;
+    label.block->branch_false = nullptr;
+}
+
+CFG::CFG(Environment& env_, ObjectPool<Block>& block_pool_, Location start_address,
+         bool exits_to_dispatcher_)
+    : env{env_}, block_pool{block_pool_}, program_start{start_address},
+      exits_to_dispatcher{exits_to_dispatcher_} {
+    if (exits_to_dispatcher) {
+        dispatch_block = block_pool.Create(Block{});
+        dispatch_block->begin = {};
+        dispatch_block->end = {};
+        dispatch_block->end_class = EndClass::Exit;
+        dispatch_block->cond = IR::Condition(true);
+        dispatch_block->stack = {};
+        dispatch_block->branch_true = nullptr;
+        dispatch_block->branch_false = nullptr;
+    }
+    functions.emplace_back(block_pool, start_address);
+    for (FunctionId function_id = 0; function_id < functions.size(); ++function_id) {
+        while (!functions[function_id].labels.empty()) {
+            Function& function{functions[function_id]};
+            Label label{function.labels.back()};
+            function.labels.pop_back();
+            AnalyzeLabel(function_id, label);
+        }
+    }
+    if (exits_to_dispatcher) {
+        const auto last_block{functions[0].blocks.rbegin()};
+        dispatch_block->begin = last_block->end + 1;
+        dispatch_block->end = last_block->end + 1;
+        functions[0].blocks.insert(*dispatch_block);
+    }
+}
+
+void CFG::AnalyzeLabel(FunctionId function_id, Label& label) {
+    if (InspectVisitedBlocks(function_id, label)) {
+        // Label address has been visited
+        return;
+    }
+    // Try to find the next block
+    Function* const
function{&functions[function_id]};
+    Location pc{label.address};
+    const auto next_it{function->blocks.upper_bound(pc, Compare{})};
+    const bool is_last{next_it == function->blocks.end()};
+    Block* const next{is_last ? nullptr : &*next_it};
+    // Insert before the next block
+    Block* const block{label.block};
+    // Analyze instructions until it reaches an already visited block or there's a branch
+    bool is_branch{false};
+    while (!next || pc < next->begin) {
+        is_branch = AnalyzeInst(block, function_id, pc) == AnalysisState::Branch;
+        if (is_branch) {
+            break;
+        }
+        ++pc;
+    }
+    if (!is_branch) {
+        // If the block finished without a branch,
+        // it means that the next instruction is already visited, jump to it
+        block->end = pc;
+        block->cond = IR::Condition{true};
+        block->branch_true = next;
+        block->branch_false = nullptr;
+    }
+    // Function's pointer might be invalid, resolve it again
+    // Insert the new block
+    functions[function_id].blocks.insert(*block);
+}
+
+bool CFG::InspectVisitedBlocks(FunctionId function_id, const Label& label) {
+    const Location pc{label.address};
+    Function& function{functions[function_id]};
+    const auto it{
+        std::ranges::find_if(function.blocks, [pc](auto& block) { return block.Contains(pc); })};
+    if (it == function.blocks.end()) {
+        // Address has not been visited
+        return false;
+    }
+    Block* const visited_block{&*it};
+    if (visited_block->begin == pc) {
+        throw LogicError("Dangling block");
+    }
+    Block* const new_block{label.block};
+    Split(visited_block, new_block, pc);
+    function.blocks.insert(it, *new_block);
+    return true;
+}
+
+CFG::AnalysisState CFG::AnalyzeInst(Block* block, FunctionId function_id, Location pc) {
+    const Instruction inst{env.ReadInstruction(pc.Offset())};
+    const Opcode opcode{Decode(inst.raw)};
+    switch (opcode) {
+    case Opcode::BRA:
+    case Opcode::JMP:
+    case Opcode::RET:
+        if (!AnalyzeBranch(block, function_id, pc, inst, opcode)) {
+            return AnalysisState::Continue;
+        }
+        switch (opcode) {
+        case Opcode::BRA:
+        case Opcode::JMP:
+            AnalyzeBRA(block, function_id, pc, inst, IsAbsoluteJump(opcode));
+            break;
+        case Opcode::RET:
+            block->end_class = EndClass::Return;
+            break;
+        default:
+            break;
+        }
+        block->end = pc;
+        return AnalysisState::Branch;
+    case Opcode::BRK:
+    case Opcode::CONT:
+    case Opcode::LONGJMP:
+    case Opcode::SYNC: {
+        if (!AnalyzeBranch(block, function_id, pc, inst, opcode)) {
+            return AnalysisState::Continue;
+        }
+        const auto [stack_pc, new_stack]{block->stack.Pop(OpcodeToken(opcode))};
+        block->branch_true = AddLabel(block, new_stack, stack_pc, function_id);
+        block->end = pc;
+        return AnalysisState::Branch;
+    }
+    case Opcode::KIL: {
+        const Predicate pred{inst.Pred()};
+        const auto ir_pred{static_cast<IR::Pred>(pred.index)};
+        const IR::Condition cond{inst.branch.flow_test, ir_pred, pred.negated};
+        AnalyzeCondInst(block, function_id, pc, EndClass::Kill, cond);
+        return AnalysisState::Branch;
+    }
+    case Opcode::PBK:
+    case Opcode::PCNT:
+    case Opcode::PEXIT:
+    case Opcode::PLONGJMP:
+    case Opcode::SSY:
+        block->stack.Push(OpcodeToken(opcode), BranchOffset(pc, inst));
+        return AnalysisState::Continue;
+    case Opcode::BRX:
+    case Opcode::JMX:
+        return AnalyzeBRX(block, pc, inst, IsAbsoluteJump(opcode), function_id);
+    case Opcode::EXIT:
+        return AnalyzeEXIT(block, function_id, pc, inst);
+    case Opcode::PRET:
+        throw NotImplementedException("PRET flow analysis");
+    case Opcode::CAL:
+    case Opcode::JCAL: {
+        const bool is_absolute{IsAbsoluteJump(opcode)};
+        const Location cal_pc{is_absolute ? inst.branch.Absolute() : BranchOffset(pc, inst)};
inst.branch.Absolute() : BranchOffset(pc, inst)}; + // Technically CAL pushes into PRET, but that's implicit in the function call for us + // Insert the function into the list if it doesn't exist + const auto it{std::ranges::find(functions, cal_pc, &Function::entrypoint)}; + const bool exists{it != functions.end()}; + const FunctionId call_id{exists ? static_cast(std::distance(functions.begin(), it)) + : functions.size()}; + if (!exists) { + functions.emplace_back(block_pool, cal_pc); + } + block->end_class = EndClass::Call; + block->function_call = call_id; + block->return_block = AddLabel(block, block->stack, pc + 1, function_id); + block->end = pc; + return AnalysisState::Branch; + } + default: + break; + } + const Predicate pred{inst.Pred()}; + if (pred == Predicate{true} || pred == Predicate{false}) { + return AnalysisState::Continue; + } + const IR::Condition cond{static_cast(pred.index), pred.negated}; + AnalyzeCondInst(block, function_id, pc, EndClass::Branch, cond); + return AnalysisState::Branch; +} + +void CFG::AnalyzeCondInst(Block* block, FunctionId function_id, Location pc, + EndClass insn_end_class, IR::Condition cond) { + if (block->begin != pc) { + // If the block doesn't start in the conditional instruction + // mark it as a label to visit it later + block->end = pc; + block->cond = IR::Condition{true}; + block->branch_true = AddLabel(block, block->stack, pc, function_id); + block->branch_false = nullptr; + return; + } + // Create a virtual block and a conditional block + Block* const conditional_block{block_pool.Create()}; + Block virtual_block{}; + virtual_block.begin = block->begin.Virtual(); + virtual_block.end = block->begin.Virtual(); + virtual_block.end_class = EndClass::Branch; + virtual_block.stack = block->stack; + virtual_block.cond = cond; + virtual_block.branch_true = conditional_block; + virtual_block.branch_false = nullptr; + // Save the contents of the visited block in the conditional block + *conditional_block = std::move(*block); + // Impersonate the visited block with a virtual block + *block = std::move(virtual_block); + // Set the end properties of the conditional instruction + conditional_block->end = pc + 1; + conditional_block->end_class = insn_end_class; + // Add a label to the instruction after the conditional instruction + Block* const endif_block{AddLabel(conditional_block, block->stack, pc + 1, function_id)}; + // Branch to the next instruction from the virtual block + block->branch_false = endif_block; + // And branch to it from the conditional instruction if it is a branch or a kill instruction + // Kill instructions are considered a branch because they demote to a helper invocation and + // execution may continue. + if (insn_end_class == EndClass::Branch || insn_end_class == EndClass::Kill) { + conditional_block->cond = IR::Condition{true}; + conditional_block->branch_true = endif_block; + conditional_block->branch_false = nullptr; + } + // Finally insert the condition block into the list of blocks + functions[function_id].blocks.insert(*conditional_block); +} + +bool CFG::AnalyzeBranch(Block* block, FunctionId function_id, Location pc, Instruction inst, + Opcode opcode) { + if (inst.branch.is_cbuf) { + throw NotImplementedException("Branch with constant buffer offset"); + } + const Predicate pred{inst.Pred()}; + if (pred == Predicate{false}) { + return false; + } + const bool has_flow_test{HasFlowTest(opcode)}; + const IR::FlowTest flow_test{has_flow_test ? 
inst.branch.flow_test.Value() : IR::FlowTest::T}; + if (pred != Predicate{true} || flow_test != IR::FlowTest::T) { + block->cond = IR::Condition(flow_test, static_cast(pred.index), pred.negated); + block->branch_false = AddLabel(block, block->stack, pc + 1, function_id); + } else { + block->cond = IR::Condition{true}; + } + return true; +} + +void CFG::AnalyzeBRA(Block* block, FunctionId function_id, Location pc, Instruction inst, + bool is_absolute) { + const Location bra_pc{is_absolute ? inst.branch.Absolute() : BranchOffset(pc, inst)}; + block->branch_true = AddLabel(block, block->stack, bra_pc, function_id); +} + +CFG::AnalysisState CFG::AnalyzeBRX(Block* block, Location pc, Instruction inst, bool is_absolute, + FunctionId function_id) { + const std::optional brx_table{TrackIndirectBranchTable(env, pc, program_start)}; + if (!brx_table) { + TrackIndirectBranchTable(env, pc, program_start); + throw NotImplementedException("Failed to track indirect branch"); + } + const IR::FlowTest flow_test{inst.branch.flow_test}; + const Predicate pred{inst.Pred()}; + if (flow_test != IR::FlowTest::T || pred != Predicate{true}) { + throw NotImplementedException("Conditional indirect branch"); + } + std::vector targets; + targets.reserve(brx_table->num_entries); + for (u32 i = 0; i < brx_table->num_entries; ++i) { + u32 target{env.ReadCbufValue(brx_table->cbuf_index, brx_table->cbuf_offset + i * 4)}; + if (!is_absolute) { + target += pc.Offset(); + } + target += static_cast(brx_table->branch_offset); + target += 8; + targets.push_back(target); + } + std::ranges::sort(targets); + targets.erase(std::unique(targets.begin(), targets.end()), targets.end()); + + block->indirect_branches.reserve(targets.size()); + for (const u32 target : targets) { + Block* const branch{AddLabel(block, block->stack, target, function_id)}; + block->indirect_branches.push_back({ + .block = branch, + .address = target, + }); + } + block->cond = IR::Condition{true}; + block->end = pc + 1; + block->end_class = EndClass::IndirectBranch; + block->branch_reg = brx_table->branch_reg; + block->branch_offset = brx_table->branch_offset + 8; + if (!is_absolute) { + block->branch_offset += pc.Offset(); + } + return AnalysisState::Branch; +} + +CFG::AnalysisState CFG::AnalyzeEXIT(Block* block, FunctionId function_id, Location pc, + Instruction inst) { + const IR::FlowTest flow_test{inst.branch.flow_test}; + const Predicate pred{inst.Pred()}; + if (pred == Predicate{false} || flow_test == IR::FlowTest::F) { + // EXIT will never be taken + return AnalysisState::Continue; + } + if (exits_to_dispatcher && function_id != 0) { + throw NotImplementedException("Dispatch EXIT on external function"); + } + if (pred != Predicate{true} || flow_test != IR::FlowTest::T) { + if (block->stack.Peek(Token::PEXIT).has_value()) { + throw NotImplementedException("Conditional EXIT with PEXIT token"); + } + const IR::Condition cond{flow_test, static_cast(pred.index), pred.negated}; + if (exits_to_dispatcher) { + block->end = pc; + block->end_class = EndClass::Branch; + block->cond = cond; + block->branch_true = dispatch_block; + block->branch_false = AddLabel(block, block->stack, pc + 1, function_id); + return AnalysisState::Branch; + } + AnalyzeCondInst(block, function_id, pc, EndClass::Exit, cond); + return AnalysisState::Branch; + } + if (const std::optional exit_pc{block->stack.Peek(Token::PEXIT)}) { + const Stack popped_stack{block->stack.Remove(Token::PEXIT)}; + block->cond = IR::Condition{true}; + block->branch_true = AddLabel(block, popped_stack, 
*exit_pc, function_id); + block->branch_false = nullptr; + return AnalysisState::Branch; + } + if (exits_to_dispatcher) { + block->cond = IR::Condition{true}; + block->end = pc; + block->end_class = EndClass::Branch; + block->branch_true = dispatch_block; + block->branch_false = nullptr; + return AnalysisState::Branch; + } + block->end = pc + 1; + block->end_class = EndClass::Exit; + return AnalysisState::Branch; +} + +Block* CFG::AddLabel(Block* block, Stack stack, Location pc, FunctionId function_id) { + Function& function{functions[function_id]}; + if (block->begin == pc) { + // Jumps to itself + return block; + } + if (const auto it{function.blocks.find(pc, Compare{})}; it != function.blocks.end()) { + // Block already exists and it has been visited + if (function.blocks.begin() != it) { + // Check if the previous node is the virtual variant of the label + // This won't exist if a virtual node is not needed or it hasn't been visited + // If it hasn't been visited and a virtual node is needed, this will still behave as + // expected because the node impersonated with its virtual node. + const auto prev{std::prev(it)}; + if (it->begin.Virtual() == prev->begin) { + return &*prev; + } + } + return &*it; + } + // Make sure we don't insert the same layer twice + const auto label_it{std::ranges::find(function.labels, pc, &Label::address)}; + if (label_it != function.labels.end()) { + return label_it->block; + } + Block* const new_block{block_pool.Create()}; + new_block->begin = pc; + new_block->end = pc; + new_block->end_class = EndClass::Branch; + new_block->cond = IR::Condition(true); + new_block->stack = stack; + new_block->branch_true = nullptr; + new_block->branch_false = nullptr; + function.labels.push_back(Label{ + .address{pc}, + .block = new_block, + .stack{std::move(stack)}, + }); + return new_block; +} + +std::string CFG::Dot() const { + int node_uid{0}; + + std::string dot{"digraph shader {\n"}; + for (const Function& function : functions) { + dot += fmt::format("\tsubgraph cluster_{} {{\n", function.entrypoint); + dot += fmt::format("\t\tnode [style=filled];\n"); + for (const Block& block : function.blocks) { + const std::string name{NameOf(block)}; + const auto add_branch = [&](Block* branch, bool add_label) { + dot += fmt::format("\t\t{}->{}", name, NameOf(*branch)); + if (add_label && block.cond != IR::Condition{true} && + block.cond != IR::Condition{false}) { + dot += fmt::format(" [label=\"{}\"]", block.cond); + } + dot += '\n'; + }; + dot += fmt::format("\t\t{};\n", name); + switch (block.end_class) { + case EndClass::Branch: + if (block.cond != IR::Condition{false}) { + add_branch(block.branch_true, true); + } + if (block.cond != IR::Condition{true}) { + add_branch(block.branch_false, false); + } + break; + case EndClass::IndirectBranch: + for (const IndirectBranch& branch : block.indirect_branches) { + add_branch(branch.block, false); + } + break; + case EndClass::Call: + dot += fmt::format("\t\t{}->N{};\n", name, node_uid); + dot += fmt::format("\t\tN{}->{};\n", node_uid, NameOf(*block.return_block)); + dot += fmt::format("\t\tN{} [label=\"Call {}\"][shape=square][style=stripped];\n", + node_uid, block.function_call); + dot += '\n'; + ++node_uid; + break; + case EndClass::Exit: + dot += fmt::format("\t\t{}->N{};\n", name, node_uid); + dot += fmt::format("\t\tN{} [label=\"Exit\"][shape=square][style=stripped];\n", + node_uid); + ++node_uid; + break; + case EndClass::Return: + dot += fmt::format("\t\t{}->N{};\n", name, node_uid); + dot += fmt::format("\t\tN{} 
[label=\"Return\"][shape=square][style=stripped];\n", + node_uid); + ++node_uid; + break; + case EndClass::Kill: + dot += fmt::format("\t\t{}->N{};\n", name, node_uid); + dot += fmt::format("\t\tN{} [label=\"Kill\"][shape=square][style=stripped];\n", + node_uid); + ++node_uid; + break; + } + } + if (function.entrypoint == 8) { + dot += fmt::format("\t\tlabel = \"main\";\n"); + } else { + dot += fmt::format("\t\tlabel = \"Function {}\";\n", function.entrypoint); + } + dot += "\t}\n"; + } + if (!functions.empty()) { + auto& function{functions.front()}; + if (function.blocks.empty()) { + dot += "Start;\n"; + } else { + dot += fmt::format("\tStart -> {};\n", NameOf(*function.blocks.begin())); + } + dot += fmt::format("\tStart [shape=diamond];\n"); + } + dot += "}\n"; + return dot; +} + +} // namespace Shader::Maxwell::Flow diff --git a/src/shader_recompiler/frontend/maxwell/control_flow.h b/src/shader_recompiler/frontend/maxwell/control_flow.h new file mode 100755 index 000000000..0e515c3b6 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/control_flow.h @@ -0,0 +1,170 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include +#include +#include + +#include +#include + +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/condition.h" +#include "shader_recompiler/frontend/maxwell/instruction.h" +#include "shader_recompiler/frontend/maxwell/location.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" +#include "shader_recompiler/object_pool.h" + +namespace Shader::Maxwell::Flow { + +struct Block; + +using FunctionId = size_t; + +enum class EndClass { + Branch, + IndirectBranch, + Call, + Exit, + Return, + Kill, +}; + +enum class Token { + SSY, + PBK, + PEXIT, + PRET, + PCNT, + PLONGJMP, +}; + +struct StackEntry { + auto operator<=>(const StackEntry&) const noexcept = default; + + Token token; + Location target; +}; + +class Stack { +public: + void Push(Token token, Location target); + [[nodiscard]] std::pair Pop(Token token) const; + [[nodiscard]] std::optional Peek(Token token) const; + [[nodiscard]] Stack Remove(Token token) const; + +private: + boost::container::small_vector entries; +}; + +struct IndirectBranch { + Block* block; + u32 address; +}; + +struct Block : boost::intrusive::set_base_hook< + // Normal link is ~2.5% faster compared to safe link + boost::intrusive::link_mode> { + [[nodiscard]] bool Contains(Location pc) const noexcept; + + bool operator<(const Block& rhs) const noexcept { + return begin < rhs.begin; + } + + Location begin; + Location end; + EndClass end_class{}; + IR::Condition cond{}; + Stack stack; + Block* branch_true{}; + Block* branch_false{}; + FunctionId function_call{}; + Block* return_block{}; + IR::Reg branch_reg{}; + s32 branch_offset{}; + std::vector indirect_branches; +}; + +struct Label { + Location address; + Block* block; + Stack stack; +}; + +struct Function { + explicit Function(ObjectPool& block_pool, Location start_address); + + Location entrypoint; + boost::container::small_vector labels; + boost::intrusive::set blocks; +}; + +class CFG { + enum class AnalysisState { + Branch, + Continue, + }; + +public: + explicit CFG(Environment& env, ObjectPool& block_pool, Location start_address, + bool exits_to_dispatcher = false); + + CFG& operator=(const CFG&) = delete; + CFG(const CFG&) = delete; + + CFG& operator=(CFG&&) = delete; + CFG(CFG&&) = delete; + + [[nodiscard]] std::string 
Dot() const;
+
+    [[nodiscard]] std::span<const Function> Functions() const noexcept {
+        return std::span(functions.data(), functions.size());
+    }
+    [[nodiscard]] std::span<Function> Functions() noexcept {
+        return std::span(functions.data(), functions.size());
+    }
+
+    [[nodiscard]] bool ExitsToDispatcher() const {
+        return exits_to_dispatcher;
+    }
+
+private:
+    void AnalyzeLabel(FunctionId function_id, Label& label);
+
+    /// Inspect already visited blocks.
+    /// Return true when the block has already been visited
+    bool InspectVisitedBlocks(FunctionId function_id, const Label& label);
+
+    AnalysisState AnalyzeInst(Block* block, FunctionId function_id, Location pc);
+
+    void AnalyzeCondInst(Block* block, FunctionId function_id, Location pc,
+                         EndClass insn_end_class, IR::Condition cond);
+
+    /// Return true when the branch instruction is confirmed to be a branch
+    bool AnalyzeBranch(Block* block, FunctionId function_id, Location pc, Instruction inst,
+                       Opcode opcode);
+
+    void AnalyzeBRA(Block* block, FunctionId function_id, Location pc, Instruction inst,
+                    bool is_absolute);
+    AnalysisState AnalyzeBRX(Block* block, Location pc, Instruction inst, bool is_absolute,
+                             FunctionId function_id);
+    AnalysisState AnalyzeEXIT(Block* block, FunctionId function_id, Location pc, Instruction inst);
+
+    /// Return the branch target block id
+    Block* AddLabel(Block* block, Stack stack, Location pc, FunctionId function_id);
+
+    Environment& env;
+    ObjectPool<Block>& block_pool;
+    boost::container::small_vector<Function, 1> functions;
+    FunctionId current_function_id{0};
+    Location program_start;
+    bool exits_to_dispatcher{};
+    Block* dispatch_block{};
+};
+
+} // namespace Shader::Maxwell::Flow
diff --git a/src/shader_recompiler/frontend/maxwell/decode.cpp b/src/shader_recompiler/frontend/maxwell/decode.cpp
new file mode 100755
index 000000000..972f677dc
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/decode.cpp
@@ -0,0 +1,149 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
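The file that follows turns the INST() table in maxwell.inc into a decoder: every encoding string becomes a mask/value pair, the candidates are sorted so that patterns with more fixed bits are tested first, and a table indexed by the instruction's topmost bits narrows each decode to a handful of candidates. As a minimal, self-contained sketch of that mask/value idea (TinyOp, TinyEncoding and DecodeTiny are invented names for illustration, not part of this patch):

#include <array>
#include <cstdint>
#include <optional>

enum class TinyOp { Add, Branch, Nop };

struct TinyEncoding {
    std::uint64_t mask;   // bits that must match exactly
    std::uint64_t value;  // required pattern under the mask
    TinyOp op;
};

// Most specific pattern (most mask bits set) first, mirroring the
// std::popcount sort performed by SortedEncodings() below.
constexpr std::array<TinyEncoding, 3> ENCODINGS{{
    {0xFF00'0000'0000'0000, 0x1200'0000'0000'0000, TinyOp::Branch},
    {0xF000'0000'0000'0000, 0x1000'0000'0000'0000, TinyOp::Add},
    {0x0000'0000'0000'0000, 0x0000'0000'0000'0000, TinyOp::Nop},
}};

constexpr std::optional<TinyOp> DecodeTiny(std::uint64_t insn) {
    for (const TinyEncoding& encoding : ENCODINGS) {
        if ((insn & encoding.mask) == encoding.value) {
            return encoding.op;
        }
    }
    return std::nullopt;
}

// 0x12.. hits the more specific Branch pattern before the looser Add pattern.
static_assert(DecodeTiny(0x1234'0000'0000'0000) == TinyOp::Branch);
static_assert(DecodeTiny(0x1034'0000'0000'0000) == TinyOp::Add);

The real decoder adds one refinement on top of this linear scan: since every encoding fixes its topmost bits, bucketing the sorted list by those bits (the FAST_LOOKUP_TABLE below) leaves only a few candidates to test per instruction.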
+ +#include +#include +#include +#include +#include + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/decode.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" + +namespace Shader::Maxwell { +namespace { +struct MaskValue { + u64 mask; + u64 value; +}; + +constexpr MaskValue MaskValueFromEncoding(const char* encoding) { + u64 mask{}; + u64 value{}; + u64 bit{u64(1) << 63}; + while (*encoding) { + switch (*encoding) { + case '0': + mask |= bit; + break; + case '1': + mask |= bit; + value |= bit; + break; + case '-': + break; + case ' ': + break; + default: + throw LogicError("Invalid encoding character '{}'", *encoding); + } + ++encoding; + if (*encoding != ' ') { + bit >>= 1; + } + } + return MaskValue{.mask = mask, .value = value}; +} + +struct InstEncoding { + MaskValue mask_value; + Opcode opcode; +}; +constexpr std::array UNORDERED_ENCODINGS{ +#define INST(name, cute, encode) \ + InstEncoding{ \ + .mask_value{MaskValueFromEncoding(encode)}, \ + .opcode = Opcode::name, \ + }, +#include "maxwell.inc" +#undef INST +}; + +constexpr auto SortedEncodings() { + std::array encodings{UNORDERED_ENCODINGS}; + std::ranges::sort(encodings, [](const InstEncoding& lhs, const InstEncoding& rhs) { + return std::popcount(lhs.mask_value.mask) > std::popcount(rhs.mask_value.mask); + }); + return encodings; +} +constexpr auto ENCODINGS{SortedEncodings()}; + +constexpr int WidestLeftBits() { + int bits{64}; + for (const InstEncoding& encoding : ENCODINGS) { + bits = std::min(bits, std::countr_zero(encoding.mask_value.mask)); + } + return 64 - bits; +} +constexpr int WIDEST_LEFT_BITS{WidestLeftBits()}; +constexpr int MASK_SHIFT{64 - WIDEST_LEFT_BITS}; + +constexpr size_t ToFastLookupIndex(u64 value) { + return static_cast(value >> MASK_SHIFT); +} + +constexpr size_t FastLookupSize() { + size_t max_width{}; + for (const InstEncoding& encoding : ENCODINGS) { + max_width = std::max(max_width, ToFastLookupIndex(encoding.mask_value.mask)); + } + return max_width + 1; +} +constexpr size_t FAST_LOOKUP_SIZE{FastLookupSize()}; + +struct InstInfo { + [[nodiscard]] u64 Mask() const noexcept { + return static_cast(high_mask) << MASK_SHIFT; + } + + [[nodiscard]] u64 Value() const noexcept { + return static_cast(high_value) << MASK_SHIFT; + } + + u16 high_mask; + u16 high_value; + Opcode opcode; +}; + +constexpr auto MakeFastLookupTableIndex(size_t index) { + std::array encodings{}; + size_t element{}; + for (const auto& encoding : ENCODINGS) { + const size_t mask{ToFastLookupIndex(encoding.mask_value.mask)}; + const size_t value{ToFastLookupIndex(encoding.mask_value.value)}; + if ((index & mask) == value) { + encodings.at(element) = InstInfo{ + .high_mask = static_cast(encoding.mask_value.mask >> MASK_SHIFT), + .high_value = static_cast(encoding.mask_value.value >> MASK_SHIFT), + .opcode = encoding.opcode, + }; + ++element; + } + } + return encodings; +} + +/*constexpr*/ auto MakeFastLookupTable() { + auto encodings{std::make_unique, FAST_LOOKUP_SIZE>>()}; + for (size_t index = 0; index < FAST_LOOKUP_SIZE; ++index) { + (*encodings)[index] = MakeFastLookupTableIndex(index); + } + return encodings; +} +const auto FAST_LOOKUP_TABLE{MakeFastLookupTable()}; +} // Anonymous namespace + +Opcode Decode(u64 insn) { + const auto& table{(*FAST_LOOKUP_TABLE)[ToFastLookupIndex(insn)]}; + const auto it{std::ranges::find_if( + table, [insn](const InstInfo& info) { return (insn & info.Mask()) == info.Value(); })}; + if (it == table.end()) { + throw 
NotImplementedException("Instruction 0x{:016x} is unknown / unimplemented", insn);
+    }
+    return it->opcode;
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/decode.h b/src/shader_recompiler/frontend/maxwell/decode.h
new file mode 100755
index 000000000..b4f080fd7
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/decode.h
@@ -0,0 +1,14 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/maxwell/opcodes.h"
+
+namespace Shader::Maxwell {
+
+[[nodiscard]] Opcode Decode(u64 insn);
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.cpp b/src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.cpp
new file mode 100755
index 000000000..008625cb3
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.cpp
@@ -0,0 +1,108 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <optional>
+
+#include "common/common_types.h"
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/maxwell/decode.h"
+#include "shader_recompiler/frontend/maxwell/indirect_branch_table_track.h"
+#include "shader_recompiler/frontend/maxwell/opcodes.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/load_constant.h"
+
+namespace Shader::Maxwell {
+namespace {
+union Encoding {
+    u64 raw;
+    BitField<0, 8, IR::Reg> dest_reg;
+    BitField<8, 8, IR::Reg> src_reg;
+    BitField<20, 19, u64> immediate;
+    BitField<56, 1, u64> is_negative;
+    BitField<20, 24, s64> brx_offset;
+};
+
+template <typename Callable>
+std::optional<u64> Track(Environment& env, Location block_begin, Location& pos, Callable&& func) {
+    while (pos >= block_begin) {
+        const u64 insn{env.ReadInstruction(pos.Offset())};
+        --pos;
+        if (func(insn, Decode(insn))) {
+            return insn;
+        }
+    }
+    return std::nullopt;
+}
+
+std::optional<u64> TrackLDC(Environment& env, Location block_begin, Location& pos,
+                            IR::Reg brx_reg) {
+    return Track(env, block_begin, pos, [brx_reg](u64 insn, Opcode opcode) {
+        const LDC::Encoding ldc{insn};
+        return opcode == Opcode::LDC && ldc.dest_reg == brx_reg && ldc.size == LDC::Size::B32 &&
+               ldc.mode == LDC::Mode::Default;
+    });
+}
+
+std::optional<u64> TrackSHL(Environment& env, Location block_begin, Location& pos,
+                            IR::Reg ldc_reg) {
+    return Track(env, block_begin, pos, [ldc_reg](u64 insn, Opcode opcode) {
+        const Encoding shl{insn};
+        return opcode == Opcode::SHL_imm && shl.dest_reg == ldc_reg;
+    });
+}
+
+std::optional<u64> TrackIMNMX(Environment& env, Location block_begin, Location& pos,
+                              IR::Reg shl_reg) {
+    return Track(env, block_begin, pos, [shl_reg](u64 insn, Opcode opcode) {
+        const Encoding imnmx{insn};
+        return opcode == Opcode::IMNMX_imm && imnmx.dest_reg == shl_reg;
+    });
+}
+} // Anonymous namespace
+
+std::optional<IndirectBranchTableInfo> TrackIndirectBranchTable(Environment& env, Location brx_pos,
+                                                                Location block_begin) {
+    const u64 brx_insn{env.ReadInstruction(brx_pos.Offset())};
+    const Opcode brx_opcode{Decode(brx_insn)};
+    if (brx_opcode != Opcode::BRX && brx_opcode != Opcode::JMX) {
+        throw LogicError("Tracked instruction is not BRX or JMX");
+    }
+    const IR::Reg brx_reg{Encoding{brx_insn}.src_reg};
+    const s32 brx_offset{static_cast<s32>(Encoding{brx_insn}.brx_offset)};
+
+    Location pos{brx_pos};
+    const std::optional<u64> ldc_insn{TrackLDC(env, block_begin, pos, brx_reg)};
+    if (!ldc_insn) {
+        return std::nullopt;
+    }
+    const LDC::Encoding ldc{*ldc_insn};
+    const u32 cbuf_index{static_cast<u32>(ldc.index)};
+    const u32 cbuf_offset{static_cast<u32>(static_cast<s32>(ldc.offset.Value()))};
+    const IR::Reg ldc_reg{ldc.src_reg};
+
+    const std::optional<u64> shl_insn{TrackSHL(env, block_begin, pos, ldc_reg)};
+    if (!shl_insn) {
+        return std::nullopt;
+    }
+    const Encoding shl{*shl_insn};
+    const IR::Reg shl_reg{shl.src_reg};
+
+    const std::optional<u64> imnmx_insn{TrackIMNMX(env, block_begin, pos, shl_reg)};
+    if (!imnmx_insn) {
+        return std::nullopt;
+    }
+    const Encoding imnmx{*imnmx_insn};
+    if (imnmx.is_negative != 0) {
+        return std::nullopt;
+    }
+    const u32 imnmx_immediate{static_cast<u32>(imnmx.immediate.Value())};
+    return IndirectBranchTableInfo{
+        .cbuf_index = cbuf_index,
+        .cbuf_offset = cbuf_offset,
+        .num_entries = imnmx_immediate + 1,
+        .branch_offset = brx_offset,
+        .branch_reg = brx_reg,
+    };
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.h b/src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.h
new file mode 100755
index 000000000..eee5102fa
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/indirect_branch_table_track.h
@@ -0,0 +1,28 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <optional>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/environment.h"
+#include "shader_recompiler/frontend/ir/reg.h"
+#include "shader_recompiler/frontend/maxwell/location.h"
+
+namespace Shader::Maxwell {
+
+struct IndirectBranchTableInfo {
+    u32 cbuf_index{};
+    u32 cbuf_offset{};
+    u32 num_entries{};
+    s32 branch_offset{};
+    IR::Reg branch_reg{};
+};
+
+std::optional<IndirectBranchTableInfo> TrackIndirectBranchTable(Environment& env, Location brx_pos,
+                                                                Location block_begin);
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/instruction.h b/src/shader_recompiler/frontend/maxwell/instruction.h
new file mode 100755
index 000000000..743d68d61
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/instruction.h
@@ -0,0 +1,63 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
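instruction.h, next, models the fields shared by every Maxwell instruction word. A predicate guard is four bits: the low three select a predicate register, index 7 being PT (always true), and bit 3 asks for the negated test, so the raw value 0xF reads as !PT, i.e. never taken; this is what lets the CFG analysis discard branches predicated on Predicate{false}. A small sketch of that decoding under those assumptions (PredBits and DecodePred are illustrative names only):

#include <cstdint>

struct PredBits {
    unsigned index;  // predicate register; 0-6 are real registers, 7 is PT
    bool negated;    // bit 3 set: test the complement of the predicate
};

constexpr PredBits DecodePred(std::uint64_t raw) {
    return PredBits{static_cast<unsigned>(raw & 7), (raw & 8) != 0};
}

static_assert(DecodePred(0x7).index == 7 && !DecodePred(0x7).negated); // PT: always taken
static_assert(DecodePred(0xF).index == 7 && DecodePred(0xF).negated);  // !PT: never taken
static_assert(DecodePred(0xA).index == 2 && DecodePred(0xA).negated);  // !P2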
+ +#pragma once + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/flow_test.h" +#include "shader_recompiler/frontend/ir/reg.h" + +namespace Shader::Maxwell { + +struct Predicate { + Predicate() = default; + Predicate(unsigned index_, bool negated_ = false) : index{index_}, negated{negated_} {} + Predicate(bool value) : index{7}, negated{!value} {} + Predicate(u64 raw) : index{static_cast(raw & 7)}, negated{(raw & 8) != 0} {} + + unsigned index; + bool negated; +}; + +inline bool operator==(const Predicate& lhs, const Predicate& rhs) noexcept { + return lhs.index == rhs.index && lhs.negated == rhs.negated; +} + +inline bool operator!=(const Predicate& lhs, const Predicate& rhs) noexcept { + return !(lhs == rhs); +} + +union Instruction { + Instruction(u64 raw_) : raw{raw_} {} + + u64 raw; + + union { + BitField<5, 1, u64> is_cbuf; + BitField<0, 5, IR::FlowTest> flow_test; + + [[nodiscard]] u32 Absolute() const noexcept { + return static_cast(absolute); + } + + [[nodiscard]] s32 Offset() const noexcept { + return static_cast(offset); + } + + private: + BitField<20, 24, s64> offset; + BitField<20, 32, u64> absolute; + } branch; + + [[nodiscard]] Predicate Pred() const noexcept { + return Predicate{pred}; + } + +private: + BitField<16, 4, u64> pred; +}; +static_assert(std::is_trivially_copyable_v); + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/location.h b/src/shader_recompiler/frontend/maxwell/location.h new file mode 100755 index 000000000..26d29eae2 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/location.h @@ -0,0 +1,112 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include + +#include + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" + +namespace Shader::Maxwell { + +class Location { + static constexpr u32 VIRTUAL_BIAS{4}; + +public: + constexpr Location() = default; + + constexpr Location(u32 initial_offset) : offset{initial_offset} { + if (initial_offset % 8 != 0) { + throw InvalidArgument("initial_offset={} is not a multiple of 8", initial_offset); + } + Align(); + } + + constexpr Location Virtual() const noexcept { + Location virtual_location; + virtual_location.offset = offset - VIRTUAL_BIAS; + return virtual_location; + } + + [[nodiscard]] constexpr u32 Offset() const noexcept { + return offset; + } + + [[nodiscard]] constexpr bool IsVirtual() const { + return offset % 8 == VIRTUAL_BIAS; + } + + constexpr auto operator<=>(const Location&) const noexcept = default; + + constexpr Location operator++() noexcept { + const Location copy{*this}; + Step(); + return copy; + } + + constexpr Location operator++(int) noexcept { + Step(); + return *this; + } + + constexpr Location operator--() noexcept { + const Location copy{*this}; + Back(); + return copy; + } + + constexpr Location operator--(int) noexcept { + Back(); + return *this; + } + + constexpr Location operator+(int number) const { + Location new_pc{*this}; + while (number > 0) { + --number; + ++new_pc; + } + while (number < 0) { + ++number; + --new_pc; + } + return new_pc; + } + + constexpr Location operator-(int number) const { + return operator+(-number); + } + +private: + constexpr void Align() { + offset += offset % 32 == 0 ? 8 : 0; + } + + constexpr void Step() { + offset += 8 + (offset % 32 == 24 ? 8 : 0); + } + + constexpr void Back() { + offset -= 8 + (offset % 32 == 8 ? 
8 : 0); + } + + u32 offset{0xcccccccc}; +}; + +} // namespace Shader::Maxwell + +template <> +struct fmt::formatter { + constexpr auto parse(format_parse_context& ctx) { + return ctx.begin(); + } + template + auto format(const Shader::Maxwell::Location& location, FormatContext& ctx) { + return fmt::format_to(ctx.out(), "{:04x}", location.Offset()); + } +}; diff --git a/src/shader_recompiler/frontend/maxwell/maxwell.inc b/src/shader_recompiler/frontend/maxwell/maxwell.inc new file mode 100755 index 000000000..2fee591bb --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/maxwell.inc @@ -0,0 +1,286 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +INST(AL2P, "AL2P", "1110 1111 1010 0---") +INST(ALD, "ALD", "1110 1111 1101 1---") +INST(AST, "AST", "1110 1111 1111 0---") +INST(ATOM_cas, "ATOM (cas)", "1110 1110 1111 ----") +INST(ATOM, "ATOM", "1110 1101 ---- ----") +INST(ATOMS_cas, "ATOMS (cas)", "1110 1110 ---- ----") +INST(ATOMS, "ATOMS", "1110 1100 ---- ----") +INST(B2R, "B2R", "1111 0000 1011 1---") +INST(BAR, "BAR", "1111 0000 1010 1---") +INST(BFE_reg, "BFE (reg)", "0101 1100 0000 0---") +INST(BFE_cbuf, "BFE (cbuf)", "0100 1100 0000 0---") +INST(BFE_imm, "BFE (imm)", "0011 100- 0000 0---") +INST(BFI_reg, "BFI (reg)", "0101 1011 1111 0---") +INST(BFI_rc, "BFI (rc)", "0101 0011 1111 0---") +INST(BFI_cr, "BFI (cr)", "0100 1011 1111 0---") +INST(BFI_imm, "BFI (imm)", "0011 011- 1111 0---") +INST(BPT, "BPT", "1110 0011 1010 ----") +INST(BRA, "BRA", "1110 0010 0100 ----") +INST(BRK, "BRK", "1110 0011 0100 ----") +INST(BRX, "BRX", "1110 0010 0101 ----") +INST(CAL, "CAL", "1110 0010 0110 ----") +INST(CCTL, "CCTL", "1110 1111 011- ----") +INST(CCTLL, "CCTLL", "1110 1111 100- ----") +INST(CONT, "CONT", "1110 0011 0101 ----") +INST(CS2R, "CS2R", "0101 0000 1100 1---") +INST(CSET, "CSET", "0101 0000 1001 1---") +INST(CSETP, "CSETP", "0101 0000 1010 0---") +INST(DADD_reg, "DADD (reg)", "0101 1100 0111 0---") +INST(DADD_cbuf, "DADD (cbuf)", "0100 1100 0111 0---") +INST(DADD_imm, "DADD (imm)", "0011 100- 0111 0---") +INST(DEPBAR, "DEPBAR", "1111 0000 1111 0---") +INST(DFMA_reg, "DFMA (reg)", "0101 1011 0111 ----") +INST(DFMA_rc, "DFMA (rc)", "0101 0011 0111 ----") +INST(DFMA_cr, "DFMA (cr)", "0100 1011 0111 ----") +INST(DFMA_imm, "DFMA (imm)", "0011 011- 0111 ----") +INST(DMNMX_reg, "DMNMX (reg)", "0101 1100 0101 0---") +INST(DMNMX_cbuf, "DMNMX (cbuf)", "0100 1100 0101 0---") +INST(DMNMX_imm, "DMNMX (imm)", "0011 100- 0101 0---") +INST(DMUL_reg, "DMUL (reg)", "0101 1100 1000 0---") +INST(DMUL_cbuf, "DMUL (cbuf)", "0100 1100 1000 0---") +INST(DMUL_imm, "DMUL (imm)", "0011 100- 1000 0---") +INST(DSET_reg, "DSET (reg)", "0101 1001 0--- ----") +INST(DSET_cbuf, "DSET (cbuf)", "0100 1001 0--- ----") +INST(DSET_imm, "DSET (imm)", "0011 001- 0--- ----") +INST(DSETP_reg, "DSETP (reg)", "0101 1011 1000 ----") +INST(DSETP_cbuf, "DSETP (cbuf)", "0100 1011 1000 ----") +INST(DSETP_imm, "DSETP (imm)", "0011 011- 1000 ----") +INST(EXIT, "EXIT", "1110 0011 0000 ----") +INST(F2F_reg, "F2F (reg)", "0101 1100 1010 1---") +INST(F2F_cbuf, "F2F (cbuf)", "0100 1100 1010 1---") +INST(F2F_imm, "F2F (imm)", "0011 100- 1010 1---") +INST(F2I_reg, "F2I (reg)", "0101 1100 1011 0---") +INST(F2I_cbuf, "F2I (cbuf)", "0100 1100 1011 0---") +INST(F2I_imm, "F2I (imm)", "0011 100- 1011 0---") +INST(FADD_reg, "FADD (reg)", "0101 1100 0101 1---") +INST(FADD_cbuf, "FADD (cbuf)", "0100 1100 0101 1---") +INST(FADD_imm, "FADD (imm)", "0011 100- 0101 
1---") +INST(FADD32I, "FADD32I", "0000 10-- ---- ----") +INST(FCHK_reg, "FCHK (reg)", "0101 1100 1000 1---") +INST(FCHK_cbuf, "FCHK (cbuf)", "0100 1100 1000 1---") +INST(FCHK_imm, "FCHK (imm)", "0011 100- 1000 1---") +INST(FCMP_reg, "FCMP (reg)", "0101 1011 1010 ----") +INST(FCMP_rc, "FCMP (rc)", "0101 0011 1010 ----") +INST(FCMP_cr, "FCMP (cr)", "0100 1011 1010 ----") +INST(FCMP_imm, "FCMP (imm)", "0011 011- 1010 ----") +INST(FFMA_reg, "FFMA (reg)", "0101 1001 1--- ----") +INST(FFMA_rc, "FFMA (rc)", "0101 0001 1--- ----") +INST(FFMA_cr, "FFMA (cr)", "0100 1001 1--- ----") +INST(FFMA_imm, "FFMA (imm)", "0011 001- 1--- ----") +INST(FFMA32I, "FFMA32I", "0000 11-- ---- ----") +INST(FLO_reg, "FLO (reg)", "0101 1100 0011 0---") +INST(FLO_cbuf, "FLO (cbuf)", "0100 1100 0011 0---") +INST(FLO_imm, "FLO (imm)", "0011 100- 0011 0---") +INST(FMNMX_reg, "FMNMX (reg)", "0101 1100 0110 0---") +INST(FMNMX_cbuf, "FMNMX (cbuf)", "0100 1100 0110 0---") +INST(FMNMX_imm, "FMNMX (imm)", "0011 100- 0110 0---") +INST(FMUL_reg, "FMUL (reg)", "0101 1100 0110 1---") +INST(FMUL_cbuf, "FMUL (cbuf)", "0100 1100 0110 1---") +INST(FMUL_imm, "FMUL (imm)", "0011 100- 0110 1---") +INST(FMUL32I, "FMUL32I", "0001 1110 ---- ----") +INST(FSET_reg, "FSET (reg)", "0101 1000 ---- ----") +INST(FSET_cbuf, "FSET (cbuf)", "0100 1000 ---- ----") +INST(FSET_imm, "FSET (imm)", "0011 000- ---- ----") +INST(FSETP_reg, "FSETP (reg)", "0101 1011 1011 ----") +INST(FSETP_cbuf, "FSETP (cbuf)", "0100 1011 1011 ----") +INST(FSETP_imm, "FSETP (imm)", "0011 011- 1011 ----") +INST(FSWZADD, "FSWZADD", "0101 0000 1111 1---") +INST(GETCRSPTR, "GETCRSPTR", "1110 0010 1100 ----") +INST(GETLMEMBASE, "GETLMEMBASE", "1110 0010 1101 ----") +INST(HADD2_reg, "HADD2 (reg)", "0101 1101 0001 0---") +INST(HADD2_cbuf, "HADD2 (cbuf)", "0111 101- 1--- ----") +INST(HADD2_imm, "HADD2 (imm)", "0111 101- 0--- ----") +INST(HADD2_32I, "HADD2_32I", "0010 110- ---- ----") +INST(HFMA2_reg, "HFMA2 (reg)", "0101 1101 0000 0---") +INST(HFMA2_rc, "HFMA2 (rc)", "0110 0--- 1--- ----") +INST(HFMA2_cr, "HFMA2 (cr)", "0111 0--- 1--- ----") +INST(HFMA2_imm, "HFMA2 (imm)", "0111 0--- 0--- ----") +INST(HFMA2_32I, "HFMA2_32I", "0010 100- ---- ----") +INST(HMUL2_reg, "HMUL2 (reg)", "0101 1101 0000 1---") +INST(HMUL2_cbuf, "HMUL2 (cbuf)", "0111 100- 1--- ----") +INST(HMUL2_imm, "HMUL2 (imm)", "0111 100- 0--- ----") +INST(HMUL2_32I, "HMUL2_32I", "0010 101- ---- ----") +INST(HSET2_reg, "HSET2 (reg)", "0101 1101 0001 1---") +INST(HSET2_cbuf, "HSET2 (cbuf)", "0111 110- 1--- ----") +INST(HSET2_imm, "HSET2 (imm)", "0111 110- 0--- ----") +INST(HSETP2_reg, "HSETP2 (reg)", "0101 1101 0010 0---") +INST(HSETP2_cbuf, "HSETP2 (cbuf)", "0111 111- 1--- ----") +INST(HSETP2_imm, "HSETP2 (imm)", "0111 111- 0--- ----") +INST(I2F_reg, "I2F (reg)", "0101 1100 1011 1---") +INST(I2F_cbuf, "I2F (cbuf)", "0100 1100 1011 1---") +INST(I2F_imm, "I2F (imm)", "0011 100- 1011 1---") +INST(I2I_reg, "I2I (reg)", "0101 1100 1110 0---") +INST(I2I_cbuf, "I2I (cbuf)", "0100 1100 1110 0---") +INST(I2I_imm, "I2I (imm)", "0011 100- 1110 0---") +INST(IADD_reg, "IADD (reg)", "0101 1100 0001 0---") +INST(IADD_cbuf, "IADD (cbuf)", "0100 1100 0001 0---") +INST(IADD_imm, "IADD (imm)", "0011 100- 0001 0---") +INST(IADD3_reg, "IADD3 (reg)", "0101 1100 1100 ----") +INST(IADD3_cbuf, "IADD3 (cbuf)", "0100 1100 1100 ----") +INST(IADD3_imm, "IADD3 (imm)", "0011 100- 1100 ----") +INST(IADD32I, "IADD32I", "0001 110- ---- ----") +INST(ICMP_reg, "ICMP (reg)", "0101 1011 0100 ----") +INST(ICMP_rc, "ICMP (rc)", "0101 0011 0100 ----") 
+INST(ICMP_cr, "ICMP (cr)", "0100 1011 0100 ----") +INST(ICMP_imm, "ICMP (imm)", "0011 011- 0100 ----") +INST(IDE, "IDE", "1110 0011 1001 ----") +INST(IDP_reg, "IDP (reg)", "0101 0011 1111 1---") +INST(IDP_imm, "IDP (imm)", "0101 0011 1101 1---") +INST(IMAD_reg, "IMAD (reg)", "0101 1010 0--- ----") +INST(IMAD_rc, "IMAD (rc)", "0101 0010 0--- ----") +INST(IMAD_cr, "IMAD (cr)", "0100 1010 0--- ----") +INST(IMAD_imm, "IMAD (imm)", "0011 010- 0--- ----") +INST(IMAD32I, "IMAD32I", "1000 00-- ---- ----") +INST(IMADSP_reg, "IMADSP (reg)", "0101 1010 1--- ----") +INST(IMADSP_rc, "IMADSP (rc)", "0101 0010 1--- ----") +INST(IMADSP_cr, "IMADSP (cr)", "0100 1010 1--- ----") +INST(IMADSP_imm, "IMADSP (imm)", "0011 010- 1--- ----") +INST(IMNMX_reg, "IMNMX (reg)", "0101 1100 0010 0---") +INST(IMNMX_cbuf, "IMNMX (cbuf)", "0100 1100 0010 0---") +INST(IMNMX_imm, "IMNMX (imm)", "0011 100- 0010 0---") +INST(IMUL_reg, "IMUL (reg)", "0101 1100 0011 1---") +INST(IMUL_cbuf, "IMUL (cbuf)", "0100 1100 0011 1---") +INST(IMUL_imm, "IMUL (imm)", "0011 100- 0011 1---") +INST(IMUL32I, "IMUL32I", "0001 1111 ---- ----") +INST(IPA, "IPA", "1110 0000 ---- ----") +INST(ISBERD, "ISBERD", "1110 1111 1101 0---") +INST(ISCADD_reg, "ISCADD (reg)", "0101 1100 0001 1---") +INST(ISCADD_cbuf, "ISCADD (cbuf)", "0100 1100 0001 1---") +INST(ISCADD_imm, "ISCADD (imm)", "0011 100- 0001 1---") +INST(ISCADD32I, "ISCADD32I", "0001 01-- ---- ----") +INST(ISET_reg, "ISET (reg)", "0101 1011 0101 ----") +INST(ISET_cbuf, "ISET (cbuf)", "0100 1011 0101 ----") +INST(ISET_imm, "ISET (imm)", "0011 011- 0101 ----") +INST(ISETP_reg, "ISETP (reg)", "0101 1011 0110 ----") +INST(ISETP_cbuf, "ISETP (cbuf)", "0100 1011 0110 ----") +INST(ISETP_imm, "ISETP (imm)", "0011 011- 0110 ----") +INST(JCAL, "JCAL", "1110 0010 0010 ----") +INST(JMP, "JMP", "1110 0010 0001 ----") +INST(JMX, "JMX", "1110 0010 0000 ----") +INST(KIL, "KIL", "1110 0011 0011 ----") +INST(LD, "LD", "100- ---- ---- ----") +INST(LDC, "LDC", "1110 1111 1001 0---") +INST(LDG, "LDG", "1110 1110 1101 0---") +INST(LDL, "LDL", "1110 1111 0100 0---") +INST(LDS, "LDS", "1110 1111 0100 1---") +INST(LEA_hi_reg, "LEA (hi reg)", "0101 1011 1101 1---") +INST(LEA_hi_cbuf, "LEA (hi cbuf)", "0001 10-- ---- ----") +INST(LEA_lo_reg, "LEA (lo reg)", "0101 1011 1101 0---") +INST(LEA_lo_cbuf, "LEA (lo cbuf)", "0100 1011 1101 ----") +INST(LEA_lo_imm, "LEA (lo imm)", "0011 011- 1101 0---") +INST(LEPC, "LEPC", "0101 0000 1101 0---") +INST(LONGJMP, "LONGJMP", "1110 0011 0001 ----") +INST(LOP_reg, "LOP (reg)", "0101 1100 0100 0---") +INST(LOP_cbuf, "LOP (cbuf)", "0100 1100 0100 0---") +INST(LOP_imm, "LOP (imm)", "0011 100- 0100 0---") +INST(LOP3_reg, "LOP3 (reg)", "0101 1011 1110 0---") +INST(LOP3_cbuf, "LOP3 (cbuf)", "0000 001- ---- ----") +INST(LOP3_imm, "LOP3 (imm)", "0011 11-- ---- ----") +INST(LOP32I, "LOP32I", "0000 01-- ---- ----") +INST(MEMBAR, "MEMBAR", "1110 1111 1001 1---") +INST(MOV_reg, "MOV (reg)", "0101 1100 1001 1---") +INST(MOV_cbuf, "MOV (cbuf)", "0100 1100 1001 1---") +INST(MOV_imm, "MOV (imm)", "0011 100- 1001 1---") +INST(MOV32I, "MOV32I", "0000 0001 0000 ----") +INST(MUFU, "MUFU", "0101 0000 1000 0---") +INST(NOP, "NOP", "0101 0000 1011 0---") +INST(OUT_reg, "OUT (reg)", "1111 1011 1110 0---") +INST(OUT_cbuf, "OUT (cbuf)", "1110 1011 1110 0---") +INST(OUT_imm, "OUT (imm)", "1111 011- 1110 0---") +INST(P2R_reg, "P2R (reg)", "0101 1100 1110 1---") +INST(P2R_cbuf, "P2R (cbuf)", "0100 1100 1110 1---") +INST(P2R_imm, "P2R (imm)", "0011 1000 1110 1---") +INST(PBK, "PBK", "1110 0010 1010 ----") 
+INST(PCNT,          "PCNT",           "1110 0010 1011 ----")
+INST(PEXIT,         "PEXIT",          "1110 0010 0011 ----")
+INST(PIXLD,         "PIXLD",          "1110 1111 1110 1---")
+INST(PLONGJMP,      "PLONGJMP",       "1110 0010 1000 ----")
+INST(POPC_reg,      "POPC (reg)",     "0101 1100 0000 1---")
+INST(POPC_cbuf,     "POPC (cbuf)",    "0100 1100 0000 1---")
+INST(POPC_imm,      "POPC (imm)",     "0011 100- 0000 1---")
+INST(PRET,          "PRET",           "1110 0010 0111 ----")
+INST(PRMT_reg,      "PRMT (reg)",     "0101 1011 1100 ----")
+INST(PRMT_rc,       "PRMT (rc)",      "0101 0011 1100 ----")
+INST(PRMT_cr,       "PRMT (cr)",      "0100 1011 1100 ----")
+INST(PRMT_imm,      "PRMT (imm)",     "0011 011- 1100 ----")
+INST(PSET,          "PSET",           "0101 0000 1000 1---")
+INST(PSETP,         "PSETP",          "0101 0000 1001 0---")
+INST(R2B,           "R2B",            "1111 0000 1100 0---")
+INST(R2P_reg,       "R2P (reg)",      "0101 1100 1111 0---")
+INST(R2P_cbuf,      "R2P (cbuf)",     "0100 1100 1111 0---")
+INST(R2P_imm,       "R2P (imm)",      "0011 100- 1111 0---")
+INST(RAM,           "RAM",            "1110 0011 1000 ----")
+INST(RED,           "RED",            "1110 1011 1111 1---")
+INST(RET,           "RET",            "1110 0011 0010 ----")
+INST(RRO_reg,       "RRO (reg)",      "0101 1100 1001 0---")
+INST(RRO_cbuf,      "RRO (cbuf)",     "0100 1100 1001 0---")
+INST(RRO_imm,       "RRO (imm)",      "0011 100- 1001 0---")
+INST(RTT,           "RTT",            "1110 0011 0110 ----")
+INST(S2R,           "S2R",            "1111 0000 1100 1---")
+INST(SAM,           "SAM",            "1110 0011 0111 ----")
+INST(SEL_reg,       "SEL (reg)",      "0101 1100 1010 0---")
+INST(SEL_cbuf,      "SEL (cbuf)",     "0100 1100 1010 0---")
+INST(SEL_imm,       "SEL (imm)",      "0011 100- 1010 0---")
+INST(SETCRSPTR,     "SETCRSPTR",      "1110 0010 1110 ----")
+INST(SETLMEMBASE,   "SETLMEMBASE",    "1110 0010 1111 ----")
+INST(SHF_l_reg,     "SHF (l reg)",    "0101 1011 1111 1---")
+INST(SHF_l_imm,     "SHF (l imm)",    "0011 011- 1111 1---")
+INST(SHF_r_reg,     "SHF (r reg)",    "0101 1100 1111 1---")
+INST(SHF_r_imm,     "SHF (r imm)",    "0011 100- 1111 1---")
+INST(SHFL,          "SHFL",           "1110 1111 0001 0---")
+INST(SHL_reg,       "SHL (reg)",      "0101 1100 0100 1---")
+INST(SHL_cbuf,      "SHL (cbuf)",     "0100 1100 0100 1---")
+INST(SHL_imm,       "SHL (imm)",      "0011 100- 0100 1---")
+INST(SHR_reg,       "SHR (reg)",      "0101 1100 0010 1---")
+INST(SHR_cbuf,      "SHR (cbuf)",     "0100 1100 0010 1---")
+INST(SHR_imm,       "SHR (imm)",      "0011 100- 0010 1---")
+INST(SSY,           "SSY",            "1110 0010 1001 ----")
+INST(ST,            "ST",             "101- ---- ---- ----")
+INST(STG,           "STG",            "1110 1110 1101 1---")
+INST(STL,           "STL",            "1110 1111 0101 0---")
+INST(STP,           "STP",            "1110 1110 1010 0---")
+INST(STS,           "STS",            "1110 1111 0101 1---")
+INST(SUATOM,        "SUATOM",         "1110 1010 0--- ----")
+INST(SUATOM_cas,    "SUATOM_cas",     "1110 1010 1--- ----")
+INST(SULD,          "SULD",           "1110 1011 000- ----")
+INST(SURED,         "SURED",          "1110 1011 010- ----")
+INST(SUST,          "SUST",           "1110 1011 001- ----")
+INST(SYNC,          "SYNC",           "1111 0000 1111 1---")
+INST(TEX,           "TEX",            "1100 0--- ---- ----")
+INST(TEX_b,         "TEX (b)",        "1101 1110 10-- ----")
+INST(TEXS,          "TEXS",           "1101 -00- ---- ----")
+INST(TLD,           "TLD",            "1101 1100 ---- ----")
+INST(TLD_b,         "TLD (b)",        "1101 1101 ---- ----")
+INST(TLD4,          "TLD4",           "1100 10-- ---- ----")
+INST(TLD4_b,        "TLD4 (b)",       "1101 1110 11-- ----")
+INST(TLD4S,         "TLD4S",          "1101 1111 -0-- ----")
+INST(TLDS,          "TLDS",           "1101 -01- ---- ----")
+INST(TMML,          "TMML",           "1101 1111 0101 1---")
+INST(TMML_b,        "TMML (b)",       "1101 1111 0110 0---")
+INST(TXA,           "TXA",            "1101 1111 0100 0---")
+INST(TXD,           "TXD",            "1101 1110 00-- ----")
+INST(TXD_b,         "TXD (b)",        "1101 1110 01-- ----")
+INST(TXQ,           "TXQ",            "1101 1111 0100 1---")
+INST(TXQ_b,         "TXQ (b)",        "1101 1111 0101 0---")
+INST(VABSDIFF,      "VABSDIFF",       "0101 0100 ---- ----")
+INST(VABSDIFF4,     "VABSDIFF4",      "0101 0000 0--- ----")
+INST(VADD,          "VADD",           "0010 00-- ---- ----")
+INST(VMAD,          "VMAD",           "0101 1111 ---- ----")
+INST(VMNMX,         "VMNMX",          "0011 101- ---- ----")
+INST(VOTE,          "VOTE",           "0101 0000 1101 1---")
+INST(VOTE_vtg,      "VOTE (vtg)",     "0101 0000 1110 0---")
+INST(VSET,          "VSET",           "0100 000- ---- ----")
+INST(VSETP,         "VSETP",          "0101 0000 1111 0---")
+INST(VSHL,          "VSHL",           "0101 0111 ---- ----")
+INST(VSHR,          "VSHR",           "0101 0110 ---- ----")
+INST(XMAD_reg,      "XMAD (reg)",     "0101 1011 00-- ----")
+INST(XMAD_rc,       "XMAD (rc)",      "0101 0001 0--- ----")
+INST(XMAD_cr,       "XMAD (cr)",      "0100 111- ---- ----")
+INST(XMAD_imm,      "XMAD (imm)",     "0011 011- 00-- ----")
+
+// Removed due to its weird formatting making fast tables larger
+// INST(CCTLT,      "CCTLT",          "1110 1011 1111 0--0")
diff --git a/src/shader_recompiler/frontend/maxwell/opcodes.cpp b/src/shader_recompiler/frontend/maxwell/opcodes.cpp
new file mode 100755
index 000000000..ccc40c20c
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/opcodes.cpp
@@ -0,0 +1,26 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <array>
+
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/maxwell/opcodes.h"
+
+namespace Shader::Maxwell {
+namespace {
+constexpr std::array NAME_TABLE{
+#define INST(name, cute, encode) cute,
+#include "maxwell.inc"
+#undef INST
+};
+} // Anonymous namespace
+
+const char* NameOf(Opcode opcode) {
+    if (static_cast<size_t>(opcode) >= NAME_TABLE.size()) {
+        throw InvalidArgument("Invalid opcode with raw value {}", static_cast<int>(opcode));
+    }
+    return NAME_TABLE[static_cast<size_t>(opcode)];
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/opcodes.h b/src/shader_recompiler/frontend/maxwell/opcodes.h
new file mode 100755
index 000000000..cd574f29d
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/opcodes.h
@@ -0,0 +1,30 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <fmt/format.h>
+
+namespace Shader::Maxwell {
+
+enum class Opcode {
+#define INST(name, cute, encode) name,
+#include "maxwell.inc"
+#undef INST
+};
+
+const char* NameOf(Opcode opcode);
+
+} // namespace Shader::Maxwell
+
+template <>
+struct fmt::formatter<Shader::Maxwell::Opcode> {
+    constexpr auto parse(format_parse_context& ctx) {
+        return ctx.begin();
+    }
+    template <typename FormatContext>
+    auto format(const Shader::Maxwell::Opcode& opcode, FormatContext& ctx) {
+        return format_to(ctx.out(), "{}", NameOf(opcode));
+    }
+};
diff --git a/src/shader_recompiler/frontend/maxwell/structured_control_flow.cpp b/src/shader_recompiler/frontend/maxwell/structured_control_flow.cpp
new file mode 100755
index 000000000..06fde0017
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/structured_control_flow.cpp
@@ -0,0 +1,888 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+ +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include + +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/frontend/maxwell/decode.h" +#include "shader_recompiler/frontend/maxwell/structured_control_flow.h" +#include "shader_recompiler/frontend/maxwell/translate/translate.h" +#include "shader_recompiler/object_pool.h" + +namespace Shader::Maxwell { +namespace { +struct Statement; + +// Use normal_link because we are not guaranteed to destroy the tree in order +using ListBaseHook = + boost::intrusive::list_base_hook>; + +using Tree = boost::intrusive::list, + // Avoid linear complexity on splice, size is never called + boost::intrusive::constant_time_size>; +using Node = Tree::iterator; + +enum class StatementType { + Code, + Goto, + Label, + If, + Loop, + Break, + Return, + Kill, + Unreachable, + Function, + Identity, + Not, + Or, + SetVariable, + SetIndirectBranchVariable, + Variable, + IndirectBranchCond, +}; + +bool HasChildren(StatementType type) { + switch (type) { + case StatementType::If: + case StatementType::Loop: + case StatementType::Function: + return true; + default: + return false; + } +} + +struct Goto {}; +struct Label {}; +struct If {}; +struct Loop {}; +struct Break {}; +struct Return {}; +struct Kill {}; +struct Unreachable {}; +struct FunctionTag {}; +struct Identity {}; +struct Not {}; +struct Or {}; +struct SetVariable {}; +struct SetIndirectBranchVariable {}; +struct Variable {}; +struct IndirectBranchCond {}; + +#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 26495) // Always initialize a member variable, expected in Statement +#endif +struct Statement : ListBaseHook { + Statement(const Flow::Block* block_, Statement* up_) + : block{block_}, up{up_}, type{StatementType::Code} {} + Statement(Goto, Statement* cond_, Node label_, Statement* up_) + : label{label_}, cond{cond_}, up{up_}, type{StatementType::Goto} {} + Statement(Label, u32 id_, Statement* up_) : id{id_}, up{up_}, type{StatementType::Label} {} + Statement(If, Statement* cond_, Tree&& children_, Statement* up_) + : children{std::move(children_)}, cond{cond_}, up{up_}, type{StatementType::If} {} + Statement(Loop, Statement* cond_, Tree&& children_, Statement* up_) + : children{std::move(children_)}, cond{cond_}, up{up_}, type{StatementType::Loop} {} + Statement(Break, Statement* cond_, Statement* up_) + : cond{cond_}, up{up_}, type{StatementType::Break} {} + Statement(Return, Statement* up_) : up{up_}, type{StatementType::Return} {} + Statement(Kill, Statement* up_) : up{up_}, type{StatementType::Kill} {} + Statement(Unreachable, Statement* up_) : up{up_}, type{StatementType::Unreachable} {} + Statement(FunctionTag) : children{}, type{StatementType::Function} {} + Statement(Identity, IR::Condition cond_, Statement* up_) + : guest_cond{cond_}, up{up_}, type{StatementType::Identity} {} + Statement(Not, Statement* op_, Statement* up_) : op{op_}, up{up_}, type{StatementType::Not} {} + Statement(Or, Statement* op_a_, Statement* op_b_, Statement* up_) + : op_a{op_a_}, op_b{op_b_}, up{up_}, type{StatementType::Or} {} + Statement(SetVariable, u32 id_, Statement* op_, Statement* up_) + : op{op_}, id{id_}, up{up_}, type{StatementType::SetVariable} {} + Statement(SetIndirectBranchVariable, IR::Reg branch_reg_, s32 branch_offset_, Statement* up_) + : branch_offset{branch_offset_}, + branch_reg{branch_reg_}, up{up_}, 
type{StatementType::SetIndirectBranchVariable} {} + Statement(Variable, u32 id_, Statement* up_) + : id{id_}, up{up_}, type{StatementType::Variable} {} + Statement(IndirectBranchCond, u32 location_, Statement* up_) + : location{location_}, up{up_}, type{StatementType::IndirectBranchCond} {} + + ~Statement() { + if (HasChildren(type)) { + std::destroy_at(&children); + } + } + + union { + const Flow::Block* block; + Node label; + Tree children; + IR::Condition guest_cond; + Statement* op; + Statement* op_a; + u32 location; + s32 branch_offset; + }; + union { + Statement* cond; + Statement* op_b; + u32 id; + IR::Reg branch_reg; + }; + Statement* up{}; + StatementType type; +}; +#ifdef _MSC_VER +#pragma warning(pop) +#endif + +std::string DumpExpr(const Statement* stmt) { + switch (stmt->type) { + case StatementType::Identity: + return fmt::format("{}", stmt->guest_cond); + case StatementType::Not: + return fmt::format("!{}", DumpExpr(stmt->op)); + case StatementType::Or: + return fmt::format("{} || {}", DumpExpr(stmt->op_a), DumpExpr(stmt->op_b)); + case StatementType::Variable: + return fmt::format("goto_L{}", stmt->id); + case StatementType::IndirectBranchCond: + return fmt::format("(indirect_branch == {:x})", stmt->location); + default: + return ""; + } +} + +std::string DumpTree(const Tree& tree, u32 indentation = 0) { + std::string ret; + std::string indent(indentation, ' '); + for (auto stmt = tree.begin(); stmt != tree.end(); ++stmt) { + switch (stmt->type) { + case StatementType::Code: + ret += fmt::format("{} Block {:04x} -> {:04x} (0x{:016x});\n", indent, + stmt->block->begin.Offset(), stmt->block->end.Offset(), + reinterpret_cast(stmt->block)); + break; + case StatementType::Goto: + ret += fmt::format("{} if ({}) goto L{};\n", indent, DumpExpr(stmt->cond), + stmt->label->id); + break; + case StatementType::Label: + ret += fmt::format("{}L{}:\n", indent, stmt->id); + break; + case StatementType::If: + ret += fmt::format("{} if ({}) {{\n", indent, DumpExpr(stmt->cond)); + ret += DumpTree(stmt->children, indentation + 4); + ret += fmt::format("{} }}\n", indent); + break; + case StatementType::Loop: + ret += fmt::format("{} do {{\n", indent); + ret += DumpTree(stmt->children, indentation + 4); + ret += fmt::format("{} }} while ({});\n", indent, DumpExpr(stmt->cond)); + break; + case StatementType::Break: + ret += fmt::format("{} if ({}) break;\n", indent, DumpExpr(stmt->cond)); + break; + case StatementType::Return: + ret += fmt::format("{} return;\n", indent); + break; + case StatementType::Kill: + ret += fmt::format("{} kill;\n", indent); + break; + case StatementType::Unreachable: + ret += fmt::format("{} unreachable;\n", indent); + break; + case StatementType::SetVariable: + ret += fmt::format("{} goto_L{} = {};\n", indent, stmt->id, DumpExpr(stmt->op)); + break; + case StatementType::SetIndirectBranchVariable: + ret += fmt::format("{} indirect_branch = {} + {};\n", indent, stmt->branch_reg, + stmt->branch_offset); + break; + case StatementType::Function: + case StatementType::Identity: + case StatementType::Not: + case StatementType::Or: + case StatementType::Variable: + case StatementType::IndirectBranchCond: + throw LogicError("Statement can't be printed"); + } + } + return ret; +} + +void SanitizeNoBreaks(const Tree& tree) { + if (std::ranges::find(tree, StatementType::Break, &Statement::type) != tree.end()) { + throw NotImplementedException("Capturing statement with break nodes"); + } +} + +size_t Level(Node stmt) { + size_t level{0}; + Statement* node{stmt->up}; + while 
(node) { + ++level; + node = node->up; + } + return level; +} + +bool IsDirectlyRelated(Node goto_stmt, Node label_stmt) { + const size_t goto_level{Level(goto_stmt)}; + const size_t label_level{Level(label_stmt)}; + size_t min_level; + size_t max_level; + Node min; + Node max; + if (label_level < goto_level) { + min_level = label_level; + max_level = goto_level; + min = label_stmt; + max = goto_stmt; + } else { // goto_level < label_level + min_level = goto_level; + max_level = label_level; + min = goto_stmt; + max = label_stmt; + } + while (max_level > min_level) { + --max_level; + max = max->up; + } + return min->up == max->up; +} + +bool IsIndirectlyRelated(Node goto_stmt, Node label_stmt) { + return goto_stmt->up != label_stmt->up && !IsDirectlyRelated(goto_stmt, label_stmt); +} + +[[maybe_unused]] bool AreSiblings(Node goto_stmt, Node label_stmt) noexcept { + Node it{goto_stmt}; + do { + if (it == label_stmt) { + return true; + } + --it; + } while (it != goto_stmt->up->children.begin()); + while (it != goto_stmt->up->children.end()) { + if (it == label_stmt) { + return true; + } + ++it; + } + return false; +} + +Node SiblingFromNephew(Node uncle, Node nephew) noexcept { + Statement* const parent{uncle->up}; + Statement* it{&*nephew}; + while (it->up != parent) { + it = it->up; + } + return Tree::s_iterator_to(*it); +} + +bool AreOrdered(Node left_sibling, Node right_sibling) noexcept { + const Node end{right_sibling->up->children.end()}; + for (auto it = right_sibling; it != end; ++it) { + if (it == left_sibling) { + return false; + } + } + return true; +} + +bool NeedsLift(Node goto_stmt, Node label_stmt) noexcept { + const Node sibling{SiblingFromNephew(goto_stmt, label_stmt)}; + return AreOrdered(sibling, goto_stmt); +} + +class GotoPass { +public: + explicit GotoPass(Flow::CFG& cfg, ObjectPool& inst_pool_, + ObjectPool& block_pool_, ObjectPool& stmt_pool) + : inst_pool{inst_pool_}, block_pool{block_pool_}, pool{stmt_pool} { + std::vector gotos{BuildTree(cfg)}; + for (const Node& goto_stmt : gotos | std::views::reverse) { + RemoveGoto(goto_stmt); + } + } + + Statement& RootStatement() noexcept { + return root_stmt; + } + +private: + void RemoveGoto(Node goto_stmt) { + // Force goto_stmt and label_stmt to be directly related + const Node label_stmt{goto_stmt->label}; + if (IsIndirectlyRelated(goto_stmt, label_stmt)) { + // Move goto_stmt out using outward-movement transformation until it becomes + // directly related to label_stmt + while (!IsDirectlyRelated(goto_stmt, label_stmt)) { + goto_stmt = MoveOutward(goto_stmt); + } + } + // Force goto_stmt and label_stmt to be siblings + if (IsDirectlyRelated(goto_stmt, label_stmt)) { + const size_t label_level{Level(label_stmt)}; + size_t goto_level{Level(goto_stmt)}; + if (goto_level > label_level) { + // Move goto_stmt out of its level using outward-movement transformations + while (goto_level > label_level) { + goto_stmt = MoveOutward(goto_stmt); + --goto_level; + } + } else { // Level(goto_stmt) < Level(label_stmt) + if (NeedsLift(goto_stmt, label_stmt)) { + // Lift goto_stmt to above stmt containing label_stmt using goto-lifting + // transformations + goto_stmt = Lift(goto_stmt); + } + // Move goto_stmt into label_stmt's level using inward-movement transformation + while (goto_level < label_level) { + goto_stmt = MoveInward(goto_stmt); + ++goto_level; + } + } + } + // Expensive operation: + // if (!AreSiblings(goto_stmt, label_stmt)) { + // throw LogicError("Goto is not a sibling with the label"); + // } + // goto_stmt and 
label_stmt are guaranteed to be siblings, eliminate + if (std::next(goto_stmt) == label_stmt) { + // Simply eliminate the goto if the label is next to it + goto_stmt->up->children.erase(goto_stmt); + } else if (AreOrdered(goto_stmt, label_stmt)) { + // Eliminate goto_stmt with a conditional + EliminateAsConditional(goto_stmt, label_stmt); + } else { + // Eliminate goto_stmt with a loop + EliminateAsLoop(goto_stmt, label_stmt); + } + } + + std::vector BuildTree(Flow::CFG& cfg) { + u32 label_id{0}; + std::vector gotos; + Flow::Function& first_function{cfg.Functions().front()}; + BuildTree(cfg, first_function, label_id, gotos, root_stmt.children.end(), std::nullopt); + return gotos; + } + + void BuildTree(Flow::CFG& cfg, Flow::Function& function, u32& label_id, + std::vector& gotos, Node function_insert_point, + std::optional return_label) { + Statement* const false_stmt{pool.Create(Identity{}, IR::Condition{false}, &root_stmt)}; + Tree& root{root_stmt.children}; + std::unordered_map local_labels; + local_labels.reserve(function.blocks.size()); + + for (Flow::Block& block : function.blocks) { + Statement* const label{pool.Create(Label{}, label_id, &root_stmt)}; + const Node label_it{root.insert(function_insert_point, *label)}; + local_labels.emplace(&block, label_it); + ++label_id; + } + for (Flow::Block& block : function.blocks) { + const Node label{local_labels.at(&block)}; + // Insertion point + const Node ip{std::next(label)}; + + // Reset goto variables before the first block and after its respective label + const auto make_reset_variable{[&]() -> Statement& { + return *pool.Create(SetVariable{}, label->id, false_stmt, &root_stmt); + }}; + root.push_front(make_reset_variable()); + root.insert(ip, make_reset_variable()); + root.insert(ip, *pool.Create(&block, &root_stmt)); + + switch (block.end_class) { + case Flow::EndClass::Branch: { + Statement* const always_cond{ + pool.Create(Identity{}, IR::Condition{true}, &root_stmt)}; + if (block.cond == IR::Condition{true}) { + const Node true_label{local_labels.at(block.branch_true)}; + gotos.push_back( + root.insert(ip, *pool.Create(Goto{}, always_cond, true_label, &root_stmt))); + } else if (block.cond == IR::Condition{false}) { + const Node false_label{local_labels.at(block.branch_false)}; + gotos.push_back(root.insert( + ip, *pool.Create(Goto{}, always_cond, false_label, &root_stmt))); + } else { + const Node true_label{local_labels.at(block.branch_true)}; + const Node false_label{local_labels.at(block.branch_false)}; + Statement* const true_cond{pool.Create(Identity{}, block.cond, &root_stmt)}; + gotos.push_back( + root.insert(ip, *pool.Create(Goto{}, true_cond, true_label, &root_stmt))); + gotos.push_back(root.insert( + ip, *pool.Create(Goto{}, always_cond, false_label, &root_stmt))); + } + break; + } + case Flow::EndClass::IndirectBranch: + root.insert(ip, *pool.Create(SetIndirectBranchVariable{}, block.branch_reg, + block.branch_offset, &root_stmt)); + for (const Flow::IndirectBranch& indirect : block.indirect_branches) { + const Node indirect_label{local_labels.at(indirect.block)}; + Statement* cond{ + pool.Create(IndirectBranchCond{}, indirect.address, &root_stmt)}; + Statement* goto_stmt{pool.Create(Goto{}, cond, indirect_label, &root_stmt)}; + gotos.push_back(root.insert(ip, *goto_stmt)); + } + root.insert(ip, *pool.Create(Unreachable{}, &root_stmt)); + break; + case Flow::EndClass::Call: { + Flow::Function& call{cfg.Functions()[block.function_call]}; + const Node call_return_label{local_labels.at(block.return_block)}; + 
BuildTree(cfg, call, label_id, gotos, ip, call_return_label); + break; + } + case Flow::EndClass::Exit: + root.insert(ip, *pool.Create(Return{}, &root_stmt)); + break; + case Flow::EndClass::Return: { + Statement* const always_cond{pool.Create(Identity{}, block.cond, &root_stmt)}; + auto goto_stmt{pool.Create(Goto{}, always_cond, return_label.value(), &root_stmt)}; + gotos.push_back(root.insert(ip, *goto_stmt)); + break; + } + case Flow::EndClass::Kill: + root.insert(ip, *pool.Create(Kill{}, &root_stmt)); + break; + } + } + } + + void UpdateTreeUp(Statement* tree) { + for (Statement& stmt : tree->children) { + stmt.up = tree; + } + } + + void EliminateAsConditional(Node goto_stmt, Node label_stmt) { + Tree& body{goto_stmt->up->children}; + Tree if_body; + if_body.splice(if_body.begin(), body, std::next(goto_stmt), label_stmt); + Statement* const cond{pool.Create(Not{}, goto_stmt->cond, &root_stmt)}; + Statement* const if_stmt{pool.Create(If{}, cond, std::move(if_body), goto_stmt->up)}; + UpdateTreeUp(if_stmt); + body.insert(goto_stmt, *if_stmt); + body.erase(goto_stmt); + } + + void EliminateAsLoop(Node goto_stmt, Node label_stmt) { + Tree& body{goto_stmt->up->children}; + Tree loop_body; + loop_body.splice(loop_body.begin(), body, label_stmt, goto_stmt); + Statement* const cond{goto_stmt->cond}; + Statement* const loop{pool.Create(Loop{}, cond, std::move(loop_body), goto_stmt->up)}; + UpdateTreeUp(loop); + body.insert(goto_stmt, *loop); + body.erase(goto_stmt); + } + + [[nodiscard]] Node MoveOutward(Node goto_stmt) { + switch (goto_stmt->up->type) { + case StatementType::If: + return MoveOutwardIf(goto_stmt); + case StatementType::Loop: + return MoveOutwardLoop(goto_stmt); + default: + throw LogicError("Invalid outward movement"); + } + } + + [[nodiscard]] Node MoveInward(Node goto_stmt) { + Statement* const parent{goto_stmt->up}; + Tree& body{parent->children}; + const Node label{goto_stmt->label}; + const Node label_nested_stmt{SiblingFromNephew(goto_stmt, label)}; + const u32 label_id{label->id}; + + Statement* const goto_cond{goto_stmt->cond}; + Statement* const set_var{pool.Create(SetVariable{}, label_id, goto_cond, parent)}; + body.insert(goto_stmt, *set_var); + + Tree if_body; + if_body.splice(if_body.begin(), body, std::next(goto_stmt), label_nested_stmt); + Statement* const variable{pool.Create(Variable{}, label_id, &root_stmt)}; + Statement* const neg_var{pool.Create(Not{}, variable, &root_stmt)}; + if (!if_body.empty()) { + Statement* const if_stmt{pool.Create(If{}, neg_var, std::move(if_body), parent)}; + UpdateTreeUp(if_stmt); + body.insert(goto_stmt, *if_stmt); + } + body.erase(goto_stmt); + + switch (label_nested_stmt->type) { + case StatementType::If: + // Update nested if condition + label_nested_stmt->cond = + pool.Create(Or{}, variable, label_nested_stmt->cond, &root_stmt); + break; + case StatementType::Loop: + break; + default: + throw LogicError("Invalid inward movement"); + } + Tree& nested_tree{label_nested_stmt->children}; + Statement* const new_goto{pool.Create(Goto{}, variable, label, &*label_nested_stmt)}; + return nested_tree.insert(nested_tree.begin(), *new_goto); + } + + [[nodiscard]] Node Lift(Node goto_stmt) { + Statement* const parent{goto_stmt->up}; + Tree& body{parent->children}; + const Node label{goto_stmt->label}; + const u32 label_id{label->id}; + const Node label_nested_stmt{SiblingFromNephew(goto_stmt, label)}; + + Tree loop_body; + loop_body.splice(loop_body.begin(), body, label_nested_stmt, goto_stmt); + SanitizeNoBreaks(loop_body); + Statement* 
const variable{pool.Create(Variable{}, label_id, &root_stmt)};
+        Statement* const loop_stmt{pool.Create(Loop{}, variable, std::move(loop_body), parent)};
+        UpdateTreeUp(loop_stmt);
+        body.insert(goto_stmt, *loop_stmt);
+
+        Statement* const new_goto{pool.Create(Goto{}, variable, label, loop_stmt)};
+        loop_stmt->children.push_front(*new_goto);
+        const Node new_goto_node{loop_stmt->children.begin()};
+
+        Statement* const set_var{pool.Create(SetVariable{}, label_id, goto_stmt->cond, loop_stmt)};
+        loop_stmt->children.push_back(*set_var);
+
+        body.erase(goto_stmt);
+        return new_goto_node;
+    }
+
+    Node MoveOutwardIf(Node goto_stmt) {
+        const Node parent{Tree::s_iterator_to(*goto_stmt->up)};
+        Tree& body{parent->children};
+        const u32 label_id{goto_stmt->label->id};
+        Statement* const goto_cond{goto_stmt->cond};
+        Statement* const set_goto_var{pool.Create(SetVariable{}, label_id, goto_cond, &*parent)};
+        body.insert(goto_stmt, *set_goto_var);
+
+        Tree if_body;
+        if_body.splice(if_body.begin(), body, std::next(goto_stmt), body.end());
+        if_body.pop_front();
+        Statement* const cond{pool.Create(Variable{}, label_id, &root_stmt)};
+        Statement* const neg_cond{pool.Create(Not{}, cond, &root_stmt)};
+        Statement* const if_stmt{pool.Create(If{}, neg_cond, std::move(if_body), &*parent)};
+        UpdateTreeUp(if_stmt);
+        body.insert(goto_stmt, *if_stmt);
+
+        body.erase(goto_stmt);
+
+        Statement* const new_cond{pool.Create(Variable{}, label_id, &root_stmt)};
+        Statement* const new_goto{pool.Create(Goto{}, new_cond, goto_stmt->label, parent->up)};
+        Tree& parent_tree{parent->up->children};
+        return parent_tree.insert(std::next(parent), *new_goto);
+    }
+
+    Node MoveOutwardLoop(Node goto_stmt) {
+        Statement* const parent{goto_stmt->up};
+        Tree& body{parent->children};
+        const u32 label_id{goto_stmt->label->id};
+        Statement* const goto_cond{goto_stmt->cond};
+        Statement* const set_goto_var{pool.Create(SetVariable{}, label_id, goto_cond, parent)};
+        Statement* const cond{pool.Create(Variable{}, label_id, &root_stmt)};
+        Statement* const break_stmt{pool.Create(Break{}, cond, parent)};
+        body.insert(goto_stmt, *set_goto_var);
+        body.insert(goto_stmt, *break_stmt);
+        body.erase(goto_stmt);
+
+        const Node loop{Tree::s_iterator_to(*goto_stmt->up)};
+        Statement* const new_goto_cond{pool.Create(Variable{}, label_id, &root_stmt)};
+        Statement* const new_goto{pool.Create(Goto{}, new_goto_cond, goto_stmt->label, loop->up)};
+        Tree& parent_tree{loop->up->children};
+        return parent_tree.insert(std::next(loop), *new_goto);
+    }
+
+    ObjectPool<IR::Inst>& inst_pool;
+    ObjectPool<IR::Block>& block_pool;
+    ObjectPool<Statement>& pool;
+    Statement root_stmt{FunctionTag{}};
+};
+
+[[nodiscard]] Statement* TryFindForwardBlock(Statement& stmt) {
+    Tree& tree{stmt.up->children};
+    const Node end{tree.end()};
+    Node forward_node{std::next(Tree::s_iterator_to(stmt))};
+    while (forward_node != end && !HasChildren(forward_node->type)) {
+        if (forward_node->type == StatementType::Code) {
+            return &*forward_node;
+        }
+        ++forward_node;
+    }
+    return nullptr;
+}
+
+[[nodiscard]] IR::U1 VisitExpr(IR::IREmitter& ir, const Statement& stmt) {
+    switch (stmt.type) {
+    case StatementType::Identity:
+        return ir.Condition(stmt.guest_cond);
+    case StatementType::Not:
+        return ir.LogicalNot(IR::U1{VisitExpr(ir, *stmt.op)});
+    case StatementType::Or:
+        return ir.LogicalOr(VisitExpr(ir, *stmt.op_a), VisitExpr(ir, *stmt.op_b));
+    case StatementType::Variable:
+        return ir.GetGotoVariable(stmt.id);
+    case StatementType::IndirectBranchCond:
+        return ir.IEqual(ir.GetIndirectBranchVariable(), ir.Imm32(stmt.location));
+    default:
+        throw NotImplementedException("Statement type {}", stmt.type);
+    }
+}
+
+class TranslatePass {
+public:
+    TranslatePass(ObjectPool<IR::Inst>& inst_pool_, ObjectPool<IR::Block>& block_pool_,
+                  ObjectPool<Statement>& stmt_pool_, Environment& env_, Statement& root_stmt,
+                  IR::AbstractSyntaxList& syntax_list_)
+        : stmt_pool{stmt_pool_}, inst_pool{inst_pool_}, block_pool{block_pool_}, env{env_},
+          syntax_list{syntax_list_} {
+        Visit(root_stmt, nullptr, nullptr);
+
+        IR::Block& first_block{*syntax_list.front().data.block};
+        IR::IREmitter ir(first_block, first_block.begin());
+        ir.Prologue();
+    }
+
+private:
+    void Visit(Statement& parent, IR::Block* break_block, IR::Block* fallthrough_block) {
+        IR::Block* current_block{};
+        const auto ensure_block{[&] {
+            if (current_block) {
+                return;
+            }
+            current_block = block_pool.Create(inst_pool);
+            auto& node{syntax_list.emplace_back()};
+            node.type = IR::AbstractSyntaxNode::Type::Block;
+            node.data.block = current_block;
+        }};
+        Tree& tree{parent.children};
+        for (auto it = tree.begin(); it != tree.end(); ++it) {
+            Statement& stmt{*it};
+            switch (stmt.type) {
+            case StatementType::Label:
+                // Labels can be ignored
+                break;
+            case StatementType::Code: {
+                ensure_block();
+                Translate(env, current_block, stmt.block->begin.Offset(), stmt.block->end.Offset());
+                break;
+            }
+            case StatementType::SetVariable: {
+                ensure_block();
+                IR::IREmitter ir{*current_block};
+                ir.SetGotoVariable(stmt.id, VisitExpr(ir, *stmt.op));
+                break;
+            }
+            case StatementType::SetIndirectBranchVariable: {
+                ensure_block();
+                IR::IREmitter ir{*current_block};
+                IR::U32 address{ir.IAdd(ir.GetReg(stmt.branch_reg), ir.Imm32(stmt.branch_offset))};
+                ir.SetIndirectBranchVariable(address);
+                break;
+            }
+            case StatementType::If: {
+                ensure_block();
+                IR::Block* const merge_block{MergeBlock(parent, stmt)};
+
+                // Implement if header block
+                IR::IREmitter ir{*current_block};
+                const IR::U1 cond{ir.ConditionRef(VisitExpr(ir, *stmt.cond))};
+
+                const size_t if_node_index{syntax_list.size()};
+                syntax_list.emplace_back();
+
+                // Visit children
+                const size_t then_block_index{syntax_list.size()};
+                Visit(stmt, break_block, merge_block);
+
+                IR::Block* const then_block{syntax_list.at(then_block_index).data.block};
+                current_block->AddBranch(then_block);
+                current_block->AddBranch(merge_block);
+                current_block = merge_block;
+
+                auto& if_node{syntax_list[if_node_index]};
+                if_node.type = IR::AbstractSyntaxNode::Type::If;
+                if_node.data.if_node.cond = cond;
+                if_node.data.if_node.body = then_block;
+                if_node.data.if_node.merge = merge_block;
+
+                auto& endif_node{syntax_list.emplace_back()};
+                endif_node.type = IR::AbstractSyntaxNode::Type::EndIf;
+                endif_node.data.end_if.merge = merge_block;
+
+                auto& merge{syntax_list.emplace_back()};
+                merge.type = IR::AbstractSyntaxNode::Type::Block;
+                merge.data.block = merge_block;
+                break;
+            }
+            case StatementType::Loop: {
+                IR::Block* const loop_header_block{block_pool.Create(inst_pool)};
+                if (current_block) {
+                    current_block->AddBranch(loop_header_block);
+                }
+                auto& header_node{syntax_list.emplace_back()};
+                header_node.type = IR::AbstractSyntaxNode::Type::Block;
+                header_node.data.block = loop_header_block;
+
+                IR::Block* const continue_block{block_pool.Create(inst_pool)};
+                IR::Block* const merge_block{MergeBlock(parent, stmt)};
+
+                const size_t loop_node_index{syntax_list.size()};
+                syntax_list.emplace_back();
+
+                // Visit children
+                const size_t body_block_index{syntax_list.size()};
+                Visit(stmt, merge_block, continue_block);
+
+                // The continue block is located at the end of the loop
+                IR::IREmitter ir{*continue_block};
+                const IR::U1 cond{ir.ConditionRef(VisitExpr(ir, *stmt.cond))};
+
+                IR::Block* const body_block{syntax_list.at(body_block_index).data.block};
+                loop_header_block->AddBranch(body_block);
+
+                continue_block->AddBranch(loop_header_block);
+                continue_block->AddBranch(merge_block);
+
+                current_block = merge_block;
+
+                auto& loop{syntax_list[loop_node_index]};
+                loop.type = IR::AbstractSyntaxNode::Type::Loop;
+                loop.data.loop.body = body_block;
+                loop.data.loop.continue_block = continue_block;
+                loop.data.loop.merge = merge_block;
+
+                auto& continue_block_node{syntax_list.emplace_back()};
+                continue_block_node.type = IR::AbstractSyntaxNode::Type::Block;
+                continue_block_node.data.block = continue_block;
+
+                auto& repeat{syntax_list.emplace_back()};
+                repeat.type = IR::AbstractSyntaxNode::Type::Repeat;
+                repeat.data.repeat.cond = cond;
+                repeat.data.repeat.loop_header = loop_header_block;
+                repeat.data.repeat.merge = merge_block;
+
+                auto& merge{syntax_list.emplace_back()};
+                merge.type = IR::AbstractSyntaxNode::Type::Block;
+                merge.data.block = merge_block;
+                break;
+            }
+            case StatementType::Break: {
+                ensure_block();
+                IR::Block* const skip_block{MergeBlock(parent, stmt)};
+
+                IR::IREmitter ir{*current_block};
+                const IR::U1 cond{ir.ConditionRef(VisitExpr(ir, *stmt.cond))};
+                current_block->AddBranch(break_block);
+                current_block->AddBranch(skip_block);
+                current_block = skip_block;
+
+                auto& break_node{syntax_list.emplace_back()};
+                break_node.type = IR::AbstractSyntaxNode::Type::Break;
+                break_node.data.break_node.cond = cond;
+                break_node.data.break_node.merge = break_block;
+                break_node.data.break_node.skip = skip_block;
+
+                auto& merge{syntax_list.emplace_back()};
+                merge.type = IR::AbstractSyntaxNode::Type::Block;
+                merge.data.block = skip_block;
+                break;
+            }
+            case StatementType::Return: {
+                ensure_block();
+                IR::IREmitter{*current_block}.Epilogue();
+                current_block = nullptr;
+                syntax_list.emplace_back().type = IR::AbstractSyntaxNode::Type::Return;
+                break;
+            }
+            case StatementType::Kill: {
+                ensure_block();
+                IR::Block* demote_block{MergeBlock(parent, stmt)};
+                IR::IREmitter{*current_block}.DemoteToHelperInvocation();
+                current_block->AddBranch(demote_block);
+                current_block = demote_block;
+
+                auto& merge{syntax_list.emplace_back()};
+                merge.type = IR::AbstractSyntaxNode::Type::Block;
+                merge.data.block = demote_block;
+                break;
+            }
+            case StatementType::Unreachable: {
+                ensure_block();
+                current_block = nullptr;
+                syntax_list.emplace_back().type = IR::AbstractSyntaxNode::Type::Unreachable;
+                break;
+            }
+            default:
+                throw NotImplementedException("Statement type {}", stmt.type);
+            }
+        }
+        if (current_block) {
+            if (fallthrough_block) {
+                current_block->AddBranch(fallthrough_block);
+            } else {
+                syntax_list.emplace_back().type = IR::AbstractSyntaxNode::Type::Unreachable;
+            }
+        }
+    }
+
+    IR::Block* MergeBlock(Statement& parent, Statement& stmt) {
+        Statement* merge_stmt{TryFindForwardBlock(stmt)};
+        if (!merge_stmt) {
+            // Create a merge block we can visit later
+            merge_stmt = stmt_pool.Create(&dummy_flow_block, &parent);
+            parent.children.insert(std::next(Tree::s_iterator_to(stmt)), *merge_stmt);
+        }
+        return block_pool.Create(inst_pool);
+    }
+
+    ObjectPool<Statement>& stmt_pool;
+    ObjectPool<IR::Inst>& inst_pool;
+    ObjectPool<IR::Block>& block_pool;
+    Environment& env;
+    IR::AbstractSyntaxList& syntax_list;
+    u32 loop_id{};
+
+// TODO: C++20 Remove this when all compilers support constexpr std::vector
+#if __cpp_lib_constexpr_vector >= 201907
+    static constexpr Flow::Block dummy_flow_block;
+#else
+    const Flow::Block dummy_flow_block;
+#endif
+};
+} // Anonymous namespace
+
+IR::AbstractSyntaxList BuildASL(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
+                                Environment& env, Flow::CFG& cfg) {
+    ObjectPool<Statement> stmt_pool{64};
+    GotoPass goto_pass{cfg, inst_pool, block_pool, stmt_pool};
+    Statement& root{goto_pass.RootStatement()};
+    IR::AbstractSyntaxList syntax_list;
+    TranslatePass{inst_pool, block_pool, stmt_pool, env, root, syntax_list};
+    return syntax_list;
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/structured_control_flow.h b/src/shader_recompiler/frontend/maxwell/structured_control_flow.h
new file mode 100755
index 000000000..88b083649
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/structured_control_flow.h
@@ -0,0 +1,20 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include "shader_recompiler/environment.h"
+#include "shader_recompiler/frontend/ir/abstract_syntax_list.h"
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/frontend/maxwell/control_flow.h"
+#include "shader_recompiler/object_pool.h"
+
+namespace Shader::Maxwell {
+
+[[nodiscard]] IR::AbstractSyntaxList BuildASL(ObjectPool<IR::Inst>& inst_pool,
+                                              ObjectPool<IR::Block>& block_pool, Environment& env,
+                                              Flow::CFG& cfg);
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_global_memory.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_global_memory.cpp
new file mode 100755
index 000000000..66f39e44e
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_global_memory.cpp
@@ -0,0 +1,214 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
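// Editor's note (illustrative aside, not part of the patch): the file below
// lowers Maxwell ATOM/RED instructions onto IR atomics. The INC/DEC flavours
// use PTX-style wrapping semantics rather than a plain add/sub; assuming the
// documented atom.inc behaviour (store old >= limit ? 0 : old + 1, return
// old), a host-side CAS loop models it:
#include <atomic>
#include <cstdint>
#include <cstdio>

uint32_t AtomicInc(std::atomic<uint32_t>& word, uint32_t limit) {
    uint32_t old = word.load();
    while (!word.compare_exchange_weak(old, old >= limit ? 0 : old + 1)) {
    }
    return old; // ATOM writes this to a register; RED discards it
}

int main() {
    std::atomic<uint32_t> w{2};
    const uint32_t old = AtomicInc(w, 2);
    std::printf("%u %u\n", old, w.load()); // 2 0
}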
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class AtomOp : u64 {
+    ADD,
+    MIN,
+    MAX,
+    INC,
+    DEC,
+    AND,
+    OR,
+    XOR,
+    EXCH,
+    SAFEADD,
+};
+
+enum class AtomSize : u64 {
+    U32,
+    S32,
+    U64,
+    F32,
+    F16x2,
+    S64,
+};
+
+IR::U32U64 ApplyIntegerAtomOp(IR::IREmitter& ir, const IR::U32U64& offset, const IR::U32U64& op_b,
+                              AtomOp op, bool is_signed) {
+    switch (op) {
+    case AtomOp::ADD:
+        return ir.GlobalAtomicIAdd(offset, op_b);
+    case AtomOp::MIN:
+        return ir.GlobalAtomicIMin(offset, op_b, is_signed);
+    case AtomOp::MAX:
+        return ir.GlobalAtomicIMax(offset, op_b, is_signed);
+    case AtomOp::INC:
+        return ir.GlobalAtomicInc(offset, op_b);
+    case AtomOp::DEC:
+        return ir.GlobalAtomicDec(offset, op_b);
+    case AtomOp::AND:
+        return ir.GlobalAtomicAnd(offset, op_b);
+    case AtomOp::OR:
+        return ir.GlobalAtomicOr(offset, op_b);
+    case AtomOp::XOR:
+        return ir.GlobalAtomicXor(offset, op_b);
+    case AtomOp::EXCH:
+        return ir.GlobalAtomicExchange(offset, op_b);
+    default:
+        throw NotImplementedException("Integer Atom Operation {}", op);
+    }
+}
+
+IR::Value ApplyFpAtomOp(IR::IREmitter& ir, const IR::U64& offset, const IR::Value& op_b, AtomOp op,
+                        AtomSize size) {
+    static constexpr IR::FpControl f16_control{
+        .no_contraction{false},
+        .rounding{IR::FpRounding::RN},
+        .fmz_mode{IR::FmzMode::DontCare},
+    };
+    static constexpr IR::FpControl f32_control{
+        .no_contraction{false},
+        .rounding{IR::FpRounding::RN},
+        .fmz_mode{IR::FmzMode::FTZ},
+    };
+    switch (op) {
+    case AtomOp::ADD:
+        return size == AtomSize::F32 ? ir.GlobalAtomicF32Add(offset, op_b, f32_control)
+                                     : ir.GlobalAtomicF16x2Add(offset, op_b, f16_control);
+    case AtomOp::MIN:
+        return ir.GlobalAtomicF16x2Min(offset, op_b, f16_control);
+    case AtomOp::MAX:
+        return ir.GlobalAtomicF16x2Max(offset, op_b, f16_control);
+    default:
+        throw NotImplementedException("FP Atom Operation {}", op);
+    }
+}
+
+IR::U64 AtomOffset(TranslatorVisitor& v, u64 insn) {
+    union {
+        u64 raw;
+        BitField<8, 8, IR::Reg> addr_reg;
+        BitField<28, 20, s64> addr_offset;
+        BitField<28, 20, u64> rz_addr_offset;
+        BitField<48, 1, u64> e;
+    } const mem{insn};
+
+    const IR::U64 address{[&]() -> IR::U64 {
+        if (mem.e == 0) {
+            return v.ir.UConvert(64, v.X(mem.addr_reg));
+        }
+        return v.L(mem.addr_reg);
+    }()};
+    const u64 addr_offset{[&]() -> u64 {
+        if (mem.addr_reg == IR::Reg::RZ) {
+            // When RZ is used, the address is an absolute address
+            return static_cast<u64>(mem.rz_addr_offset.Value());
+        } else {
+            return static_cast<u64>(mem.addr_offset.Value());
+        }
+    }()};
+    return v.ir.IAdd(address, v.ir.Imm64(addr_offset));
+}
+
+bool AtomOpNotApplicable(AtomSize size, AtomOp op) {
+    // TODO: SAFEADD
+    switch (size) {
+    case AtomSize::S32:
+    case AtomSize::U64:
+        return (op == AtomOp::INC || op == AtomOp::DEC);
+    case AtomSize::S64:
+        return !(op == AtomOp::MIN || op == AtomOp::MAX);
+    case AtomSize::F32:
+        return op != AtomOp::ADD;
+    case AtomSize::F16x2:
+        return !(op == AtomOp::ADD || op == AtomOp::MIN || op == AtomOp::MAX);
+    default:
+        return false;
+    }
+}
+
+IR::U32U64 LoadGlobal(IR::IREmitter& ir, const IR::U64& offset, AtomSize size) {
+    switch (size) {
+    case AtomSize::U32:
+    case AtomSize::S32:
+    case AtomSize::F32:
+    case AtomSize::F16x2:
+        return ir.LoadGlobal32(offset);
+    case AtomSize::U64:
+    case AtomSize::S64:
+        return ir.PackUint2x32(ir.LoadGlobal64(offset));
+    default:
+        throw NotImplementedException("Atom Size {}", size);
+    }
+}
+
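// Editor's note (illustrative aside, not part of the patch): the BitField
// unions above slice the 64-bit instruction word declaratively; a field
// declared with a signed type, like the 20-bit addr_offset at bit 28, is
// sign-extended when read. A hand-rolled equivalent for clarity:
#include <cstdint>
#include <cstdio>

int64_t SignedField(uint64_t insn, unsigned pos, unsigned len) {
    const uint64_t raw = (insn >> pos) & ((uint64_t{1} << len) - 1);
    const uint64_t sign = uint64_t{1} << (len - 1);
    return static_cast<int64_t>(raw ^ sign) - static_cast<int64_t>(sign);
}

int main() {
    const uint64_t insn = uint64_t{0xFFFFF} << 28; // all-ones 20-bit field
    std::printf("%lld\n", static_cast<long long>(SignedField(insn, 28, 20))); // -1
}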
+void StoreResult(TranslatorVisitor& v, IR::Reg dest_reg, const IR::Value& result, AtomSize size) { + switch (size) { + case AtomSize::U32: + case AtomSize::S32: + case AtomSize::F16x2: + return v.X(dest_reg, IR::U32{result}); + case AtomSize::U64: + case AtomSize::S64: + return v.L(dest_reg, IR::U64{result}); + case AtomSize::F32: + return v.F(dest_reg, IR::F32{result}); + default: + break; + } +} + +IR::Value ApplyAtomOp(TranslatorVisitor& v, IR::Reg operand_reg, const IR::U64& offset, + AtomSize size, AtomOp op) { + switch (size) { + case AtomSize::U32: + case AtomSize::S32: + return ApplyIntegerAtomOp(v.ir, offset, v.X(operand_reg), op, size == AtomSize::S32); + case AtomSize::U64: + case AtomSize::S64: + return ApplyIntegerAtomOp(v.ir, offset, v.L(operand_reg), op, size == AtomSize::S64); + case AtomSize::F32: + return ApplyFpAtomOp(v.ir, offset, v.F(operand_reg), op, size); + case AtomSize::F16x2: { + return ApplyFpAtomOp(v.ir, offset, v.ir.UnpackFloat2x16(v.X(operand_reg)), op, size); + } + default: + throw NotImplementedException("Atom Size {}", size); + } +} + +void GlobalAtomic(TranslatorVisitor& v, IR::Reg dest_reg, IR::Reg operand_reg, + const IR::U64& offset, AtomSize size, AtomOp op, bool write_dest) { + IR::Value result; + if (AtomOpNotApplicable(size, op)) { + result = LoadGlobal(v.ir, offset, size); + } else { + result = ApplyAtomOp(v, operand_reg, offset, size, op); + } + if (write_dest) { + StoreResult(v, dest_reg, result, size); + } +} +} // Anonymous namespace + +void TranslatorVisitor::ATOM(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<20, 8, IR::Reg> operand_reg; + BitField<49, 3, AtomSize> size; + BitField<52, 4, AtomOp> op; + } const atom{insn}; + const IR::U64 offset{AtomOffset(*this, insn)}; + GlobalAtomic(*this, atom.dest_reg, atom.operand_reg, offset, atom.size, atom.op, true); +} + +void TranslatorVisitor::RED(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> operand_reg; + BitField<20, 3, AtomSize> size; + BitField<23, 3, AtomOp> op; + } const red{insn}; + const IR::U64 offset{AtomOffset(*this, insn)}; + GlobalAtomic(*this, IR::Reg::RZ, red.operand_reg, offset, red.size, red.op, true); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_shared_memory.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_shared_memory.cpp new file mode 100755 index 000000000..8b974621e --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/atomic_operations_shared_memory.cpp @@ -0,0 +1,110 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
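// Editor's note (illustrative aside, not part of the patch): RED above goes
// through the same GlobalAtomic path as ATOM but targets RZ, so the returned
// old value is discarded and only the memory side effect of the reduction
// survives. Host-side analogue:
#include <atomic>
#include <cstdint>
#include <cstdio>

uint32_t Atom(std::atomic<uint32_t>& mem, uint32_t v) {
    return mem.fetch_add(v); // old value lands in the destination register
}

void Red(std::atomic<uint32_t>& mem, uint32_t v) {
    mem.fetch_add(v); // destination is RZ: the old value is dropped
}

int main() {
    std::atomic<uint32_t> m{1};
    const uint32_t old = Atom(m, 2);
    Red(m, 4);
    std::printf("%u %u\n", old, m.load()); // 1 7
}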
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class AtomOp : u64 {
+    ADD,
+    MIN,
+    MAX,
+    INC,
+    DEC,
+    AND,
+    OR,
+    XOR,
+    EXCH,
+};
+
+enum class AtomsSize : u64 {
+    U32,
+    S32,
+    U64,
+};
+
+IR::U32U64 ApplyAtomsOp(IR::IREmitter& ir, const IR::U32& offset, const IR::U32U64& op_b, AtomOp op,
+                        bool is_signed) {
+    switch (op) {
+    case AtomOp::ADD:
+        return ir.SharedAtomicIAdd(offset, op_b);
+    case AtomOp::MIN:
+        return ir.SharedAtomicIMin(offset, op_b, is_signed);
+    case AtomOp::MAX:
+        return ir.SharedAtomicIMax(offset, op_b, is_signed);
+    case AtomOp::INC:
+        return ir.SharedAtomicInc(offset, op_b);
+    case AtomOp::DEC:
+        return ir.SharedAtomicDec(offset, op_b);
+    case AtomOp::AND:
+        return ir.SharedAtomicAnd(offset, op_b);
+    case AtomOp::OR:
+        return ir.SharedAtomicOr(offset, op_b);
+    case AtomOp::XOR:
+        return ir.SharedAtomicXor(offset, op_b);
+    case AtomOp::EXCH:
+        return ir.SharedAtomicExchange(offset, op_b);
+    default:
+        throw NotImplementedException("Integer Atoms Operation {}", op);
+    }
+}
+
+IR::U32 AtomsOffset(TranslatorVisitor& v, u64 insn) {
+    union {
+        u64 raw;
+        BitField<8, 8, IR::Reg> offset_reg;
+        BitField<30, 22, u64> absolute_offset;
+        BitField<30, 22, s64> relative_offset;
+    } const encoding{insn};
+
+    if (encoding.offset_reg == IR::Reg::RZ) {
+        return v.ir.Imm32(static_cast<u32>(encoding.absolute_offset << 2));
+    } else {
+        const s32 relative{static_cast<s32>(encoding.relative_offset << 2)};
+        return v.ir.IAdd(v.X(encoding.offset_reg), v.ir.Imm32(relative));
+    }
+}
+
+void StoreResult(TranslatorVisitor& v, IR::Reg dest_reg, const IR::Value& result, AtomsSize size) {
+    switch (size) {
+    case AtomsSize::U32:
+    case AtomsSize::S32:
+        return v.X(dest_reg, IR::U32{result});
+    case AtomsSize::U64:
+        return v.L(dest_reg, IR::U64{result});
+    default:
+        break;
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::ATOMS(u64 insn) {
+    union {
+        u64 raw;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> addr_reg;
+        BitField<20, 8, IR::Reg> src_reg_b;
+        BitField<28, 2, AtomsSize> size;
+        BitField<52, 4, AtomOp> op;
+    } const atoms{insn};
+
+    const bool size_64{atoms.size == AtomsSize::U64};
+    if (size_64 && atoms.op != AtomOp::EXCH) {
+        throw NotImplementedException("64-bit Atoms Operation {}", atoms.op.Value());
+    }
+    const bool is_signed{atoms.size == AtomsSize::S32};
+    const IR::U32 offset{AtomsOffset(*this, insn)};
+
+    IR::Value result;
+    if (size_64) {
+        result = ApplyAtomsOp(ir, offset, L(atoms.src_reg_b), atoms.op, is_signed);
+    } else {
+        result = ApplyAtomsOp(ir, offset, X(atoms.src_reg_b), atoms.op, is_signed);
+    }
+    StoreResult(*this, atoms.dest_reg, result, atoms.size);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/attribute_memory_to_physical.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/attribute_memory_to_physical.cpp
new file mode 100755
index 000000000..fb3f00d3f
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/attribute_memory_to_physical.cpp
@@ -0,0 +1,35 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/maxwell/opcodes.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+
+enum class BitSize : u64 {
+    B32,
+    B64,
+    B96,
+    B128,
+};
+
+void TranslatorVisitor::AL2P(u64 inst) {
+    union {
+        u64 raw;
+        BitField<0, 8, IR::Reg> result_register;
+        BitField<8, 8, IR::Reg> indexing_register;
+        BitField<20, 11, s64> offset;
+        BitField<47, 2, BitSize> bitsize;
+    } al2p{inst};
+    if (al2p.bitsize != BitSize::B32) {
+        throw NotImplementedException("BitSize {}", al2p.bitsize.Value());
+    }
+    const IR::U32 converted_offset{ir.Imm32(static_cast<s32>(al2p.offset.Value()))};
+    const IR::U32 result{ir.IAdd(X(al2p.indexing_register), converted_offset)};
+    X(al2p.result_register, result);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/barrier_operations.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/barrier_operations.cpp
new file mode 100755
index 000000000..86e433e41
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/barrier_operations.cpp
@@ -0,0 +1,96 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/opcodes.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+// Seems to be in CUDA terminology.
+enum class LocalScope : u64 {
+    CTA,
+    GL,
+    SYS,
+    VC,
+};
+} // Anonymous namespace
+
+void TranslatorVisitor::MEMBAR(u64 inst) {
+    union {
+        u64 raw;
+        BitField<8, 2, LocalScope> scope;
+    } const membar{inst};
+
+    if (membar.scope == LocalScope::CTA) {
+        ir.WorkgroupMemoryBarrier();
+    } else {
+        ir.DeviceMemoryBarrier();
+    }
+}
+
+void TranslatorVisitor::DEPBAR() {
+    // DEPBAR is a no-op
+}
+
+void TranslatorVisitor::BAR(u64 insn) {
+    enum class Mode {
+        RedPopc,
+        Scan,
+        RedAnd,
+        RedOr,
+        Sync,
+        Arrive,
+    };
+    union {
+        u64 raw;
+        BitField<43, 1, u64> is_a_imm;
+        BitField<44, 1, u64> is_b_imm;
+        BitField<8, 8, u64> imm_a;
+        BitField<20, 12, u64> imm_b;
+        BitField<42, 1, u64> neg_pred;
+        BitField<39, 3, IR::Pred> pred;
+    } const bar{insn};
+
+    const Mode mode{[insn] {
+        switch (insn & 0x0000009B00000000ULL) {
+        case 0x0000000200000000ULL:
+            return Mode::RedPopc;
+        case 0x0000000300000000ULL:
+            return Mode::Scan;
+        case 0x0000000A00000000ULL:
+            return Mode::RedAnd;
+        case 0x0000001200000000ULL:
+            return Mode::RedOr;
+        case 0x0000008000000000ULL:
+            return Mode::Sync;
+        case 0x0000008100000000ULL:
+            return Mode::Arrive;
+        }
+        throw NotImplementedException("Invalid encoding");
+    }()};
+    if (mode != Mode::Sync) {
+        throw NotImplementedException("BAR mode {}", mode);
+    }
+    if (bar.is_a_imm == 0) {
+        throw NotImplementedException("Non-immediate input A");
+    }
+    if (bar.imm_a != 0) {
+        throw NotImplementedException("Non-zero input A");
+    }
+    if (bar.is_b_imm == 0) {
+        throw NotImplementedException("Non-immediate input B");
+    }
+    if (bar.imm_b != 0) {
+        throw NotImplementedException("Non-zero input B");
+    }
+    if (bar.pred != IR::Pred::PT && bar.neg_pred != 0) {
+        throw NotImplementedException("Non-true input predicate");
+    }
+    ir.Barrier();
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_extract.cpp
b/src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_extract.cpp new file mode 100755 index 000000000..9d5a87e52 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_extract.cpp @@ -0,0 +1,74 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void BFE(TranslatorVisitor& v, u64 insn, const IR::U32& src) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> offset_reg; + BitField<40, 1, u64> brev; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> is_signed; + } const bfe{insn}; + + const IR::U32 offset{v.ir.BitFieldExtract(src, v.ir.Imm32(0), v.ir.Imm32(8), false)}; + const IR::U32 count{v.ir.BitFieldExtract(src, v.ir.Imm32(8), v.ir.Imm32(8), false)}; + + // Common constants + const IR::U32 zero{v.ir.Imm32(0)}; + const IR::U32 one{v.ir.Imm32(1)}; + const IR::U32 max_size{v.ir.Imm32(32)}; + // Edge case conditions + const IR::U1 zero_count{v.ir.IEqual(count, zero)}; + const IR::U1 exceed_count{v.ir.IGreaterThanEqual(v.ir.IAdd(offset, count), max_size, false)}; + const IR::U1 replicate{v.ir.IGreaterThanEqual(offset, max_size, false)}; + + IR::U32 base{v.X(bfe.offset_reg)}; + if (bfe.brev != 0) { + base = v.ir.BitReverse(base); + } + IR::U32 result{v.ir.BitFieldExtract(base, offset, count, bfe.is_signed != 0)}; + if (bfe.is_signed != 0) { + const IR::U1 is_negative{v.ir.ILessThan(base, zero, true)}; + const IR::U32 replicated_bit{v.ir.Select(is_negative, v.ir.Imm32(-1), zero)}; + const IR::U32 exceed_bit{v.ir.BitFieldExtract(base, v.ir.Imm32(31), one, false)}; + // Replicate condition + result = IR::U32{v.ir.Select(replicate, replicated_bit, result)}; + // Exceeding condition + const IR::U32 exceed_result{v.ir.BitFieldInsert(result, exceed_bit, v.ir.Imm32(31), one)}; + result = IR::U32{v.ir.Select(exceed_count, exceed_result, result)}; + } + // Zero count condition + result = IR::U32{v.ir.Select(zero_count, zero, result)}; + + v.X(bfe.dest_reg, result); + + if (bfe.cc != 0) { + v.SetZFlag(v.ir.IEqual(result, zero)); + v.SetSFlag(v.ir.ILessThan(result, zero, true)); + v.ResetCFlag(); + v.ResetOFlag(); + } +} +} // Anonymous namespace + +void TranslatorVisitor::BFE_reg(u64 insn) { + BFE(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::BFE_cbuf(u64 insn) { + BFE(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::BFE_imm(u64 insn) { + BFE(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_insert.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_insert.cpp new file mode 100755 index 000000000..1e1ec2119 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/bitfield_insert.cpp @@ -0,0 +1,62 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
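// Editor's note (illustrative aside, not part of the patch): a simplified
// reference model of the signed BFE edge cases handled above — zero count
// yields zero, an offset past bit 31 replicates the source sign everywhere,
// and a field running past bit 31 is clipped at the sign bit. The patch
// encodes the same cases with a chain of Selects:
#include <cstdint>
#include <cstdio>

int32_t BfeSigned(uint32_t base, uint32_t offset, uint32_t count) {
    if (count == 0) {
        return 0;
    }
    if (offset >= 32) {
        return static_cast<int32_t>(base) >> 31; // all copies of the sign bit
    }
    if (count > 32 - offset) {
        count = 32 - offset; // clip the field at bit 31
    }
    const uint32_t raw = static_cast<uint32_t>((base >> offset) & ((uint64_t{1} << count) - 1));
    const uint32_t sign = uint32_t{1} << (count - 1);
    return static_cast<int32_t>((raw ^ sign) - sign); // sign-extend from the field's top bit
}

int main() {
    std::printf("%d %d\n", BfeSigned(0x80000000u, 31, 4), BfeSigned(0x12345678u, 8, 0)); // -1 0
}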
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void BFI(TranslatorVisitor& v, u64 insn, const IR::U32& src_a, const IR::U32& base) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> insert_reg; + BitField<47, 1, u64> cc; + } const bfi{insn}; + + const IR::U32 zero{v.ir.Imm32(0)}; + const IR::U32 offset{v.ir.BitFieldExtract(src_a, zero, v.ir.Imm32(8), false)}; + const IR::U32 unsafe_count{v.ir.BitFieldExtract(src_a, v.ir.Imm32(8), v.ir.Imm32(8), false)}; + const IR::U32 max_size{v.ir.Imm32(32)}; + + // Edge case conditions + const IR::U1 exceed_offset{v.ir.IGreaterThanEqual(offset, max_size, false)}; + const IR::U1 exceed_count{v.ir.IGreaterThan(unsafe_count, max_size, false)}; + + const IR::U32 remaining_size{v.ir.ISub(max_size, offset)}; + const IR::U32 safe_count{v.ir.Select(exceed_count, remaining_size, unsafe_count)}; + + const IR::U32 insert{v.X(bfi.insert_reg)}; + IR::U32 result{v.ir.BitFieldInsert(base, insert, offset, safe_count)}; + + result = IR::U32{v.ir.Select(exceed_offset, base, result)}; + + v.X(bfi.dest_reg, result); + if (bfi.cc != 0) { + v.SetZFlag(v.ir.IEqual(result, zero)); + v.SetSFlag(v.ir.ILessThan(result, zero, true)); + v.ResetCFlag(); + v.ResetOFlag(); + } +} +} // Anonymous namespace + +void TranslatorVisitor::BFI_reg(u64 insn) { + BFI(*this, insn, GetReg20(insn), GetReg39(insn)); +} + +void TranslatorVisitor::BFI_rc(u64 insn) { + BFI(*this, insn, GetReg39(insn), GetCbuf(insn)); +} + +void TranslatorVisitor::BFI_cr(u64 insn) { + BFI(*this, insn, GetCbuf(insn), GetReg39(insn)); +} + +void TranslatorVisitor::BFI_imm(u64 insn) { + BFI(*this, insn, GetImm20(insn), GetReg39(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/branch_indirect.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/branch_indirect.cpp new file mode 100755 index 000000000..371c0e0f7 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/branch_indirect.cpp @@ -0,0 +1,36 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void Check(u64 insn) { + union { + u64 raw; + BitField<5, 1, u64> cbuf_mode; + BitField<6, 1, u64> lmt; + } const encoding{insn}; + + if (encoding.cbuf_mode != 0) { + throw NotImplementedException("Constant buffer mode"); + } + if (encoding.lmt != 0) { + throw NotImplementedException("LMT"); + } +} +} // Anonymous namespace + +void TranslatorVisitor::BRX(u64 insn) { + Check(insn); +} + +void TranslatorVisitor::JMX(u64 insn) { + Check(insn); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h b/src/shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h new file mode 100755 index 000000000..fd73f656c --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h @@ -0,0 +1,57 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
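// Editor's note (illustrative aside, not part of the patch): a simplified
// reference model of the BFI clamping above. An offset of 32 or more leaves
// the base untouched; an oversized count is clamped to the bits remaining
// above the offset (the patch selects remaining_size on exceed_count):
#include <cstdint>
#include <cstdio>

uint32_t Bfi(uint32_t base, uint32_t insert, uint32_t offset, uint32_t count) {
    if (offset >= 32) {
        return base; // exceed_offset: nothing is inserted
    }
    if (count > 32 - offset) {
        count = 32 - offset; // clamp to the remaining field width
    }
    const uint64_t mask = ((uint64_t{1} << count) - 1) << offset;
    return static_cast<uint32_t>((base & ~mask) | ((uint64_t{insert} << offset) & mask));
}

int main() {
    std::printf("0x%08X\n", Bfi(0xFFFFFFFFu, 0u, 8, 64)); // 0x000000FF
}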
+ +#pragma once + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" + +namespace Shader::Maxwell { + +enum class FpRounding : u64 { + RN, + RM, + RP, + RZ, +}; + +enum class FmzMode : u64 { + None, + FTZ, + FMZ, + INVALIDFMZ3, +}; + +inline IR::FpRounding CastFpRounding(FpRounding fp_rounding) { + switch (fp_rounding) { + case FpRounding::RN: + return IR::FpRounding::RN; + case FpRounding::RM: + return IR::FpRounding::RM; + case FpRounding::RP: + return IR::FpRounding::RP; + case FpRounding::RZ: + return IR::FpRounding::RZ; + } + throw NotImplementedException("Invalid floating-point rounding {}", fp_rounding); +} + +inline IR::FmzMode CastFmzMode(FmzMode fmz_mode) { + switch (fmz_mode) { + case FmzMode::None: + return IR::FmzMode::None; + case FmzMode::FTZ: + return IR::FmzMode::FTZ; + case FmzMode::FMZ: + // FMZ is manually handled in the instruction + return IR::FmzMode::FTZ; + case FmzMode::INVALIDFMZ3: + break; + } + throw NotImplementedException("Invalid FMZ mode {}", fmz_mode); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.cpp new file mode 100755 index 000000000..10bb01d99 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.cpp @@ -0,0 +1,110 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" + +namespace Shader::Maxwell { +IR::U1 IntegerCompare(IR::IREmitter& ir, const IR::U32& operand_1, const IR::U32& operand_2, + CompareOp compare_op, bool is_signed) { + switch (compare_op) { + case CompareOp::False: + return ir.Imm1(false); + case CompareOp::LessThan: + return ir.ILessThan(operand_1, operand_2, is_signed); + case CompareOp::Equal: + return ir.IEqual(operand_1, operand_2); + case CompareOp::LessThanEqual: + return ir.ILessThanEqual(operand_1, operand_2, is_signed); + case CompareOp::GreaterThan: + return ir.IGreaterThan(operand_1, operand_2, is_signed); + case CompareOp::NotEqual: + return ir.INotEqual(operand_1, operand_2); + case CompareOp::GreaterThanEqual: + return ir.IGreaterThanEqual(operand_1, operand_2, is_signed); + case CompareOp::True: + return ir.Imm1(true); + default: + throw NotImplementedException("Invalid compare op {}", compare_op); + } +} + +IR::U1 PredicateCombine(IR::IREmitter& ir, const IR::U1& predicate_1, const IR::U1& predicate_2, + BooleanOp bop) { + switch (bop) { + case BooleanOp::AND: + return ir.LogicalAnd(predicate_1, predicate_2); + case BooleanOp::OR: + return ir.LogicalOr(predicate_1, predicate_2); + case BooleanOp::XOR: + return ir.LogicalXor(predicate_1, predicate_2); + default: + throw NotImplementedException("Invalid bop {}", bop); + } +} + +IR::U1 PredicateOperation(IR::IREmitter& ir, const IR::U32& result, PredicateOp op) { + switch (op) { + case PredicateOp::False: + return ir.Imm1(false); + case PredicateOp::True: + return ir.Imm1(true); + case PredicateOp::Zero: + return ir.IEqual(result, ir.Imm32(0)); + case PredicateOp::NonZero: + return ir.INotEqual(result, ir.Imm32(0)); + default: + throw NotImplementedException("Invalid Predicate operation {}", op); + } +} + +bool IsCompareOpOrdered(FPCompareOp op) { + switch (op) { + case FPCompareOp::LTU: 
+ case FPCompareOp::EQU: + case FPCompareOp::LEU: + case FPCompareOp::GTU: + case FPCompareOp::NEU: + case FPCompareOp::GEU: + return false; + default: + return true; + } +} + +IR::U1 FloatingPointCompare(IR::IREmitter& ir, const IR::F16F32F64& operand_1, + const IR::F16F32F64& operand_2, FPCompareOp compare_op, + IR::FpControl control) { + const bool ordered{IsCompareOpOrdered(compare_op)}; + switch (compare_op) { + case FPCompareOp::F: + return ir.Imm1(false); + case FPCompareOp::LT: + case FPCompareOp::LTU: + return ir.FPLessThan(operand_1, operand_2, control, ordered); + case FPCompareOp::EQ: + case FPCompareOp::EQU: + return ir.FPEqual(operand_1, operand_2, control, ordered); + case FPCompareOp::LE: + case FPCompareOp::LEU: + return ir.FPLessThanEqual(operand_1, operand_2, control, ordered); + case FPCompareOp::GT: + case FPCompareOp::GTU: + return ir.FPGreaterThan(operand_1, operand_2, control, ordered); + case FPCompareOp::NE: + case FPCompareOp::NEU: + return ir.FPNotEqual(operand_1, operand_2, control, ordered); + case FPCompareOp::GE: + case FPCompareOp::GEU: + return ir.FPGreaterThanEqual(operand_1, operand_2, control, ordered); + case FPCompareOp::NUM: + return ir.FPOrdered(operand_1, operand_2); + case FPCompareOp::Nan: + return ir.FPUnordered(operand_1, operand_2); + case FPCompareOp::T: + return ir.Imm1(true); + default: + throw NotImplementedException("Invalid FP compare op {}", compare_op); + } +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h b/src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h new file mode 100755 index 000000000..f584060b3 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h @@ -0,0 +1,24 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +[[nodiscard]] IR::U1 IntegerCompare(IR::IREmitter& ir, const IR::U32& operand_1, + const IR::U32& operand_2, CompareOp compare_op, bool is_signed); + +[[nodiscard]] IR::U1 PredicateCombine(IR::IREmitter& ir, const IR::U1& predicate_1, + const IR::U1& predicate_2, BooleanOp bop); + +[[nodiscard]] IR::U1 PredicateOperation(IR::IREmitter& ir, const IR::U32& result, PredicateOp op); + +[[nodiscard]] bool IsCompareOpOrdered(FPCompareOp op); + +[[nodiscard]] IR::U1 FloatingPointCompare(IR::IREmitter& ir, const IR::F16F32F64& operand_1, + const IR::F16F32F64& operand_2, FPCompareOp compare_op, + IR::FpControl control = {}); +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/condition_code_set.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/condition_code_set.cpp new file mode 100755 index 000000000..420f2fb94 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/condition_code_set.cpp @@ -0,0 +1,66 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
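// Editor's note (illustrative aside, not part of the patch): the U-suffixed
// compare ops above are the unordered variants. An ordered compare is false
// whenever either operand is NaN; its unordered twin is true in that case.
// Host-side sketch:
#include <cmath>
#include <cstdio>

bool LessThanOrdered(float a, float b) {
    return a < b; // false if a or b is NaN
}

bool LessThanUnordered(float a, float b) {
    return std::isnan(a) || std::isnan(b) || a < b;
}

int main() {
    const float nan = std::nanf("");
    std::printf("%d %d\n", LessThanOrdered(nan, 1.0f), LessThanUnordered(nan, 1.0f)); // 0 1
}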
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { + +void TranslatorVisitor::CSET(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 5, IR::FlowTest> cc_test; + BitField<39, 3, IR::Pred> bop_pred; + BitField<42, 1, u64> neg_bop_pred; + BitField<44, 1, u64> bf; + BitField<45, 2, BooleanOp> bop; + BitField<47, 1, u64> cc; + } const cset{insn}; + + const IR::U32 one_mask{ir.Imm32(-1)}; + const IR::U32 fp_one{ir.Imm32(0x3f800000)}; + const IR::U32 zero{ir.Imm32(0)}; + const IR::U32 pass_result{cset.bf == 0 ? one_mask : fp_one}; + const IR::U1 cc_test_result{ir.GetFlowTestResult(cset.cc_test)}; + const IR::U1 bop_pred{ir.GetPred(cset.bop_pred, cset.neg_bop_pred != 0)}; + const IR::U1 pred_result{PredicateCombine(ir, cc_test_result, bop_pred, cset.bop)}; + const IR::U32 result{ir.Select(pred_result, pass_result, zero)}; + X(cset.dest_reg, result); + if (cset.cc != 0) { + const IR::U1 is_zero{ir.IEqual(result, zero)}; + SetZFlag(is_zero); + if (cset.bf != 0) { + ResetSFlag(); + } else { + SetSFlag(ir.LogicalNot(is_zero)); + } + ResetOFlag(); + ResetCFlag(); + } +} + +void TranslatorVisitor::CSETP(u64 insn) { + union { + u64 raw; + BitField<0, 3, IR::Pred> dest_pred_b; + BitField<3, 3, IR::Pred> dest_pred_a; + BitField<8, 5, IR::FlowTest> cc_test; + BitField<39, 3, IR::Pred> bop_pred; + BitField<42, 1, u64> neg_bop_pred; + BitField<45, 2, BooleanOp> bop; + } const csetp{insn}; + + const BooleanOp bop{csetp.bop}; + const IR::U1 bop_pred{ir.GetPred(csetp.bop_pred, csetp.neg_bop_pred != 0)}; + const IR::U1 cc_test_result{ir.GetFlowTestResult(csetp.cc_test)}; + const IR::U1 result_a{PredicateCombine(ir, cc_test_result, bop_pred, bop)}; + const IR::U1 result_b{PredicateCombine(ir, ir.LogicalNot(cc_test_result), bop_pred, bop)}; + ir.SetPred(csetp.dest_pred_a, result_a); + ir.SetPred(csetp.dest_pred_b, result_b); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/double_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/double_add.cpp new file mode 100755 index 000000000..5a1b3a8fc --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/double_add.cpp @@ -0,0 +1,55 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
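// Editor's note (illustrative aside, not part of the patch): CSET above (and
// DSET below) pick between two encodings of "true" with the bf bit: an
// all-ones integer mask, or the bit pattern of 1.0f for float consumers:
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
    const uint32_t mask_true = 0xFFFFFFFFu;  // bf == 0
    const uint32_t float_true = 0x3F800000u; // bf != 0
    float f;
    std::memcpy(&f, &float_true, sizeof(f));
    std::printf("0x%08X %.1f\n", mask_true, f); // 0xFFFFFFFF 1.0
}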
+ +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { + +void DADD(TranslatorVisitor& v, u64 insn, const IR::F64& src_b) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 2, FpRounding> fp_rounding; + BitField<45, 1, u64> neg_b; + BitField<46, 1, u64> abs_a; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> neg_a; + BitField<49, 1, u64> abs_b; + } const dadd{insn}; + if (dadd.cc != 0) { + throw NotImplementedException("DADD CC"); + } + + const IR::F64 src_a{v.D(dadd.src_a_reg)}; + const IR::F64 op_a{v.ir.FPAbsNeg(src_a, dadd.abs_a != 0, dadd.neg_a != 0)}; + const IR::F64 op_b{v.ir.FPAbsNeg(src_b, dadd.abs_b != 0, dadd.neg_b != 0)}; + + const IR::FpControl control{ + .no_contraction = true, + .rounding = CastFpRounding(dadd.fp_rounding), + .fmz_mode = IR::FmzMode::None, + }; + + v.D(dadd.dest_reg, v.ir.FPAdd(op_a, op_b, control)); +} +} // Anonymous namespace + +void TranslatorVisitor::DADD_reg(u64 insn) { + DADD(*this, insn, GetDoubleReg20(insn)); +} + +void TranslatorVisitor::DADD_cbuf(u64 insn) { + DADD(*this, insn, GetDoubleCbuf(insn)); +} + +void TranslatorVisitor::DADD_imm(u64 insn) { + DADD(*this, insn, GetDoubleImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/double_compare_and_set.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/double_compare_and_set.cpp new file mode 100755 index 000000000..1173192e4 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/double_compare_and_set.cpp @@ -0,0 +1,72 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void DSET(TranslatorVisitor& v, u64 insn, const IR::F64& src_b) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + BitField<43, 1, u64> negate_a; + BitField<44, 1, u64> abs_b; + BitField<45, 2, BooleanOp> bop; + BitField<47, 1, u64> cc; + BitField<48, 4, FPCompareOp> compare_op; + BitField<52, 1, u64> bf; + BitField<53, 1, u64> negate_b; + BitField<54, 1, u64> abs_a; + } const dset{insn}; + + const IR::F64 op_a{v.ir.FPAbsNeg(v.D(dset.src_a_reg), dset.abs_a != 0, dset.negate_a != 0)}; + const IR::F64 op_b{v.ir.FPAbsNeg(src_b, dset.abs_b != 0, dset.negate_b != 0)}; + + IR::U1 pred{v.ir.GetPred(dset.pred)}; + if (dset.neg_pred != 0) { + pred = v.ir.LogicalNot(pred); + } + const IR::U1 cmp_result{FloatingPointCompare(v.ir, op_a, op_b, dset.compare_op)}; + const IR::U1 bop_result{PredicateCombine(v.ir, cmp_result, pred, dset.bop)}; + + const IR::U32 one_mask{v.ir.Imm32(-1)}; + const IR::U32 fp_one{v.ir.Imm32(0x3f800000)}; + const IR::U32 zero{v.ir.Imm32(0)}; + const IR::U32 pass_result{dset.bf == 0 ? 
one_mask : fp_one}; + const IR::U32 result{v.ir.Select(bop_result, pass_result, zero)}; + + v.X(dset.dest_reg, result); + if (dset.cc != 0) { + const IR::U1 is_zero{v.ir.IEqual(result, zero)}; + v.SetZFlag(is_zero); + if (dset.bf != 0) { + v.ResetSFlag(); + } else { + v.SetSFlag(v.ir.LogicalNot(is_zero)); + } + v.ResetCFlag(); + v.ResetOFlag(); + } +} +} // Anonymous namespace + +void TranslatorVisitor::DSET_reg(u64 insn) { + DSET(*this, insn, GetDoubleReg20(insn)); +} + +void TranslatorVisitor::DSET_cbuf(u64 insn) { + DSET(*this, insn, GetDoubleCbuf(insn)); +} + +void TranslatorVisitor::DSET_imm(u64 insn) { + DSET(*this, insn, GetDoubleImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/double_fused_multiply_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/double_fused_multiply_add.cpp new file mode 100755 index 000000000..f66097014 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/double_fused_multiply_add.cpp @@ -0,0 +1,58 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { + +void DFMA(TranslatorVisitor& v, u64 insn, const IR::F64& src_b, const IR::F64& src_c) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<50, 2, FpRounding> fp_rounding; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> neg_b; + BitField<49, 1, u64> neg_c; + } const dfma{insn}; + + if (dfma.cc != 0) { + throw NotImplementedException("DFMA CC"); + } + + const IR::F64 src_a{v.D(dfma.src_a_reg)}; + const IR::F64 op_b{v.ir.FPAbsNeg(src_b, false, dfma.neg_b != 0)}; + const IR::F64 op_c{v.ir.FPAbsNeg(src_c, false, dfma.neg_c != 0)}; + + const IR::FpControl control{ + .no_contraction = true, + .rounding = CastFpRounding(dfma.fp_rounding), + .fmz_mode = IR::FmzMode::None, + }; + + v.D(dfma.dest_reg, v.ir.FPFma(src_a, op_b, op_c, control)); +} +} // Anonymous namespace + +void TranslatorVisitor::DFMA_reg(u64 insn) { + DFMA(*this, insn, GetDoubleReg20(insn), GetDoubleReg39(insn)); +} + +void TranslatorVisitor::DFMA_cr(u64 insn) { + DFMA(*this, insn, GetDoubleCbuf(insn), GetDoubleReg39(insn)); +} + +void TranslatorVisitor::DFMA_rc(u64 insn) { + DFMA(*this, insn, GetDoubleReg39(insn), GetDoubleCbuf(insn)); +} + +void TranslatorVisitor::DFMA_imm(u64 insn) { + DFMA(*this, insn, GetDoubleImm20(insn), GetDoubleReg39(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/double_min_max.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/double_min_max.cpp new file mode 100755 index 000000000..6b551847c --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/double_min_max.cpp @@ -0,0 +1,55 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
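// Editor's note (illustrative aside, not part of the patch): DFMA above emits
// a true fused multiply-add with no_contraction set. Fusing rounds once; a
// separate multiply and add round twice and can give a different result:
#include <cmath>
#include <cstdio>

int main() {
    const double a = 1.0 + 0x1p-52;
    const double b = 1.0 - 0x1p-52;
    // (1 + u)(1 - u) - 1 is exactly -u*u; the unfused product rounds to 1.0 first
    std::printf("%a %a\n", a * b - 1.0, std::fma(a, b, -1.0)); // 0x0p+0 -0x1p-104
}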
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void DMNMX(TranslatorVisitor& v, u64 insn, const IR::F64& src_b) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + BitField<45, 1, u64> negate_b; + BitField<46, 1, u64> abs_a; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> negate_a; + BitField<49, 1, u64> abs_b; + } const dmnmx{insn}; + + if (dmnmx.cc != 0) { + throw NotImplementedException("DMNMX CC"); + } + + const IR::U1 pred{v.ir.GetPred(dmnmx.pred)}; + const IR::F64 op_a{v.ir.FPAbsNeg(v.D(dmnmx.src_a_reg), dmnmx.abs_a != 0, dmnmx.negate_a != 0)}; + const IR::F64 op_b{v.ir.FPAbsNeg(src_b, dmnmx.abs_b != 0, dmnmx.negate_b != 0)}; + + IR::F64 max{v.ir.FPMax(op_a, op_b)}; + IR::F64 min{v.ir.FPMin(op_a, op_b)}; + + if (dmnmx.neg_pred != 0) { + std::swap(min, max); + } + v.D(dmnmx.dest_reg, IR::F64{v.ir.Select(pred, min, max)}); +} +} // Anonymous namespace + +void TranslatorVisitor::DMNMX_reg(u64 insn) { + DMNMX(*this, insn, GetDoubleReg20(insn)); +} + +void TranslatorVisitor::DMNMX_cbuf(u64 insn) { + DMNMX(*this, insn, GetDoubleCbuf(insn)); +} + +void TranslatorVisitor::DMNMX_imm(u64 insn) { + DMNMX(*this, insn, GetDoubleImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/double_multiply.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/double_multiply.cpp new file mode 100755 index 000000000..c0159fb65 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/double_multiply.cpp @@ -0,0 +1,50 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
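// Editor's note (illustrative aside, not part of the patch): DMNMX above
// selects between FPMin and FPMax per-thread via the predicate. GPU min/max
// typically returns the other operand when exactly one input is a quiet NaN,
// behaviour that std::fmin/std::fmax also model on the host:
#include <cmath>
#include <cstdio>

int main() {
    const double nan = std::nan("");
    std::printf("%.1f %.1f\n", std::fmax(nan, 2.0), std::fmin(nan, 2.0)); // 2.0 2.0
}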
+ +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { + +void DMUL(TranslatorVisitor& v, u64 insn, const IR::F64& src_b) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 2, FpRounding> fp_rounding; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> neg; + } const dmul{insn}; + + if (dmul.cc != 0) { + throw NotImplementedException("DMUL CC"); + } + + const IR::F64 src_a{v.ir.FPAbsNeg(v.D(dmul.src_a_reg), false, dmul.neg != 0)}; + const IR::FpControl control{ + .no_contraction = true, + .rounding = CastFpRounding(dmul.fp_rounding), + .fmz_mode = IR::FmzMode::None, + }; + + v.D(dmul.dest_reg, v.ir.FPMul(src_a, src_b, control)); +} +} // Anonymous namespace + +void TranslatorVisitor::DMUL_reg(u64 insn) { + DMUL(*this, insn, GetDoubleReg20(insn)); +} + +void TranslatorVisitor::DMUL_cbuf(u64 insn) { + DMUL(*this, insn, GetDoubleCbuf(insn)); +} + +void TranslatorVisitor::DMUL_imm(u64 insn) { + DMUL(*this, insn, GetDoubleImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/double_set_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/double_set_predicate.cpp new file mode 100755 index 000000000..b8e74ee44 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/double_set_predicate.cpp @@ -0,0 +1,54 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void DSETP(TranslatorVisitor& v, u64 insn, const IR::F64& src_b) { + union { + u64 insn; + BitField<0, 3, IR::Pred> dest_pred_b; + BitField<3, 3, IR::Pred> dest_pred_a; + BitField<6, 1, u64> negate_b; + BitField<7, 1, u64> abs_a; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 3, IR::Pred> bop_pred; + BitField<42, 1, u64> neg_bop_pred; + BitField<43, 1, u64> negate_a; + BitField<44, 1, u64> abs_b; + BitField<45, 2, BooleanOp> bop; + BitField<48, 4, FPCompareOp> compare_op; + } const dsetp{insn}; + + const IR::F64 op_a{v.ir.FPAbsNeg(v.D(dsetp.src_a_reg), dsetp.abs_a != 0, dsetp.negate_a != 0)}; + const IR::F64 op_b{v.ir.FPAbsNeg(src_b, dsetp.abs_b != 0, dsetp.negate_b != 0)}; + + const BooleanOp bop{dsetp.bop}; + const FPCompareOp compare_op{dsetp.compare_op}; + const IR::U1 comparison{FloatingPointCompare(v.ir, op_a, op_b, compare_op)}; + const IR::U1 bop_pred{v.ir.GetPred(dsetp.bop_pred, dsetp.neg_bop_pred != 0)}; + const IR::U1 result_a{PredicateCombine(v.ir, comparison, bop_pred, bop)}; + const IR::U1 result_b{PredicateCombine(v.ir, v.ir.LogicalNot(comparison), bop_pred, bop)}; + v.ir.SetPred(dsetp.dest_pred_a, result_a); + v.ir.SetPred(dsetp.dest_pred_b, result_b); +} +} // Anonymous namespace + +void TranslatorVisitor::DSETP_reg(u64 insn) { + DSETP(*this, insn, GetDoubleReg20(insn)); +} + +void TranslatorVisitor::DSETP_cbuf(u64 insn) { + DSETP(*this, insn, GetDoubleCbuf(insn)); +} + +void TranslatorVisitor::DSETP_imm(u64 insn) { + DSETP(*this, insn, GetDoubleImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git 
a/src/shader_recompiler/frontend/maxwell/translate/impl/exit_program.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/exit_program.cpp new file mode 100755 index 000000000..c2443c886 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/exit_program.cpp @@ -0,0 +1,43 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void ExitFragment(TranslatorVisitor& v) { + const ProgramHeader sph{v.env.SPH()}; + IR::Reg src_reg{IR::Reg::R0}; + for (u32 render_target = 0; render_target < 8; ++render_target) { + const std::array mask{sph.ps.EnabledOutputComponents(render_target)}; + for (u32 component = 0; component < 4; ++component) { + if (!mask[component]) { + continue; + } + v.ir.SetFragColor(render_target, component, v.F(src_reg)); + ++src_reg; + } + } + if (sph.ps.omap.sample_mask != 0) { + v.ir.SetSampleMask(v.X(src_reg)); + } + if (sph.ps.omap.depth != 0) { + v.ir.SetFragDepth(v.F(src_reg + 1)); + } +} +} // Anonymous namespace + +void TranslatorVisitor::EXIT() { + switch (env.ShaderStage()) { + case Stage::Fragment: + ExitFragment(*this); + break; + default: + break; + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/find_leading_one.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/find_leading_one.cpp new file mode 100755 index 000000000..f0cb25d61 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/find_leading_one.cpp @@ -0,0 +1,47 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void FLO(TranslatorVisitor& v, u64 insn, IR::U32 src) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<40, 1, u64> tilde; + BitField<41, 1, u64> shift; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> is_signed; + } const flo{insn}; + + if (flo.cc != 0) { + throw NotImplementedException("CC"); + } + if (flo.tilde != 0) { + src = v.ir.BitwiseNot(src); + } + IR::U32 result{flo.is_signed != 0 ? v.ir.FindSMsb(src) : v.ir.FindUMsb(src)}; + if (flo.shift != 0) { + const IR::U1 not_found{v.ir.IEqual(result, v.ir.Imm32(-1))}; + result = IR::U32{v.ir.Select(not_found, result, v.ir.BitwiseXor(result, v.ir.Imm32(31)))}; + } + v.X(flo.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::FLO_reg(u64 insn) { + FLO(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::FLO_cbuf(u64 insn) { + FLO(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::FLO_imm(u64 insn) { + FLO(*this, insn, GetImm20(insn)); +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_add.cpp new file mode 100755 index 000000000..b8c89810c --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_add.cpp @@ -0,0 +1,82 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
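// The shift variant of FLO above turns a most-significant-bit index into a shift amount by
// XOR-ing with 31, while the "not found" value -1 passes through the Select unchanged. A
// standalone C++ sketch of that post-processing (illustrative only, not part of this patch):
#include <cstdint>
#include <cstdio>

static int32_t FindLeadingOneShifted(uint32_t src) {
    int32_t msb = -1;
    for (int32_t bit = 31; bit >= 0; --bit) { // Models v.ir.FindUMsb(src)
        if ((src >> bit) & 1) {
            msb = bit;
            break;
        }
    }
    // For msb in [0, 31], msb ^ 31 == 31 - msb, i.e. the left-shift distance to normalize
    return msb == -1 ? msb : (msb ^ 31);
}

int main() {
    std::printf("%d\n", FindLeadingOneShifted(1u));          // 31
    std::printf("%d\n", FindLeadingOneShifted(0x80000000u)); // 0
    std::printf("%d\n", FindLeadingOneShifted(0u));          // -1 (not found)
}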
+ +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void FADD(TranslatorVisitor& v, u64 insn, bool sat, bool cc, bool ftz, FpRounding fp_rounding, + const IR::F32& src_b, bool abs_a, bool neg_a, bool abs_b, bool neg_b) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a; + } const fadd{insn}; + + if (cc) { + throw NotImplementedException("FADD CC"); + } + const IR::F32 op_a{v.ir.FPAbsNeg(v.F(fadd.src_a), abs_a, neg_a)}; + const IR::F32 op_b{v.ir.FPAbsNeg(src_b, abs_b, neg_b)}; + IR::FpControl control{ + .no_contraction = true, + .rounding = CastFpRounding(fp_rounding), + .fmz_mode = (ftz ? IR::FmzMode::FTZ : IR::FmzMode::None), + }; + IR::F32 value{v.ir.FPAdd(op_a, op_b, control)}; + if (sat) { + value = v.ir.FPSaturate(value); + } + v.F(fadd.dest_reg, value); +} + +void FADD(TranslatorVisitor& v, u64 insn, const IR::F32& src_b) { + union { + u64 raw; + BitField<39, 2, FpRounding> fp_rounding; + BitField<44, 1, u64> ftz; + BitField<45, 1, u64> neg_b; + BitField<46, 1, u64> abs_a; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> neg_a; + BitField<49, 1, u64> abs_b; + BitField<50, 1, u64> sat; + } const fadd{insn}; + + FADD(v, insn, fadd.sat != 0, fadd.cc != 0, fadd.ftz != 0, fadd.fp_rounding, src_b, + fadd.abs_a != 0, fadd.neg_a != 0, fadd.abs_b != 0, fadd.neg_b != 0); +} +} // Anonymous namespace + +void TranslatorVisitor::FADD_reg(u64 insn) { + FADD(*this, insn, GetFloatReg20(insn)); +} + +void TranslatorVisitor::FADD_cbuf(u64 insn) { + FADD(*this, insn, GetFloatCbuf(insn)); +} + +void TranslatorVisitor::FADD_imm(u64 insn) { + FADD(*this, insn, GetFloatImm20(insn)); +} + +void TranslatorVisitor::FADD32I(u64 insn) { + union { + u64 raw; + BitField<55, 1, u64> ftz; + BitField<56, 1, u64> neg_a; + BitField<54, 1, u64> abs_a; + BitField<52, 1, u64> cc; + BitField<53, 1, u64> neg_b; + BitField<57, 1, u64> abs_b; + } const fadd32i{insn}; + + FADD(*this, insn, false, fadd32i.cc != 0, fadd32i.ftz != 0, FpRounding::RN, GetFloatImm32(insn), + fadd32i.abs_a != 0, fadd32i.neg_a != 0, fadd32i.abs_b != 0, fadd32i.neg_b != 0); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare.cpp new file mode 100755 index 000000000..7127ebf54 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare.cpp @@ -0,0 +1,55 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void FCMP(TranslatorVisitor& v, u64 insn, const IR::U32& src_a, const IR::F32& operand) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<47, 1, u64> ftz; + BitField<48, 4, FPCompareOp> compare_op; + } const fcmp{insn}; + + const IR::F32 zero{v.ir.Imm32(0.0f)}; + const IR::FpControl control{.fmz_mode = (fcmp.ftz != 0 ? 
IR::FmzMode::FTZ : IR::FmzMode::None)}; + const IR::U1 cmp_result{FloatingPointCompare(v.ir, operand, zero, fcmp.compare_op, control)}; + const IR::U32 src_reg{v.X(fcmp.src_reg)}; + const IR::U32 result{v.ir.Select(cmp_result, src_reg, src_a)}; + + v.X(fcmp.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::FCMP_reg(u64 insn) { + FCMP(*this, insn, GetReg20(insn), GetFloatReg39(insn)); +} + +void TranslatorVisitor::FCMP_rc(u64 insn) { + FCMP(*this, insn, GetReg39(insn), GetFloatCbuf(insn)); +} + +void TranslatorVisitor::FCMP_cr(u64 insn) { + FCMP(*this, insn, GetCbuf(insn), GetFloatReg39(insn)); +} + +void TranslatorVisitor::FCMP_imm(u64 insn) { + union { + u64 raw; + BitField<20, 19, u64> value; + BitField<56, 1, u64> is_negative; + } const fcmp{insn}; + const u32 sign_bit{fcmp.is_negative != 0 ? (1U << 31) : 0}; + const u32 value{static_cast<u32>(fcmp.value) << 12}; + + FCMP(*this, insn, ir.Imm32(value | sign_bit), GetFloatReg39(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare_and_set.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare_and_set.cpp new file mode 100755 index 000000000..eece4f28f --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_compare_and_set.cpp @@ -0,0 +1,78 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void FSET(TranslatorVisitor& v, u64 insn, const IR::F32& src_b) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + BitField<43, 1, u64> negate_a; + BitField<44, 1, u64> abs_b; + BitField<45, 2, BooleanOp> bop; + BitField<47, 1, u64> cc; + BitField<48, 4, FPCompareOp> compare_op; + BitField<52, 1, u64> bf; + BitField<53, 1, u64> negate_b; + BitField<54, 1, u64> abs_a; + BitField<55, 1, u64> ftz; + } const fset{insn}; + + const IR::F32 op_a{v.ir.FPAbsNeg(v.F(fset.src_a_reg), fset.abs_a != 0, fset.negate_a != 0)}; + const IR::F32 op_b = v.ir.FPAbsNeg(src_b, fset.abs_b != 0, fset.negate_b != 0); + const IR::FpControl control{ + .no_contraction = false, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = (fset.ftz != 0 ? IR::FmzMode::FTZ : IR::FmzMode::None), + }; + + IR::U1 pred{v.ir.GetPred(fset.pred)}; + if (fset.neg_pred != 0) { + pred = v.ir.LogicalNot(pred); + } + const IR::U1 cmp_result{FloatingPointCompare(v.ir, op_a, op_b, fset.compare_op, control)}; + const IR::U1 bop_result{PredicateCombine(v.ir, cmp_result, pred, fset.bop)}; + + const IR::U32 one_mask{v.ir.Imm32(-1)}; + const IR::U32 fp_one{v.ir.Imm32(0x3f800000)}; + const IR::U32 zero{v.ir.Imm32(0)}; + const IR::U32 pass_result{fset.bf == 0 ?
one_mask : fp_one}; + const IR::U32 result{v.ir.Select(bop_result, pass_result, zero)}; + + v.X(fset.dest_reg, result); + if (fset.cc != 0) { + const IR::U1 is_zero{v.ir.IEqual(result, zero)}; + v.SetZFlag(is_zero); + if (fset.bf != 0) { + v.ResetSFlag(); + } else { + v.SetSFlag(v.ir.LogicalNot(is_zero)); + } + v.ResetCFlag(); + v.ResetOFlag(); + } +} +} // Anonymous namespace + +void TranslatorVisitor::FSET_reg(u64 insn) { + FSET(*this, insn, GetFloatReg20(insn)); +} + +void TranslatorVisitor::FSET_cbuf(u64 insn) { + FSET(*this, insn, GetFloatCbuf(insn)); +} + +void TranslatorVisitor::FSET_imm(u64 insn) { + FSET(*this, insn, GetFloatImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_floating_point.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_floating_point.cpp new file mode 100755 index 000000000..02ab023c1 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_floating_point.cpp @@ -0,0 +1,214 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h" + +namespace Shader::Maxwell { +namespace { +enum class FloatFormat : u64 { + F16 = 1, + F32 = 2, + F64 = 3, +}; + +enum class RoundingOp : u64 { + None = 0, + Pass = 3, + Round = 8, + Floor = 9, + Ceil = 10, + Trunc = 11, +}; + +[[nodiscard]] u32 WidthSize(FloatFormat width) { + switch (width) { + case FloatFormat::F16: + return 16; + case FloatFormat::F32: + return 32; + case FloatFormat::F64: + return 64; + default: + throw NotImplementedException("Invalid width {}", width); + } +} + +void F2F(TranslatorVisitor& v, u64 insn, const IR::F16F32F64& src_a, bool abs) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<44, 1, u64> ftz; + BitField<45, 1, u64> neg; + BitField<47, 1, u64> cc; + BitField<50, 1, u64> sat; + BitField<39, 4, u64> rounding_op; + BitField<39, 2, FpRounding> rounding; + BitField<10, 2, FloatFormat> src_size; + BitField<8, 2, FloatFormat> dst_size; + + [[nodiscard]] RoundingOp RoundingOperation() const { + constexpr u64 rounding_mask = 0x0B; + return static_cast<RoundingOp>(rounding_op.Value() & rounding_mask); + } + } const f2f{insn}; + + if (f2f.cc != 0) { + throw NotImplementedException("F2F CC"); + } + + IR::F16F32F64 input{v.ir.FPAbsNeg(src_a, abs, f2f.neg != 0)}; + + const bool any_fp64{f2f.src_size == FloatFormat::F64 || f2f.dst_size == FloatFormat::F64}; + IR::FpControl fp_control{ + .no_contraction = false, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = (f2f.ftz != 0 && !any_fp64 ?
IR::FmzMode::FTZ : IR::FmzMode::None), + }; + if (f2f.src_size != f2f.dst_size) { + fp_control.rounding = CastFpRounding(f2f.rounding); + input = v.ir.FPConvert(WidthSize(f2f.dst_size), input, fp_control); + } else { + switch (f2f.RoundingOperation()) { + case RoundingOp::None: + case RoundingOp::Pass: + // Make sure NANs are handled properly + switch (f2f.src_size) { + case FloatFormat::F16: + input = v.ir.FPAdd(input, v.ir.FPConvert(16, v.ir.Imm32(0.0f)), fp_control); + break; + case FloatFormat::F32: + input = v.ir.FPAdd(input, v.ir.Imm32(0.0f), fp_control); + break; + case FloatFormat::F64: + input = v.ir.FPAdd(input, v.ir.Imm64(0.0), fp_control); + break; + } + break; + case RoundingOp::Round: + input = v.ir.FPRoundEven(input, fp_control); + break; + case RoundingOp::Floor: + input = v.ir.FPFloor(input, fp_control); + break; + case RoundingOp::Ceil: + input = v.ir.FPCeil(input, fp_control); + break; + case RoundingOp::Trunc: + input = v.ir.FPTrunc(input, fp_control); + break; + default: + throw NotImplementedException("Unimplemented rounding mode {}", f2f.rounding.Value()); + } + } + if (f2f.sat != 0 && !any_fp64) { + input = v.ir.FPSaturate(input); + } + + switch (f2f.dst_size) { + case FloatFormat::F16: { + const IR::F16 imm{v.ir.FPConvert(16, v.ir.Imm32(0.0f))}; + v.X(f2f.dest_reg, v.ir.PackFloat2x16(v.ir.CompositeConstruct(input, imm))); + break; + } + case FloatFormat::F32: + v.F(f2f.dest_reg, input); + break; + case FloatFormat::F64: + v.D(f2f.dest_reg, input); + break; + default: + throw NotImplementedException("Invalid dest format {}", f2f.dst_size.Value()); + } +} +} // Anonymous namespace + +void TranslatorVisitor::F2F_reg(u64 insn) { + union { + u64 insn; + BitField<49, 1, u64> abs; + BitField<10, 2, FloatFormat> src_size; + BitField<41, 1, u64> selector; + } const f2f{insn}; + + IR::F16F32F64 src_a; + switch (f2f.src_size) { + case FloatFormat::F16: { + auto [lhs_a, rhs_a]{Extract(ir, GetReg20(insn), Swizzle::H1_H0)}; + src_a = f2f.selector != 0 ? rhs_a : lhs_a; + break; + } + case FloatFormat::F32: + src_a = GetFloatReg20(insn); + break; + case FloatFormat::F64: + src_a = GetDoubleReg20(insn); + break; + default: + throw NotImplementedException("Invalid source format {}", f2f.src_size.Value()); + } + F2F(*this, insn, src_a, f2f.abs != 0); +} + +void TranslatorVisitor::F2F_cbuf(u64 insn) { + union { + u64 insn; + BitField<49, 1, u64> abs; + BitField<10, 2, FloatFormat> src_size; + BitField<41, 1, u64> selector; + } const f2f{insn}; + + IR::F16F32F64 src_a; + switch (f2f.src_size) { + case FloatFormat::F16: { + auto [lhs_a, rhs_a]{Extract(ir, GetCbuf(insn), Swizzle::H1_H0)}; + src_a = f2f.selector != 0 ? rhs_a : lhs_a; + break; + } + case FloatFormat::F32: + src_a = GetFloatCbuf(insn); + break; + case FloatFormat::F64: + src_a = GetDoubleCbuf(insn); + break; + default: + throw NotImplementedException("Invalid source format {}", f2f.src_size.Value()); + } + F2F(*this, insn, src_a, f2f.abs != 0); +} + +void TranslatorVisitor::F2F_imm([[maybe_unused]] u64 insn) { + union { + u64 insn; + BitField<49, 1, u64> abs; + BitField<10, 2, FloatFormat> src_size; + BitField<41, 1, u64> selector; + BitField<20, 19, u64> imm; + BitField<56, 1, u64> imm_neg; + } const f2f{insn}; + + IR::F16F32F64 src_a; + switch (f2f.src_size) { + case FloatFormat::F16: { + const u32 imm{static_cast<u32>(f2f.imm & 0x0000ffff)}; + const IR::Value vector{ir.UnpackFloat2x16(ir.Imm32(imm | (imm << 16)))}; + src_a = IR::F16{ir.CompositeExtract(vector, f2f.selector != 0 ?
0 : 1)}; + if (f2f.imm_neg != 0) { + throw NotImplementedException("Neg bit on F16"); + } + break; + } + case FloatFormat::F32: + src_a = GetFloatImm20(insn); + break; + case FloatFormat::F64: + src_a = GetDoubleImm20(insn); + break; + default: + throw NotImplementedException("Invalid source format {}", f2f.src_size.Value()); + } + F2F(*this, insn, src_a, f2f.abs != 0); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_integer.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_integer.cpp new file mode 100755 index 000000000..92b1ce015 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_conversion_integer.cpp @@ -0,0 +1,253 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <limits> + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class DestFormat : u64 { + Invalid, + I16, + I32, + I64, +}; +enum class SrcFormat : u64 { + Invalid, + F16, + F32, + F64, +}; +enum class Rounding : u64 { + Round, + Floor, + Ceil, + Trunc, +}; + +union F2I { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 2, DestFormat> dest_format; + BitField<10, 2, SrcFormat> src_format; + BitField<12, 1, u64> is_signed; + BitField<39, 2, Rounding> rounding; + BitField<41, 1, u64> half; + BitField<44, 1, u64> ftz; + BitField<45, 1, u64> abs; + BitField<47, 1, u64> cc; + BitField<49, 1, u64> neg; +}; + +size_t BitSize(DestFormat dest_format) { + switch (dest_format) { + case DestFormat::I16: + return 16; + case DestFormat::I32: + return 32; + case DestFormat::I64: + return 64; + default: + throw NotImplementedException("Invalid destination format {}", dest_format); + } +} + +std::pair<f64, f64> ClampBounds(DestFormat format, bool is_signed) { + if (is_signed) { + switch (format) { + case DestFormat::I16: + return {static_cast<f64>(std::numeric_limits<s16>::max()), + static_cast<f64>(std::numeric_limits<s16>::min())}; + case DestFormat::I32: + return {static_cast<f64>(std::numeric_limits<s32>::max()), + static_cast<f64>(std::numeric_limits<s32>::min())}; + case DestFormat::I64: + return {static_cast<f64>(std::numeric_limits<s64>::max()), + static_cast<f64>(std::numeric_limits<s64>::min())}; + default: + break; + } + } else { + switch (format) { + case DestFormat::I16: + return {static_cast<f64>(std::numeric_limits<u16>::max()), + static_cast<f64>(std::numeric_limits<u16>::min())}; + case DestFormat::I32: + return {static_cast<f64>(std::numeric_limits<u32>::max()), + static_cast<f64>(std::numeric_limits<u32>::min())}; + case DestFormat::I64: + return {static_cast<f64>(std::numeric_limits<u64>::max()), + static_cast<f64>(std::numeric_limits<u64>::min())}; + default: + break; + } + } + throw NotImplementedException("Invalid destination format {}", format); +} + +IR::F64 UnpackCbuf(TranslatorVisitor& v, u64 insn) { + union { + u64 raw; + BitField<20, 14, s64> offset; + BitField<34, 5, u64> binding; + } const cbuf{insn}; + if (cbuf.binding >= 18) { + throw NotImplementedException("Out of bounds constant buffer binding {}", cbuf.binding); + } + if (cbuf.offset >= 0x4'000 || cbuf.offset < 0) { + throw NotImplementedException("Out of bounds constant buffer offset {}", cbuf.offset * 4); + } + if (cbuf.offset % 2 != 0) { + throw NotImplementedException("Unaligned F64 constant buffer offset {}", cbuf.offset * 4); + } +
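+ // The offset field counts 32-bit words, so an aligned f64 occupies words [offset, offset + 1].
+ // Only the high word (byte offset offset * 4 + 4) is fetched below; the low word is zeroed,
+ // keeping just the sign, exponent, and upper mantissa bits of the constant.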
const IR::U32 binding{v.ir.Imm32(static_cast<u32>(cbuf.binding))}; + const IR::U32 byte_offset{v.ir.Imm32(static_cast<u32>(cbuf.offset) * 4 + 4)}; + const IR::U32 cbuf_data{v.ir.GetCbuf(binding, byte_offset)}; + const IR::Value vector{v.ir.CompositeConstruct(v.ir.Imm32(0U), cbuf_data)}; + return v.ir.PackDouble2x32(vector); +} + +void TranslateF2I(TranslatorVisitor& v, u64 insn, const IR::F16F32F64& src_a) { + // F2I is used to convert from a floating point value to an integer + const F2I f2i{insn}; + + const bool denorm_cares{f2i.src_format != SrcFormat::F16 && f2i.src_format != SrcFormat::F64 && + f2i.dest_format != DestFormat::I64}; + IR::FmzMode fmz_mode{IR::FmzMode::DontCare}; + if (denorm_cares) { + fmz_mode = f2i.ftz != 0 ? IR::FmzMode::FTZ : IR::FmzMode::None; + } + const IR::FpControl fp_control{ + .no_contraction = true, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = fmz_mode, + }; + const IR::F16F32F64 op_a{v.ir.FPAbsNeg(src_a, f2i.abs != 0, f2i.neg != 0)}; + const IR::F16F32F64 rounded_value{[&] { + switch (f2i.rounding) { + case Rounding::Round: + return v.ir.FPRoundEven(op_a, fp_control); + case Rounding::Floor: + return v.ir.FPFloor(op_a, fp_control); + case Rounding::Ceil: + return v.ir.FPCeil(op_a, fp_control); + case Rounding::Trunc: + return v.ir.FPTrunc(op_a, fp_control); + default: + throw NotImplementedException("Invalid F2I rounding {}", f2i.rounding.Value()); + } + }()}; + const bool is_signed{f2i.is_signed != 0}; + const auto [max_bound, min_bound] = ClampBounds(f2i.dest_format, is_signed); + + IR::F16F32F64 intermediate; + switch (f2i.src_format) { + case SrcFormat::F16: { + const IR::F16 max_val{v.ir.FPConvert(16, v.ir.Imm32(static_cast<f32>(max_bound)))}; + const IR::F16 min_val{v.ir.FPConvert(16, v.ir.Imm32(static_cast<f32>(min_bound)))}; + intermediate = v.ir.FPClamp(rounded_value, min_val, max_val); + break; + } + case SrcFormat::F32: { + const IR::F32 max_val{v.ir.Imm32(static_cast<f32>(max_bound))}; + const IR::F32 min_val{v.ir.Imm32(static_cast<f32>(min_bound))}; + intermediate = v.ir.FPClamp(rounded_value, min_val, max_val); + break; + } + case SrcFormat::F64: { + const IR::F64 max_val{v.ir.Imm64(max_bound)}; + const IR::F64 min_val{v.ir.Imm64(min_bound)}; + intermediate = v.ir.FPClamp(rounded_value, min_val, max_val); + break; + } + default: + throw NotImplementedException("Invalid F2I source format {}", f2i.src_format.Value()); + } + + const size_t bitsize{std::max<size_t>(32, BitSize(f2i.dest_format))}; + IR::U16U32U64 result{v.ir.ConvertFToI(bitsize, is_signed, intermediate)}; + + bool handled_special_case = false; + const bool special_nan_cases = + (f2i.src_format == SrcFormat::F64) != (f2i.dest_format == DestFormat::I64); + if (special_nan_cases) { + if (f2i.dest_format == DestFormat::I32) { + handled_special_case = true; + result = IR::U32{v.ir.Select(v.ir.FPIsNan(op_a), v.ir.Imm32(0x8000'0000U), result)}; + } else if (f2i.dest_format == DestFormat::I64) { + handled_special_case = true; + result = IR::U64{ + v.ir.Select(v.ir.FPIsNan(op_a), v.ir.Imm64(0x8000'0000'0000'0000UL), result)}; + } + } + if (!handled_special_case && is_signed) { + if (bitsize != 64) { + result = IR::U32{v.ir.Select(v.ir.FPIsNan(op_a), v.ir.Imm32(0U), result)}; + } else { + result = IR::U64{v.ir.Select(v.ir.FPIsNan(op_a), v.ir.Imm64(u64{0}), result)}; + } + } + + if (bitsize == 64) { + v.L(f2i.dest_reg, result); + } else { + v.X(f2i.dest_reg, result); + } + + if (f2i.cc != 0) { + throw NotImplementedException("F2I CC"); + } +} +} // Anonymous namespace + +void TranslatorVisitor::F2I_reg(u64 insn) { +
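+ // Decode the source operand by src_format: F16 picks one 16-bit half of the source register
+ // (selected by the half bit), F32 reads it as a single float, and F64 combines the register
+ // pair {Rn, Rn+1} into one double.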
union { + u64 raw; + F2I base; + BitField<20, 8, IR::Reg> src_reg; + } const f2i{insn}; + + const IR::F16F32F64 op_a{[&]() -> IR::F16F32F64 { + switch (f2i.base.src_format) { + case SrcFormat::F16: + return IR::F16{ir.CompositeExtract(ir.UnpackFloat2x16(X(f2i.src_reg)), f2i.base.half)}; + case SrcFormat::F32: + return F(f2i.src_reg); + case SrcFormat::F64: + return ir.PackDouble2x32(ir.CompositeConstruct(X(f2i.src_reg), X(f2i.src_reg + 1))); + default: + throw NotImplementedException("Invalid F2I source format {}", + f2i.base.src_format.Value()); + } + }()}; + TranslateF2I(*this, insn, op_a); +} + +void TranslatorVisitor::F2I_cbuf(u64 insn) { + const F2I f2i{insn}; + const IR::F16F32F64 op_a{[&]() -> IR::F16F32F64 { + switch (f2i.src_format) { + case SrcFormat::F16: + return IR::F16{ir.CompositeExtract(ir.UnpackFloat2x16(GetCbuf(insn)), f2i.half)}; + case SrcFormat::F32: + return GetFloatCbuf(insn); + case SrcFormat::F64: { + return UnpackCbuf(*this, insn); + } + default: + throw NotImplementedException("Invalid F2I source format {}", f2i.src_format.Value()); + } + }()}; + TranslateF2I(*this, insn, op_a); +} + +void TranslatorVisitor::F2I_imm(u64) { + throw NotImplementedException("{}", Opcode::F2I_imm); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_fused_multiply_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_fused_multiply_add.cpp new file mode 100755 index 000000000..fa2a7807b --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_fused_multiply_add.cpp @@ -0,0 +1,94 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void FFMA(TranslatorVisitor& v, u64 insn, const IR::F32& src_b, const IR::F32& src_c, bool neg_a, + bool neg_b, bool neg_c, bool sat, bool cc, FmzMode fmz_mode, FpRounding fp_rounding) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a; + } const ffma{insn}; + + if (cc) { + throw NotImplementedException("FFMA CC"); + } + const IR::F32 op_a{v.ir.FPAbsNeg(v.F(ffma.src_a), false, neg_a)}; + const IR::F32 op_b{v.ir.FPAbsNeg(src_b, false, neg_b)}; + const IR::F32 op_c{v.ir.FPAbsNeg(src_c, false, neg_c)}; + const IR::FpControl fp_control{ + .no_contraction = true, + .rounding = CastFpRounding(fp_rounding), + .fmz_mode = CastFmzMode(fmz_mode), + }; + IR::F32 value{v.ir.FPFma(op_a, op_b, op_c, fp_control)}; + if (fmz_mode == FmzMode::FMZ && !sat) { + // Do not implement FMZ if SAT is enabled, as it does the logic for us. 
+ // On D3D9 mode, anything * 0 is zero, even NAN and infinity + const IR::F32 zero{v.ir.Imm32(0.0f)}; + const IR::U1 zero_a{v.ir.FPEqual(op_a, zero)}; + const IR::U1 zero_b{v.ir.FPEqual(op_b, zero)}; + const IR::U1 any_zero{v.ir.LogicalOr(zero_a, zero_b)}; + value = IR::F32{v.ir.Select(any_zero, op_c, value)}; + } + if (sat) { + value = v.ir.FPSaturate(value); + } + v.F(ffma.dest_reg, value); +} + +void FFMA(TranslatorVisitor& v, u64 insn, const IR::F32& src_b, const IR::F32& src_c) { + union { + u64 raw; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> neg_b; + BitField<49, 1, u64> neg_c; + BitField<50, 1, u64> sat; + BitField<51, 2, FpRounding> fp_rounding; + BitField<53, 2, FmzMode> fmz_mode; + } const ffma{insn}; + + FFMA(v, insn, src_b, src_c, false, ffma.neg_b != 0, ffma.neg_c != 0, ffma.sat != 0, + ffma.cc != 0, ffma.fmz_mode, ffma.fp_rounding); +} +} // Anonymous namespace + +void TranslatorVisitor::FFMA_reg(u64 insn) { + FFMA(*this, insn, GetFloatReg20(insn), GetFloatReg39(insn)); +} + +void TranslatorVisitor::FFMA_rc(u64 insn) { + FFMA(*this, insn, GetFloatReg39(insn), GetFloatCbuf(insn)); +} + +void TranslatorVisitor::FFMA_cr(u64 insn) { + FFMA(*this, insn, GetFloatCbuf(insn), GetFloatReg39(insn)); +} + +void TranslatorVisitor::FFMA_imm(u64 insn) { + FFMA(*this, insn, GetFloatImm20(insn), GetFloatReg39(insn)); +} + +void TranslatorVisitor::FFMA32I(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> src_c; // FFMA32I mirrors the destination and addition register + BitField<52, 1, u64> cc; + BitField<53, 2, FmzMode> fmz_mode; + BitField<55, 1, u64> sat; + BitField<56, 1, u64> neg_a; + BitField<57, 1, u64> neg_c; + } const ffma32i{insn}; + + FFMA(*this, insn, GetFloatImm32(insn), F(ffma32i.src_c), ffma32i.neg_a != 0, false, + ffma32i.neg_c != 0, ffma32i.sat != 0, ffma32i.cc != 0, ffma32i.fmz_mode, FpRounding::RN); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_min_max.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_min_max.cpp new file mode 100755 index 000000000..c0d6ee5af --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_min_max.cpp @@ -0,0 +1,62 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void FMNMX(TranslatorVisitor& v, u64 insn, const IR::F32& src_b) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + BitField<44, 1, u64> ftz; + BitField<45, 1, u64> negate_b; + BitField<46, 1, u64> abs_a; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> negate_a; + BitField<49, 1, u64> abs_b; + } const fmnmx{insn}; + + if (fmnmx.cc) { + throw NotImplementedException("FMNMX CC"); + } + + const IR::U1 pred{v.ir.GetPred(fmnmx.pred)}; + const IR::F32 op_a{v.ir.FPAbsNeg(v.F(fmnmx.src_a_reg), fmnmx.abs_a != 0, fmnmx.negate_a != 0)}; + const IR::F32 op_b{v.ir.FPAbsNeg(src_b, fmnmx.abs_b != 0, fmnmx.negate_b != 0)}; + + const IR::FpControl control{ + .no_contraction = false, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = (fmnmx.ftz != 0 ? 
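// The FMZ path in FFMA above implements D3D9-style multiplication: a zero multiplicand forces
// the product to zero even against NaN or infinity, so the FMA collapses to the addend. A
// standalone C++ sketch of that rule (illustrative only, not part of this patch):
#include <cmath>
#include <cstdio>

static float FmzFma(float op_a, float op_b, float op_c) {
    if (op_a == 0.0f || op_b == 0.0f) { // Mirrors the any_zero Select above
        return op_c;
    }
    return std::fma(op_a, op_b, op_c);
}

int main() {
    std::printf("%.1f\n", FmzFma(0.0f, INFINITY, 2.0f)); // 2.0 rather than NaN
}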
IR::FmzMode::FTZ : IR::FmzMode::None), + }; + IR::F32 max{v.ir.FPMax(op_a, op_b, control)}; + IR::F32 min{v.ir.FPMin(op_a, op_b, control)}; + + if (fmnmx.neg_pred != 0) { + std::swap(min, max); + } + + v.F(fmnmx.dest_reg, IR::F32{v.ir.Select(pred, min, max)}); +} +} // Anonymous namespace + +void TranslatorVisitor::FMNMX_reg(u64 insn) { + FMNMX(*this, insn, GetFloatReg20(insn)); +} + +void TranslatorVisitor::FMNMX_cbuf(u64 insn) { + FMNMX(*this, insn, GetFloatCbuf(insn)); +} + +void TranslatorVisitor::FMNMX_imm(u64 insn) { + FMNMX(*this, insn, GetFloatImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multi_function.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multi_function.cpp new file mode 100755 index 000000000..2f8605619 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multi_function.cpp @@ -0,0 +1,71 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Operation : u64 { + Cos = 0, + Sin = 1, + Ex2 = 2, // Base 2 exponent + Lg2 = 3, // Base 2 logarithm + Rcp = 4, // Reciprocal + Rsq = 5, // Reciprocal square root + Rcp64H = 6, // 64-bit reciprocal + Rsq64H = 7, // 64-bit reciprocal square root + Sqrt = 8, +}; +} // Anonymous namespace + +void TranslatorVisitor::MUFU(u64 insn) { + // MUFU is used to implement a bunch of special functions. See Operation. + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<20, 4, Operation> operation; + BitField<46, 1, u64> abs; + BitField<48, 1, u64> neg; + BitField<50, 1, u64> sat; + } const mufu{insn}; + + const IR::F32 op_a{ir.FPAbsNeg(F(mufu.src_reg), mufu.abs != 0, mufu.neg != 0)}; + IR::F32 value{[&]() -> IR::F32 { + switch (mufu.operation) { + case Operation::Cos: + return ir.FPCos(op_a); + case Operation::Sin: + return ir.FPSin(op_a); + case Operation::Ex2: + return ir.FPExp2(op_a); + case Operation::Lg2: + return ir.FPLog2(op_a); + case Operation::Rcp: + return ir.FPRecip(op_a); + case Operation::Rsq: + return ir.FPRecipSqrt(op_a); + case Operation::Rcp64H: + throw NotImplementedException("MUFU.RCP64H"); + case Operation::Rsq64H: + throw NotImplementedException("MUFU.RSQ64H"); + case Operation::Sqrt: + return ir.FPSqrt(op_a); + default: + throw NotImplementedException("Invalid MUFU operation {}", mufu.operation.Value()); + } + }()}; + + if (mufu.sat) { + value = ir.FPSaturate(value); + } + + F(mufu.dest_reg, value); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multiply.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multiply.cpp new file mode 100755 index 000000000..06226b7ce --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_multiply.cpp @@ -0,0 +1,127 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
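// MUFU above dispatches to hardware special-function approximations. A plain C++ reference
// model of the operations it maps (illustrative only, not part of this patch; RCP64H/RSQ64H
// act on the high half of a double and are not modelled, matching the NotImplemented throws
// above):
#include <cmath>

static float ModelMufu(unsigned operation, float x) {
    switch (operation) {
    case 0: return std::cos(x);         // Cos
    case 1: return std::sin(x);         // Sin
    case 2: return std::exp2(x);        // Ex2
    case 3: return std::log2(x);        // Lg2
    case 4: return 1.0f / x;            // Rcp
    case 5: return 1.0f / std::sqrt(x); // Rsq
    case 8: return std::sqrt(x);        // Sqrt
    default: return std::nanf("");      // Rcp64H/Rsq64H not modelled
    }
}

int main() {
    return ModelMufu(2, 3.0f) == 8.0f ? 0 : 1; // exp2(3) == 8
}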
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Scale : u64 { + None, + D2, + D4, + D8, + M8, + M4, + M2, + INVALIDSCALE37, +}; + +float ScaleFactor(Scale scale) { + switch (scale) { + case Scale::None: + return 1.0f; + case Scale::D2: + return 1.0f / 2.0f; + case Scale::D4: + return 1.0f / 4.0f; + case Scale::D8: + return 1.0f / 8.0f; + case Scale::M8: + return 8.0f; + case Scale::M4: + return 4.0f; + case Scale::M2: + return 2.0f; + case Scale::INVALIDSCALE37: + break; + } + throw NotImplementedException("Invalid FMUL scale {}", scale); +} + +void FMUL(TranslatorVisitor& v, u64 insn, const IR::F32& src_b, FmzMode fmz_mode, + FpRounding fp_rounding, Scale scale, bool sat, bool cc, bool neg_b) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a; + } const fmul{insn}; + + if (cc) { + throw NotImplementedException("FMUL CC"); + } + IR::F32 op_a{v.F(fmul.src_a)}; + if (scale != Scale::None) { + if (fmz_mode != FmzMode::FTZ || fp_rounding != FpRounding::RN) { + throw NotImplementedException("FMUL scale with non-FMZ or non-RN modifiers"); + } + op_a = v.ir.FPMul(op_a, v.ir.Imm32(ScaleFactor(scale))); + } + const IR::F32 op_b{v.ir.FPAbsNeg(src_b, false, neg_b)}; + const IR::FpControl fp_control{ + .no_contraction = true, + .rounding = CastFpRounding(fp_rounding), + .fmz_mode = CastFmzMode(fmz_mode), + }; + IR::F32 value{v.ir.FPMul(op_a, op_b, fp_control)}; + if (fmz_mode == FmzMode::FMZ && !sat) { + // Do not implement FMZ if SAT is enabled, as it does the logic for us. 
+ // On D3D9 mode, anything * 0 is zero, even NAN and infinity + const IR::F32 zero{v.ir.Imm32(0.0f)}; + const IR::U1 zero_a{v.ir.FPEqual(op_a, zero)}; + const IR::U1 zero_b{v.ir.FPEqual(op_b, zero)}; + const IR::U1 any_zero{v.ir.LogicalOr(zero_a, zero_b)}; + value = IR::F32{v.ir.Select(any_zero, zero, value)}; + } + if (sat) { + value = v.ir.FPSaturate(value); + } + v.F(fmul.dest_reg, value); +} + +void FMUL(TranslatorVisitor& v, u64 insn, const IR::F32& src_b) { + union { + u64 raw; + BitField<39, 2, FpRounding> fp_rounding; + BitField<41, 3, Scale> scale; + BitField<44, 2, FmzMode> fmz; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> neg_b; + BitField<50, 1, u64> sat; + } const fmul{insn}; + + FMUL(v, insn, src_b, fmul.fmz, fmul.fp_rounding, fmul.scale, fmul.sat != 0, fmul.cc != 0, + fmul.neg_b != 0); +} +} // Anonymous namespace + +void TranslatorVisitor::FMUL_reg(u64 insn) { + return FMUL(*this, insn, GetFloatReg20(insn)); +} + +void TranslatorVisitor::FMUL_cbuf(u64 insn) { + return FMUL(*this, insn, GetFloatCbuf(insn)); +} + +void TranslatorVisitor::FMUL_imm(u64 insn) { + return FMUL(*this, insn, GetFloatImm20(insn)); +} + +void TranslatorVisitor::FMUL32I(u64 insn) { + union { + u64 raw; + BitField<52, 1, u64> cc; + BitField<53, 2, FmzMode> fmz; + BitField<55, 1, u64> sat; + } const fmul32i{insn}; + + FMUL(*this, insn, GetFloatImm32(insn), fmul32i.fmz, FpRounding::RN, Scale::None, + fmul32i.sat != 0, fmul32i.cc != 0, false); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_range_reduction.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_range_reduction.cpp new file mode 100755 index 000000000..f91b93fad --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_range_reduction.cpp @@ -0,0 +1,41 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Mode : u64 { + SINCOS, + EX2, +}; + +void RRO(TranslatorVisitor& v, u64 insn, const IR::F32& src) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<39, 1, Mode> mode; + BitField<45, 1, u64> neg; + BitField<49, 1, u64> abs; + } const rro{insn}; + + v.F(rro.dest_reg, v.ir.FPAbsNeg(src, rro.abs != 0, rro.neg != 0)); +} +} // Anonymous namespace + +void TranslatorVisitor::RRO_reg(u64 insn) { + RRO(*this, insn, GetFloatReg20(insn)); +} + +void TranslatorVisitor::RRO_cbuf(u64 insn) { + RRO(*this, insn, GetFloatCbuf(insn)); +} + +void TranslatorVisitor::RRO_imm(u64) { + throw NotImplementedException("RRO (imm)"); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_set_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_set_predicate.cpp new file mode 100755 index 000000000..5f93a1513 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_set_predicate.cpp @@ -0,0 +1,60 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
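// The FMUL scale field above pre-multiplies operand A by a power of two (1/8 up to 8), and this
// translation only accepts it together with FTZ and round-to-nearest. A standalone C++ sketch
// of the scale table and its application (illustrative only, not part of this patch):
#include <cstdio>

static float FmulScaleFactor(unsigned scale) {
    // Mirrors ScaleFactor above: None, D2, D4, D8 divide; M8, M4, M2 multiply
    static constexpr float table[]{1.0f, 0.5f, 0.25f, 0.125f, 8.0f, 4.0f, 2.0f};
    return table[scale];
}

static float ScaledFmul(float op_a, float op_b, unsigned scale) {
    return (op_a * FmulScaleFactor(scale)) * op_b;
}

int main() {
    std::printf("%.2f\n", ScaledFmul(3.0f, 2.0f, 2)); // 3 * 1/4 * 2 = 1.50
}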
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void FSETP(TranslatorVisitor& v, u64 insn, const IR::F32& src_b) { + union { + u64 insn; + BitField<0, 3, IR::Pred> dest_pred_b; + BitField<3, 3, IR::Pred> dest_pred_a; + BitField<6, 1, u64> negate_b; + BitField<7, 1, u64> abs_a; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<39, 3, IR::Pred> bop_pred; + BitField<42, 1, u64> neg_bop_pred; + BitField<43, 1, u64> negate_a; + BitField<44, 1, u64> abs_b; + BitField<45, 2, BooleanOp> bop; + BitField<47, 1, u64> ftz; + BitField<48, 4, FPCompareOp> compare_op; + } const fsetp{insn}; + + const IR::F32 op_a{v.ir.FPAbsNeg(v.F(fsetp.src_a_reg), fsetp.abs_a != 0, fsetp.negate_a != 0)}; + const IR::F32 op_b = v.ir.FPAbsNeg(src_b, fsetp.abs_b != 0, fsetp.negate_b != 0); + const IR::FpControl control{ + .no_contraction = false, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = (fsetp.ftz != 0 ? IR::FmzMode::FTZ : IR::FmzMode::None), + }; + + const BooleanOp bop{fsetp.bop}; + const FPCompareOp compare_op{fsetp.compare_op}; + const IR::U1 comparison{FloatingPointCompare(v.ir, op_a, op_b, compare_op, control)}; + const IR::U1 bop_pred{v.ir.GetPred(fsetp.bop_pred, fsetp.neg_bop_pred != 0)}; + const IR::U1 result_a{PredicateCombine(v.ir, comparison, bop_pred, bop)}; + const IR::U1 result_b{PredicateCombine(v.ir, v.ir.LogicalNot(comparison), bop_pred, bop)}; + v.ir.SetPred(fsetp.dest_pred_a, result_a); + v.ir.SetPred(fsetp.dest_pred_b, result_b); +} +} // Anonymous namespace + +void TranslatorVisitor::FSETP_reg(u64 insn) { + FSETP(*this, insn, GetFloatReg20(insn)); +} + +void TranslatorVisitor::FSETP_cbuf(u64 insn) { + FSETP(*this, insn, GetFloatCbuf(insn)); +} + +void TranslatorVisitor::FSETP_imm(u64 insn) { + FSETP(*this, insn, GetFloatImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_swizzled_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_swizzled_add.cpp new file mode 100755 index 000000000..7550a8d4c --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/floating_point_swizzled_add.cpp @@ -0,0 +1,44 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +void TranslatorVisitor::FSWZADD(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<28, 8, u64> swizzle; + BitField<38, 1, u64> ndv; + BitField<39, 2, FpRounding> round; + BitField<44, 1, u64> ftz; + BitField<47, 1, u64> cc; + } const fswzadd{insn}; + + if (fswzadd.ndv != 0) { + throw NotImplementedException("FSWZADD NDV"); + } + + const IR::F32 src_a{GetFloatReg8(insn)}; + const IR::F32 src_b{GetFloatReg20(insn)}; + const IR::U32 swizzle{ir.Imm32(static_cast(fswzadd.swizzle))}; + + const IR::FpControl fp_control{ + .no_contraction = false, + .rounding = CastFpRounding(fswzadd.round), + .fmz_mode = (fswzadd.ftz != 0 ? 
IR::FmzMode::FTZ : IR::FmzMode::None), + }; + + const IR::F32 result{ir.FSwizzleAdd(src_a, src_b, swizzle, fp_control)}; + F(fswzadd.dest_reg, result); + + if (fswzadd.cc != 0) { + throw NotImplementedException("FSWZADD CC"); + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_add.cpp new file mode 100755 index 000000000..f2738a93b --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_add.cpp @@ -0,0 +1,125 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h" + +namespace Shader::Maxwell { +namespace { +void HADD2(TranslatorVisitor& v, u64 insn, Merge merge, bool ftz, bool sat, bool abs_a, bool neg_a, + Swizzle swizzle_a, bool abs_b, bool neg_b, Swizzle swizzle_b, const IR::U32& src_b) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a; + } const hadd2{insn}; + + auto [lhs_a, rhs_a]{Extract(v.ir, v.X(hadd2.src_a), swizzle_a)}; + auto [lhs_b, rhs_b]{Extract(v.ir, src_b, swizzle_b)}; + const bool promotion{lhs_a.Type() != lhs_b.Type()}; + if (promotion) { + if (lhs_a.Type() == IR::Type::F16) { + lhs_a = v.ir.FPConvert(32, lhs_a); + rhs_a = v.ir.FPConvert(32, rhs_a); + } + if (lhs_b.Type() == IR::Type::F16) { + lhs_b = v.ir.FPConvert(32, lhs_b); + rhs_b = v.ir.FPConvert(32, rhs_b); + } + } + lhs_a = v.ir.FPAbsNeg(lhs_a, abs_a, neg_a); + rhs_a = v.ir.FPAbsNeg(rhs_a, abs_a, neg_a); + + lhs_b = v.ir.FPAbsNeg(lhs_b, abs_b, neg_b); + rhs_b = v.ir.FPAbsNeg(rhs_b, abs_b, neg_b); + + const IR::FpControl fp_control{ + .no_contraction = true, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = (ftz ? 
IR::FmzMode::FTZ : IR::FmzMode::None), + }; + IR::F16F32F64 lhs{v.ir.FPAdd(lhs_a, lhs_b, fp_control)}; + IR::F16F32F64 rhs{v.ir.FPAdd(rhs_a, rhs_b, fp_control)}; + if (sat) { + lhs = v.ir.FPSaturate(lhs); + rhs = v.ir.FPSaturate(rhs); + } + if (promotion) { + lhs = v.ir.FPConvert(16, lhs); + rhs = v.ir.FPConvert(16, rhs); + } + v.X(hadd2.dest_reg, MergeResult(v.ir, hadd2.dest_reg, lhs, rhs, merge)); +} + +void HADD2(TranslatorVisitor& v, u64 insn, bool sat, bool abs_b, bool neg_b, Swizzle swizzle_b, + const IR::U32& src_b) { + union { + u64 raw; + BitField<49, 2, Merge> merge; + BitField<39, 1, u64> ftz; + BitField<43, 1, u64> neg_a; + BitField<44, 1, u64> abs_a; + BitField<47, 2, Swizzle> swizzle_a; + } const hadd2{insn}; + + HADD2(v, insn, hadd2.merge, hadd2.ftz != 0, sat, hadd2.abs_a != 0, hadd2.neg_a != 0, + hadd2.swizzle_a, abs_b, neg_b, swizzle_b, src_b); +} +} // Anonymous namespace + +void TranslatorVisitor::HADD2_reg(u64 insn) { + union { + u64 raw; + BitField<32, 1, u64> sat; + BitField<31, 1, u64> neg_b; + BitField<30, 1, u64> abs_b; + BitField<28, 2, Swizzle> swizzle_b; + } const hadd2{insn}; + + HADD2(*this, insn, hadd2.sat != 0, hadd2.abs_b != 0, hadd2.neg_b != 0, hadd2.swizzle_b, + GetReg20(insn)); +} + +void TranslatorVisitor::HADD2_cbuf(u64 insn) { + union { + u64 raw; + BitField<52, 1, u64> sat; + BitField<56, 1, u64> neg_b; + BitField<54, 1, u64> abs_b; + } const hadd2{insn}; + + HADD2(*this, insn, hadd2.sat != 0, hadd2.abs_b != 0, hadd2.neg_b != 0, Swizzle::F32, + GetCbuf(insn)); +} + +void TranslatorVisitor::HADD2_imm(u64 insn) { + union { + u64 raw; + BitField<52, 1, u64> sat; + BitField<56, 1, u64> neg_high; + BitField<30, 9, u64> high; + BitField<29, 1, u64> neg_low; + BitField<20, 9, u64> low; + } const hadd2{insn}; + + const u32 imm{ + static_cast<u32>(hadd2.low << 6) | static_cast<u32>((hadd2.neg_low != 0 ? 1 : 0) << 15) | + static_cast<u32>(hadd2.high << 22) | static_cast<u32>((hadd2.neg_high != 0 ? 1 : 0) << 31)}; + HADD2(*this, insn, hadd2.sat != 0, false, false, Swizzle::H1_H0, ir.Imm32(imm)); +} + +void TranslatorVisitor::HADD2_32I(u64 insn) { + union { + u64 raw; + BitField<55, 1, u64> ftz; + BitField<52, 1, u64> sat; + BitField<56, 1, u64> neg_a; + BitField<53, 2, Swizzle> swizzle_a; + BitField<20, 32, u64> imm32; + } const hadd2{insn}; + + const u32 imm{static_cast<u32>(hadd2.imm32)}; + HADD2(*this, insn, Merge::H1_H0, hadd2.ftz != 0, hadd2.sat != 0, false, hadd2.neg_a != 0, + hadd2.swizzle_a, false, false, Swizzle::H1_H0, ir.Imm32(imm)); +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_fused_multiply_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_fused_multiply_add.cpp new file mode 100755 index 000000000..fd7986701 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_fused_multiply_add.cpp @@ -0,0 +1,169 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
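// HADD2_imm above rebuilds a packed pair of fp16 immediates from two 9-bit fields: each field
// supplies the 9 bits directly below the sign of its half (bits 6-14 and 22-30), and the
// separate neg bits become the fp16 sign bits (15 and 31). A standalone C++ sketch of the
// packing (illustrative only, not part of this patch):
#include <cstdint>
#include <cstdio>

static uint32_t PackHalf2Immediate(uint32_t low9, bool neg_low, uint32_t high9, bool neg_high) {
    return (low9 << 6) | ((neg_low ? 1u : 0u) << 15) |
           (high9 << 22) | ((neg_high ? 1u : 0u) << 31);
}

int main() {
    // Low half becomes 0x7800, high half 0xf800 (same bits plus the sign)
    std::printf("0x%08x\n", PackHalf2Immediate(0x1e0, false, 0x1e0, true));
}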
+ +#include "shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h" + +namespace Shader::Maxwell { +namespace { +void HFMA2(TranslatorVisitor& v, u64 insn, Merge merge, Swizzle swizzle_a, bool neg_b, bool neg_c, + Swizzle swizzle_b, Swizzle swizzle_c, const IR::U32& src_b, const IR::U32& src_c, + bool sat, HalfPrecision precision) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a; + } const hfma2{insn}; + + auto [lhs_a, rhs_a]{Extract(v.ir, v.X(hfma2.src_a), swizzle_a)}; + auto [lhs_b, rhs_b]{Extract(v.ir, src_b, swizzle_b)}; + auto [lhs_c, rhs_c]{Extract(v.ir, src_c, swizzle_c)}; + const bool promotion{lhs_a.Type() != lhs_b.Type() || lhs_a.Type() != lhs_c.Type()}; + if (promotion) { + if (lhs_a.Type() == IR::Type::F16) { + lhs_a = v.ir.FPConvert(32, lhs_a); + rhs_a = v.ir.FPConvert(32, rhs_a); + } + if (lhs_b.Type() == IR::Type::F16) { + lhs_b = v.ir.FPConvert(32, lhs_b); + rhs_b = v.ir.FPConvert(32, rhs_b); + } + if (lhs_c.Type() == IR::Type::F16) { + lhs_c = v.ir.FPConvert(32, lhs_c); + rhs_c = v.ir.FPConvert(32, rhs_c); + } + } + + lhs_b = v.ir.FPAbsNeg(lhs_b, false, neg_b); + rhs_b = v.ir.FPAbsNeg(rhs_b, false, neg_b); + + lhs_c = v.ir.FPAbsNeg(lhs_c, false, neg_c); + rhs_c = v.ir.FPAbsNeg(rhs_c, false, neg_c); + + const IR::FpControl fp_control{ + .no_contraction = true, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = HalfPrecision2FmzMode(precision), + }; + IR::F16F32F64 lhs{v.ir.FPFma(lhs_a, lhs_b, lhs_c, fp_control)}; + IR::F16F32F64 rhs{v.ir.FPFma(rhs_a, rhs_b, rhs_c, fp_control)}; + if (precision == HalfPrecision::FMZ && !sat) { + // Do not implement FMZ if SAT is enabled, as it does the logic for us. + // On D3D9 mode, anything * 0 is zero, even NAN and infinity + const IR::F32 zero{v.ir.Imm32(0.0f)}; + const IR::U1 lhs_zero_a{v.ir.FPEqual(lhs_a, zero)}; + const IR::U1 lhs_zero_b{v.ir.FPEqual(lhs_b, zero)}; + const IR::U1 lhs_any_zero{v.ir.LogicalOr(lhs_zero_a, lhs_zero_b)}; + lhs = IR::F16F32F64{v.ir.Select(lhs_any_zero, lhs_c, lhs)}; + + const IR::U1 rhs_zero_a{v.ir.FPEqual(rhs_a, zero)}; + const IR::U1 rhs_zero_b{v.ir.FPEqual(rhs_b, zero)}; + const IR::U1 rhs_any_zero{v.ir.LogicalOr(rhs_zero_a, rhs_zero_b)}; + rhs = IR::F16F32F64{v.ir.Select(rhs_any_zero, rhs_c, rhs)}; + } + if (sat) { + lhs = v.ir.FPSaturate(lhs); + rhs = v.ir.FPSaturate(rhs); + } + if (promotion) { + lhs = v.ir.FPConvert(16, lhs); + rhs = v.ir.FPConvert(16, rhs); + } + v.X(hfma2.dest_reg, MergeResult(v.ir, hfma2.dest_reg, lhs, rhs, merge)); +} + +void HFMA2(TranslatorVisitor& v, u64 insn, bool neg_b, bool neg_c, Swizzle swizzle_b, + Swizzle swizzle_c, const IR::U32& src_b, const IR::U32& src_c, bool sat, + HalfPrecision precision) { + union { + u64 raw; + BitField<47, 2, Swizzle> swizzle_a; + BitField<49, 2, Merge> merge; + } const hfma2{insn}; + + HFMA2(v, insn, hfma2.merge, hfma2.swizzle_a, neg_b, neg_c, swizzle_b, swizzle_c, src_b, src_c, + sat, precision); +} +} // Anonymous namespace + +void TranslatorVisitor::HFMA2_reg(u64 insn) { + union { + u64 raw; + BitField<28, 2, Swizzle> swizzle_b; + BitField<32, 1, u64> saturate; + BitField<31, 1, u64> neg_b; + BitField<30, 1, u64> neg_c; + BitField<35, 2, Swizzle> swizzle_c; + BitField<37, 2, HalfPrecision> precision; + } const hfma2{insn}; + + HFMA2(*this, insn, hfma2.neg_b != 0, hfma2.neg_c != 0, hfma2.swizzle_b, hfma2.swizzle_c, + GetReg20(insn), GetReg39(insn), hfma2.saturate != 0, hfma2.precision); +} + +void TranslatorVisitor::HFMA2_rc(u64 insn) { + union { + u64 raw; + 
BitField<51, 1, u64> neg_c; + BitField<52, 1, u64> saturate; + BitField<53, 2, Swizzle> swizzle_b; + BitField<56, 1, u64> neg_b; + BitField<57, 2, HalfPrecision> precision; + } const hfma2{insn}; + + HFMA2(*this, insn, hfma2.neg_b != 0, hfma2.neg_c != 0, hfma2.swizzle_b, Swizzle::F32, + GetReg39(insn), GetCbuf(insn), hfma2.saturate != 0, hfma2.precision); +} + +void TranslatorVisitor::HFMA2_cr(u64 insn) { + union { + u64 raw; + BitField<51, 1, u64> neg_c; + BitField<52, 1, u64> saturate; + BitField<53, 2, Swizzle> swizzle_c; + BitField<56, 1, u64> neg_b; + BitField<57, 2, HalfPrecision> precision; + } const hfma2{insn}; + + HFMA2(*this, insn, hfma2.neg_b != 0, hfma2.neg_c != 0, Swizzle::F32, hfma2.swizzle_c, + GetCbuf(insn), GetReg39(insn), hfma2.saturate != 0, hfma2.precision); +} + +void TranslatorVisitor::HFMA2_imm(u64 insn) { + union { + u64 raw; + BitField<51, 1, u64> neg_c; + BitField<52, 1, u64> saturate; + BitField<53, 2, Swizzle> swizzle_c; + + BitField<56, 1, u64> neg_high; + BitField<30, 9, u64> high; + BitField<29, 1, u64> neg_low; + BitField<20, 9, u64> low; + BitField<57, 2, HalfPrecision> precision; + } const hfma2{insn}; + + const u32 imm{ + static_cast<u32>(hfma2.low << 6) | static_cast<u32>((hfma2.neg_low != 0 ? 1 : 0) << 15) | + static_cast<u32>(hfma2.high << 22) | static_cast<u32>((hfma2.neg_high != 0 ? 1 : 0) << 31)}; + + HFMA2(*this, insn, false, hfma2.neg_c != 0, Swizzle::H1_H0, hfma2.swizzle_c, ir.Imm32(imm), + GetReg39(insn), hfma2.saturate != 0, hfma2.precision); +} + +void TranslatorVisitor::HFMA2_32I(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> src_c; + BitField<20, 32, u64> imm32; + BitField<52, 1, u64> neg_c; + BitField<53, 2, Swizzle> swizzle_a; + BitField<55, 2, HalfPrecision> precision; + } const hfma2{insn}; + + const u32 imm{static_cast<u32>(hfma2.imm32)}; + HFMA2(*this, insn, Merge::H1_H0, hfma2.swizzle_a, false, hfma2.neg_c != 0, Swizzle::H1_H0, + Swizzle::H1_H0, ir.Imm32(imm), X(hfma2.src_c), false, hfma2.precision); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.cpp new file mode 100755 index 000000000..0dbeb7f56 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.cpp @@ -0,0 +1,62 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
+ +#include "shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h" + +namespace Shader::Maxwell { + +IR::FmzMode HalfPrecision2FmzMode(HalfPrecision precision) { + switch (precision) { + case HalfPrecision::None: + return IR::FmzMode::None; + case HalfPrecision::FTZ: + return IR::FmzMode::FTZ; + case HalfPrecision::FMZ: + return IR::FmzMode::FMZ; + default: + return IR::FmzMode::DontCare; + } +} + +std::pair Extract(IR::IREmitter& ir, IR::U32 value, Swizzle swizzle) { + switch (swizzle) { + case Swizzle::H1_H0: { + const IR::Value vector{ir.UnpackFloat2x16(value)}; + return {IR::F16{ir.CompositeExtract(vector, 0)}, IR::F16{ir.CompositeExtract(vector, 1)}}; + } + case Swizzle::H0_H0: { + const IR::F16 scalar{ir.CompositeExtract(ir.UnpackFloat2x16(value), 0)}; + return {scalar, scalar}; + } + case Swizzle::H1_H1: { + const IR::F16 scalar{ir.CompositeExtract(ir.UnpackFloat2x16(value), 1)}; + return {scalar, scalar}; + } + case Swizzle::F32: { + const IR::F32 scalar{ir.BitCast(value)}; + return {scalar, scalar}; + } + } + throw InvalidArgument("Invalid swizzle {}", swizzle); +} + +IR::U32 MergeResult(IR::IREmitter& ir, IR::Reg dest, const IR::F16& lhs, const IR::F16& rhs, + Merge merge) { + switch (merge) { + case Merge::H1_H0: + return ir.PackFloat2x16(ir.CompositeConstruct(lhs, rhs)); + case Merge::F32: + return ir.BitCast(ir.FPConvert(32, lhs)); + case Merge::MRG_H0: + case Merge::MRG_H1: { + const IR::Value vector{ir.UnpackFloat2x16(ir.GetReg(dest))}; + const bool is_h0{merge == Merge::MRG_H0}; + const IR::F16 insert{ir.FPConvert(16, is_h0 ? lhs : rhs)}; + return ir.PackFloat2x16(ir.CompositeInsert(vector, insert, is_h0 ? 0 : 1)); + } + } + throw InvalidArgument("Invalid merge {}", merge); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h new file mode 100755 index 000000000..59da56a7e --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h @@ -0,0 +1,42 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#pragma once + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { + +enum class Merge : u64 { + H1_H0, + F32, + MRG_H0, + MRG_H1, +}; + +enum class Swizzle : u64 { + H1_H0, + F32, + H0_H0, + H1_H1, +}; + +enum class HalfPrecision : u64 { + None = 0, + FTZ = 1, + FMZ = 2, +}; + +IR::FmzMode HalfPrecision2FmzMode(HalfPrecision precision); + +std::pair<IR::F16F32F64, IR::F16F32F64> Extract(IR::IREmitter& ir, IR::U32 value, Swizzle swizzle); + +IR::U32 MergeResult(IR::IREmitter& ir, IR::Reg dest, const IR::F16& lhs, const IR::F16& rhs, + Merge merge); + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_multiply.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_multiply.cpp new file mode 100755 index 000000000..3f548ce76 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_multiply.cpp @@ -0,0 +1,143 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h" + +namespace Shader::Maxwell { +namespace { +void HMUL2(TranslatorVisitor& v, u64 insn, Merge merge, bool sat, bool abs_a, bool neg_a, + Swizzle swizzle_a, bool abs_b, bool neg_b, Swizzle swizzle_b, const IR::U32& src_b, + HalfPrecision precision) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a; + } const hmul2{insn}; + + auto [lhs_a, rhs_a]{Extract(v.ir, v.X(hmul2.src_a), swizzle_a)}; + auto [lhs_b, rhs_b]{Extract(v.ir, src_b, swizzle_b)}; + const bool promotion{lhs_a.Type() != lhs_b.Type()}; + if (promotion) { + if (lhs_a.Type() == IR::Type::F16) { + lhs_a = v.ir.FPConvert(32, lhs_a); + rhs_a = v.ir.FPConvert(32, rhs_a); + } + if (lhs_b.Type() == IR::Type::F16) { + lhs_b = v.ir.FPConvert(32, lhs_b); + rhs_b = v.ir.FPConvert(32, rhs_b); + } + } + lhs_a = v.ir.FPAbsNeg(lhs_a, abs_a, neg_a); + rhs_a = v.ir.FPAbsNeg(rhs_a, abs_a, neg_a); + + lhs_b = v.ir.FPAbsNeg(lhs_b, abs_b, neg_b); + rhs_b = v.ir.FPAbsNeg(rhs_b, abs_b, neg_b); + + const IR::FpControl fp_control{ + .no_contraction = true, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = HalfPrecision2FmzMode(precision), + }; + IR::F16F32F64 lhs{v.ir.FPMul(lhs_a, lhs_b, fp_control)}; + IR::F16F32F64 rhs{v.ir.FPMul(rhs_a, rhs_b, fp_control)}; + if (precision == HalfPrecision::FMZ && !sat) { + // Do not implement FMZ if SAT is enabled, as it does the logic for us.
+        // On D3D9 mode, anything * 0 is zero, even NaN and infinity
+        const IR::F32 zero{v.ir.Imm32(0.0f)};
+        const IR::U1 lhs_zero_a{v.ir.FPEqual(lhs_a, zero)};
+        const IR::U1 lhs_zero_b{v.ir.FPEqual(lhs_b, zero)};
+        const IR::U1 lhs_any_zero{v.ir.LogicalOr(lhs_zero_a, lhs_zero_b)};
+        lhs = IR::F16F32F64{v.ir.Select(lhs_any_zero, zero, lhs)};
+
+        const IR::U1 rhs_zero_a{v.ir.FPEqual(rhs_a, zero)};
+        const IR::U1 rhs_zero_b{v.ir.FPEqual(rhs_b, zero)};
+        const IR::U1 rhs_any_zero{v.ir.LogicalOr(rhs_zero_a, rhs_zero_b)};
+        rhs = IR::F16F32F64{v.ir.Select(rhs_any_zero, zero, rhs)};
+    }
+    if (sat) {
+        lhs = v.ir.FPSaturate(lhs);
+        rhs = v.ir.FPSaturate(rhs);
+    }
+    if (promotion) {
+        lhs = v.ir.FPConvert(16, lhs);
+        rhs = v.ir.FPConvert(16, rhs);
+    }
+    v.X(hmul2.dest_reg, MergeResult(v.ir, hmul2.dest_reg, lhs, rhs, merge));
+}
+
+void HMUL2(TranslatorVisitor& v, u64 insn, bool sat, bool abs_a, bool neg_a, bool abs_b, bool neg_b,
+           Swizzle swizzle_b, const IR::U32& src_b) {
+    union {
+        u64 raw;
+        BitField<49, 2, Merge> merge;
+        BitField<47, 2, Swizzle> swizzle_a;
+        BitField<39, 2, HalfPrecision> precision;
+    } const hmul2{insn};
+
+    HMUL2(v, insn, hmul2.merge, sat, abs_a, neg_a, hmul2.swizzle_a, abs_b, neg_b, swizzle_b, src_b,
+          hmul2.precision);
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::HMUL2_reg(u64 insn) {
+    union {
+        u64 raw;
+        BitField<32, 1, u64> sat;
+        BitField<31, 1, u64> neg_b;
+        BitField<30, 1, u64> abs_b;
+        BitField<44, 1, u64> abs_a;
+        BitField<28, 2, Swizzle> swizzle_b;
+    } const hmul2{insn};
+
+    HMUL2(*this, insn, hmul2.sat != 0, hmul2.abs_a != 0, false, hmul2.abs_b != 0, hmul2.neg_b != 0,
+          hmul2.swizzle_b, GetReg20(insn));
+}
+
+void TranslatorVisitor::HMUL2_cbuf(u64 insn) {
+    union {
+        u64 raw;
+        BitField<52, 1, u64> sat;
+        BitField<54, 1, u64> abs_b;
+        BitField<43, 1, u64> neg_a;
+        BitField<44, 1, u64> abs_a;
+    } const hmul2{insn};
+
+    HMUL2(*this, insn, hmul2.sat != 0, hmul2.abs_a != 0, hmul2.neg_a != 0, hmul2.abs_b != 0, false,
+          Swizzle::F32, GetCbuf(insn));
+}
+
+void TranslatorVisitor::HMUL2_imm(u64 insn) {
+    union {
+        u64 raw;
+        BitField<52, 1, u64> sat;
+        BitField<56, 1, u64> neg_high;
+        BitField<30, 9, u64> high;
+        BitField<29, 1, u64> neg_low;
+        BitField<20, 9, u64> low;
+        BitField<43, 1, u64> neg_a;
+        BitField<44, 1, u64> abs_a;
+    } const hmul2{insn};
+
+    const u32 imm{
+        static_cast<u32>(hmul2.low << 6) | static_cast<u32>((hmul2.neg_low != 0 ? 1 : 0) << 15) |
+        static_cast<u32>(hmul2.high << 22) |
+        static_cast<u32>((hmul2.neg_high != 0 ? 1 : 0) << 31)};
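+    // Worked example of the half-immediate expansion above (sketch): each 9-bit
+    // field supplies the top exponent/mantissa bits of one fp16, so low = 0x0F0
+    // with neg_low = 0 becomes (0x0F0 << 6) = 0x3C00 = 1.0 in the low half, while
+    // neg_low and neg_high land in the fp16 sign bits 15 and 31.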
+    HMUL2(*this, insn, hmul2.sat != 0, hmul2.abs_a != 0, hmul2.neg_a != 0, false, false,
+          Swizzle::H1_H0, ir.Imm32(imm));
+}
+
+void TranslatorVisitor::HMUL2_32I(u64 insn) {
+    union {
+        u64 raw;
+        BitField<55, 2, HalfPrecision> precision;
+        BitField<52, 1, u64> sat;
+        BitField<53, 2, Swizzle> swizzle_a;
+        BitField<20, 32, u64> imm32;
+    } const hmul2{insn};
+
+    const u32 imm{static_cast<u32>(hmul2.imm32)};
+    HMUL2(*this, insn, Merge::H1_H0, hmul2.sat != 0, false, false, hmul2.swizzle_a, false, false,
+          Swizzle::H1_H0, ir.Imm32(imm), hmul2.precision);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set.cpp
new file mode 100755
index 000000000..cca5b831f
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set.cpp
@@ -0,0 +1,117 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h"
+
+namespace Shader::Maxwell {
+namespace {
+void HSET2(TranslatorVisitor& v, u64 insn, const IR::U32& src_b, bool bf, bool ftz, bool neg_b,
+           bool abs_b, FPCompareOp compare_op, Swizzle swizzle_b) {
+    union {
+        u64 insn;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> src_a_reg;
+        BitField<39, 3, IR::Pred> pred;
+        BitField<42, 1, u64> neg_pred;
+        BitField<43, 1, u64> neg_a;
+        BitField<45, 2, BooleanOp> bop;
+        BitField<44, 1, u64> abs_a;
+        BitField<47, 2, Swizzle> swizzle_a;
+    } const hset2{insn};
+
+    auto [lhs_a, rhs_a]{Extract(v.ir, v.X(hset2.src_a_reg), hset2.swizzle_a)};
+    auto [lhs_b, rhs_b]{Extract(v.ir, src_b, swizzle_b)};
+
+    if (lhs_a.Type() != lhs_b.Type()) {
+        if (lhs_a.Type() == IR::Type::F16) {
+            lhs_a = v.ir.FPConvert(32, lhs_a);
+            rhs_a = v.ir.FPConvert(32, rhs_a);
+        }
+        if (lhs_b.Type() == IR::Type::F16) {
+            lhs_b = v.ir.FPConvert(32, lhs_b);
+            rhs_b = v.ir.FPConvert(32, rhs_b);
+        }
+    }
+
+    lhs_a = v.ir.FPAbsNeg(lhs_a, hset2.abs_a != 0, hset2.neg_a != 0);
+    rhs_a = v.ir.FPAbsNeg(rhs_a, hset2.abs_a != 0, hset2.neg_a != 0);
+
+    lhs_b = v.ir.FPAbsNeg(lhs_b, abs_b, neg_b);
+    rhs_b = v.ir.FPAbsNeg(rhs_b, abs_b, neg_b);
+
+    const IR::FpControl control{
+        .no_contraction = false,
+        .rounding = IR::FpRounding::DontCare,
+        .fmz_mode = (ftz ? IR::FmzMode::FTZ : IR::FmzMode::None),
+    };
+
+    IR::U1 pred{v.ir.GetPred(hset2.pred)};
+    if (hset2.neg_pred != 0) {
+        pred = v.ir.LogicalNot(pred);
+    }
+    const IR::U1 cmp_result_lhs{FloatingPointCompare(v.ir, lhs_a, lhs_b, compare_op, control)};
+    const IR::U1 cmp_result_rhs{FloatingPointCompare(v.ir, rhs_a, rhs_b, compare_op, control)};
+    const IR::U1 bop_result_lhs{PredicateCombine(v.ir, cmp_result_lhs, pred, hset2.bop)};
+    const IR::U1 bop_result_rhs{PredicateCombine(v.ir, cmp_result_rhs, pred, hset2.bop)};
+
+    const u32 true_value = bf ? 0x3c00 : 0xffff;
+    const IR::U32 true_val_lhs{v.ir.Imm32(true_value)};
+    const IR::U32 true_val_rhs{v.ir.Imm32(true_value << 16)};
+    const IR::U32 fail_result{v.ir.Imm32(0)};
+    const IR::U32 result_lhs{v.ir.Select(bop_result_lhs, true_val_lhs, fail_result)};
+    const IR::U32 result_rhs{v.ir.Select(bop_result_rhs, true_val_rhs, fail_result)};
+
+    v.X(hset2.dest_reg, IR::U32{v.ir.BitwiseOr(result_lhs, result_rhs)});
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::HSET2_reg(u64 insn) {
+    union {
+        u64 insn;
+        BitField<30, 1, u64> abs_b;
+        BitField<49, 1, u64> bf;
+        BitField<31, 1, u64> neg_b;
+        BitField<50, 1, u64> ftz;
+        BitField<35, 4, FPCompareOp> compare_op;
+        BitField<28, 2, Swizzle> swizzle_b;
+    } const hset2{insn};
+
+    HSET2(*this, insn, GetReg20(insn), hset2.bf != 0, hset2.ftz != 0, hset2.neg_b != 0,
+          hset2.abs_b != 0, hset2.compare_op, hset2.swizzle_b);
+}
+
+void TranslatorVisitor::HSET2_cbuf(u64 insn) {
+    union {
+        u64 insn;
+        BitField<53, 1, u64> bf;
+        BitField<56, 1, u64> neg_b;
+        BitField<54, 1, u64> ftz;
+        BitField<49, 4, FPCompareOp> compare_op;
+    } const hset2{insn};
+
+    HSET2(*this, insn, GetCbuf(insn), hset2.bf != 0, hset2.ftz != 0, hset2.neg_b != 0, false,
+          hset2.compare_op, Swizzle::F32);
+}
+
+void TranslatorVisitor::HSET2_imm(u64 insn) {
+    union {
+        u64 insn;
+        BitField<53, 1, u64> bf;
+        BitField<54, 1, u64> ftz;
+        BitField<49, 4, FPCompareOp> compare_op;
+        BitField<56, 1, u64> neg_high;
+        BitField<30, 9, u64> high;
+        BitField<29, 1, u64> neg_low;
+        BitField<20, 9, u64> low;
+    } const hset2{insn};
+
+    const u32 imm{
+        static_cast<u32>(hset2.low << 6) | static_cast<u32>((hset2.neg_low != 0 ? 1 : 0) << 15) |
+        static_cast<u32>(hset2.high << 22) |
+        static_cast<u32>((hset2.neg_high != 0 ? 1 : 0) << 31)};
+
+    HSET2(*this, insn, ir.Imm32(imm), hset2.bf != 0, hset2.ftz != 0, false, false, hset2.compare_op,
+          Swizzle::H1_H0);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set_predicate.cpp
new file mode 100755
index 000000000..b3931dae3
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_set_predicate.cpp
@@ -0,0 +1,118 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
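+//
+// HSETP2 compares the two fp16 halves independently; in .H_AND mode the two
+// per-half results collapse into one complementary predicate pair, conceptually
+// (sketch):
+//
+//     bool p = cmp(lo_a, lo_b) && cmp(hi_a, hi_b);
+//     P_dest_a = p;
+//     P_dest_b = !p;
+//
+// while the normal mode writes one predicate per half.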
+ +#include "shader_recompiler/frontend/maxwell/translate/impl/half_floating_point_helper.h" + +namespace Shader::Maxwell { +namespace { +void HSETP2(TranslatorVisitor& v, u64 insn, const IR::U32& src_b, bool neg_b, bool abs_b, + Swizzle swizzle_b, FPCompareOp compare_op, bool h_and) { + union { + u64 insn; + BitField<8, 8, IR::Reg> src_a_reg; + BitField<3, 3, IR::Pred> dest_pred_a; + BitField<0, 3, IR::Pred> dest_pred_b; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + BitField<43, 1, u64> neg_a; + BitField<45, 2, BooleanOp> bop; + BitField<44, 1, u64> abs_a; + BitField<6, 1, u64> ftz; + BitField<47, 2, Swizzle> swizzle_a; + } const hsetp2{insn}; + + auto [lhs_a, rhs_a]{Extract(v.ir, v.X(hsetp2.src_a_reg), hsetp2.swizzle_a)}; + auto [lhs_b, rhs_b]{Extract(v.ir, src_b, swizzle_b)}; + + if (lhs_a.Type() != lhs_b.Type()) { + if (lhs_a.Type() == IR::Type::F16) { + lhs_a = v.ir.FPConvert(32, lhs_a); + rhs_a = v.ir.FPConvert(32, rhs_a); + } + if (lhs_b.Type() == IR::Type::F16) { + lhs_b = v.ir.FPConvert(32, lhs_b); + rhs_b = v.ir.FPConvert(32, rhs_b); + } + } + + lhs_a = v.ir.FPAbsNeg(lhs_a, hsetp2.abs_a != 0, hsetp2.neg_a != 0); + rhs_a = v.ir.FPAbsNeg(rhs_a, hsetp2.abs_a != 0, hsetp2.neg_a != 0); + + lhs_b = v.ir.FPAbsNeg(lhs_b, abs_b, neg_b); + rhs_b = v.ir.FPAbsNeg(rhs_b, abs_b, neg_b); + + const IR::FpControl control{ + .no_contraction = false, + .rounding = IR::FpRounding::DontCare, + .fmz_mode = (hsetp2.ftz != 0 ? IR::FmzMode::FTZ : IR::FmzMode::None), + }; + + IR::U1 pred{v.ir.GetPred(hsetp2.pred)}; + if (hsetp2.neg_pred != 0) { + pred = v.ir.LogicalNot(pred); + } + const IR::U1 cmp_result_lhs{FloatingPointCompare(v.ir, lhs_a, lhs_b, compare_op, control)}; + const IR::U1 cmp_result_rhs{FloatingPointCompare(v.ir, rhs_a, rhs_b, compare_op, control)}; + const IR::U1 bop_result_lhs{PredicateCombine(v.ir, cmp_result_lhs, pred, hsetp2.bop)}; + const IR::U1 bop_result_rhs{PredicateCombine(v.ir, cmp_result_rhs, pred, hsetp2.bop)}; + + if (h_and) { + auto result = v.ir.LogicalAnd(bop_result_lhs, bop_result_rhs); + v.ir.SetPred(hsetp2.dest_pred_a, result); + v.ir.SetPred(hsetp2.dest_pred_b, v.ir.LogicalNot(result)); + } else { + v.ir.SetPred(hsetp2.dest_pred_a, bop_result_lhs); + v.ir.SetPred(hsetp2.dest_pred_b, bop_result_rhs); + } +} +} // Anonymous namespace + +void TranslatorVisitor::HSETP2_reg(u64 insn) { + union { + u64 insn; + BitField<30, 1, u64> abs_b; + BitField<49, 1, u64> h_and; + BitField<31, 1, u64> neg_b; + BitField<35, 4, FPCompareOp> compare_op; + BitField<28, 2, Swizzle> swizzle_b; + } const hsetp2{insn}; + HSETP2(*this, insn, GetReg20(insn), hsetp2.neg_b != 0, hsetp2.abs_b != 0, hsetp2.swizzle_b, + hsetp2.compare_op, hsetp2.h_and != 0); +} + +void TranslatorVisitor::HSETP2_cbuf(u64 insn) { + union { + u64 insn; + BitField<53, 1, u64> h_and; + BitField<54, 1, u64> abs_b; + BitField<56, 1, u64> neg_b; + BitField<49, 4, FPCompareOp> compare_op; + } const hsetp2{insn}; + + HSETP2(*this, insn, GetCbuf(insn), hsetp2.neg_b != 0, hsetp2.abs_b != 0, Swizzle::F32, + hsetp2.compare_op, hsetp2.h_and != 0); +} + +void TranslatorVisitor::HSETP2_imm(u64 insn) { + union { + u64 insn; + BitField<53, 1, u64> h_and; + BitField<54, 1, u64> ftz; + BitField<49, 4, FPCompareOp> compare_op; + BitField<56, 1, u64> neg_high; + BitField<30, 9, u64> high; + BitField<29, 1, u64> neg_low; + BitField<20, 9, u64> low; + } const hsetp2{insn}; + + const u32 imm{static_cast(hsetp2.low << 6) | + static_cast((hsetp2.neg_low != 0 ? 
+
+    HSETP2(*this, insn, ir.Imm32(imm), false, false, Swizzle::H1_H0, hsetp2.compare_op,
+           hsetp2.h_and != 0);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/impl.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/impl.cpp
new file mode 100755
index 000000000..b446aae0e
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/impl.cpp
@@ -0,0 +1,272 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "common/bit_field.h"
+#include "shader_recompiler/frontend/ir/ir_emitter.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+[[nodiscard]] IR::U32 CbufLowerBits(IR::IREmitter& ir, bool unaligned, const IR::U32& binding,
+                                    u32 offset) {
+    if (unaligned) {
+        return ir.Imm32(0);
+    }
+    return ir.GetCbuf(binding, IR::U32{IR::Value{offset}});
+}
+} // Anonymous namespace
+
+IR::U32 TranslatorVisitor::X(IR::Reg reg) {
+    return ir.GetReg(reg);
+}
+
+IR::U64 TranslatorVisitor::L(IR::Reg reg) {
+    if (!IR::IsAligned(reg, 2)) {
+        throw NotImplementedException("Unaligned source register {}", reg);
+    }
+    return IR::U64{ir.PackUint2x32(ir.CompositeConstruct(X(reg), X(reg + 1)))};
+}
+
+IR::F32 TranslatorVisitor::F(IR::Reg reg) {
+    return ir.BitCast<IR::F32>(X(reg));
+}
+
+IR::F64 TranslatorVisitor::D(IR::Reg reg) {
+    if (!IR::IsAligned(reg, 2)) {
+        throw NotImplementedException("Unaligned source register {}", reg);
+    }
+    return IR::F64{ir.PackDouble2x32(ir.CompositeConstruct(X(reg), X(reg + 1)))};
+}
+
+void TranslatorVisitor::X(IR::Reg dest_reg, const IR::U32& value) {
+    ir.SetReg(dest_reg, value);
+}
+
+void TranslatorVisitor::L(IR::Reg dest_reg, const IR::U64& value) {
+    if (!IR::IsAligned(dest_reg, 2)) {
+        throw NotImplementedException("Unaligned destination register {}", dest_reg);
+    }
+    const IR::Value result{ir.UnpackUint2x32(value)};
+    for (int i = 0; i < 2; i++) {
+        X(dest_reg + i, IR::U32{ir.CompositeExtract(result, static_cast<size_t>(i))});
+    }
+}
+
+void TranslatorVisitor::F(IR::Reg dest_reg, const IR::F32& value) {
+    X(dest_reg, ir.BitCast<IR::U32>(value));
+}
+
+void TranslatorVisitor::D(IR::Reg dest_reg, const IR::F64& value) {
+    if (!IR::IsAligned(dest_reg, 2)) {
+        throw NotImplementedException("Unaligned destination register {}", dest_reg);
+    }
+    const IR::Value result{ir.UnpackDouble2x32(value)};
+    for (int i = 0; i < 2; i++) {
+        X(dest_reg + i, IR::U32{ir.CompositeExtract(result, static_cast<size_t>(i))});
+    }
+}
+
+IR::U32 TranslatorVisitor::GetReg8(u64 insn) {
+    union {
+        u64 raw;
+        BitField<8, 8, IR::Reg> index;
+    } const reg{insn};
+    return X(reg.index);
+}
+
+IR::U32 TranslatorVisitor::GetReg20(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 8, IR::Reg> index;
+    } const reg{insn};
+    return X(reg.index);
+}
+
+IR::U32 TranslatorVisitor::GetReg39(u64 insn) {
+    union {
+        u64 raw;
+        BitField<39, 8, IR::Reg> index;
+    } const reg{insn};
+    return X(reg.index);
+}
+
+IR::F32 TranslatorVisitor::GetFloatReg8(u64 insn) {
+    return ir.BitCast<IR::F32>(GetReg8(insn));
+}
+
+IR::F32 TranslatorVisitor::GetFloatReg20(u64 insn) {
+    return ir.BitCast<IR::F32>(GetReg20(insn));
+}
+
+IR::F32 TranslatorVisitor::GetFloatReg39(u64 insn) {
+    return ir.BitCast<IR::F32>(GetReg39(insn));
+}
+
+IR::F64 TranslatorVisitor::GetDoubleReg20(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 8, IR::Reg> index;
+    } const reg{insn};
+    return D(reg.index);
+}
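+
+// Note on the wide accessors above and below (illustrative sketch): 64-bit
+// operands occupy an even-aligned register pair, e.g. D(IR::Reg::R2) reads
+// R2:R3 via PackDouble2x32, while an odd base register such as R3 throws
+// NotImplementedException in D().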
+
+IR::F64 TranslatorVisitor::GetDoubleReg39(u64 insn) {
+    union {
+        u64 raw;
+        BitField<39, 8, IR::Reg> index;
+    } const reg{insn};
+    return D(reg.index);
+}
+
+static std::pair<IR::U32, IR::U32> CbufAddr(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 14, u64> offset;
+        BitField<34, 5, u64> binding;
+    } const cbuf{insn};
+
+    if (cbuf.binding >= 18) {
+        throw NotImplementedException("Out of bounds constant buffer binding {}", cbuf.binding);
+    }
+    if (cbuf.offset >= 0x10'000) {
+        throw NotImplementedException("Out of bounds constant buffer offset {}", cbuf.offset);
+    }
+    const IR::Value binding{static_cast<u32>(cbuf.binding)};
+    const IR::Value byte_offset{static_cast<u32>(cbuf.offset) * 4};
+    return {IR::U32{binding}, IR::U32{byte_offset}};
+}
+
+IR::U32 TranslatorVisitor::GetCbuf(u64 insn) {
+    const auto [binding, byte_offset]{CbufAddr(insn)};
+    return ir.GetCbuf(binding, byte_offset);
+}
+
+IR::F32 TranslatorVisitor::GetFloatCbuf(u64 insn) {
+    const auto [binding, byte_offset]{CbufAddr(insn)};
+    return ir.GetFloatCbuf(binding, byte_offset);
+}
+
+IR::F64 TranslatorVisitor::GetDoubleCbuf(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 1, u64> unaligned;
+    } const cbuf{insn};
+
+    const auto [binding, offset_value]{CbufAddr(insn)};
+    const bool unaligned{cbuf.unaligned != 0};
+    const u32 offset{offset_value.U32()};
+    const IR::Value addr{unaligned ? offset | 4u : (offset & ~7u) | 4u};
+
+    const IR::U32 value{ir.GetCbuf(binding, IR::U32{addr})};
+    const IR::U32 lower_bits{CbufLowerBits(ir, unaligned, binding, offset)};
+    return ir.PackDouble2x32(ir.CompositeConstruct(lower_bits, value));
+}
+
+IR::U64 TranslatorVisitor::GetPackedCbuf(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 1, u64> unaligned;
+    } const cbuf{insn};
+
+    if (cbuf.unaligned != 0) {
+        throw NotImplementedException("Unaligned packed constant buffer read");
+    }
+    const auto [binding, lower_offset]{CbufAddr(insn)};
+    const IR::U32 upper_offset{ir.Imm32(lower_offset.U32() + 4)};
+    const IR::U32 lower_value{ir.GetCbuf(binding, lower_offset)};
+    const IR::U32 upper_value{ir.GetCbuf(binding, upper_offset)};
+    return ir.PackUint2x32(ir.CompositeConstruct(lower_value, upper_value));
+}
+
+IR::U32 TranslatorVisitor::GetImm20(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 19, u64> value;
+        BitField<56, 1, u64> is_negative;
+    } const imm{insn};
+
+    if (imm.is_negative != 0) {
+        const s64 raw{static_cast<s64>(imm.value)};
+        return ir.Imm32(static_cast<s32>(-(1LL << 19) + raw));
+    } else {
+        return ir.Imm32(static_cast<u32>(imm.value));
+    }
+}
+
+IR::F32 TranslatorVisitor::GetFloatImm20(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 19, u64> value;
+        BitField<56, 1, u64> is_negative;
+    } const imm{insn};
+    const u32 sign_bit{static_cast<u32>(imm.is_negative != 0 ? (1ULL << 31) : 0)};
+    const u32 value{static_cast<u32>(imm.value) << 12};
+    return ir.Imm32(Common::BitCast<f32>(value | sign_bit));
+}
+
+IR::F64 TranslatorVisitor::GetDoubleImm20(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 19, u64> value;
+        BitField<56, 1, u64> is_negative;
+    } const imm{insn};
+    const u64 sign_bit{imm.is_negative != 0 ? (1ULL << 63) : 0};
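+    // The 19 immediate bits form the top of the f64 (worked sketch): value << 44
+    // places them at bits [62:44], covering the exponent and upper mantissa, so
+    // an immediate of 0x3FF00 expands to 0x3FF0'0000'0000'0000 == 1.0.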
+    const u64 value{imm.value << 44};
+    return ir.Imm64(Common::BitCast<f64>(value | sign_bit));
+}
+
+IR::U64 TranslatorVisitor::GetPackedImm20(u64 insn) {
+    const s64 value{GetImm20(insn).U32()};
+    return ir.Imm64(static_cast<u64>(static_cast<s64>(value) << 32));
+}
+
+IR::U32 TranslatorVisitor::GetImm32(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 32, u64> value;
+    } const imm{insn};
+    return ir.Imm32(static_cast<u32>(imm.value));
+}
+
+IR::F32 TranslatorVisitor::GetFloatImm32(u64 insn) {
+    union {
+        u64 raw;
+        BitField<20, 32, u64> value;
+    } const imm{insn};
+    return ir.Imm32(Common::BitCast<f32>(static_cast<u32>(imm.value)));
+}
+
+void TranslatorVisitor::SetZFlag(const IR::U1& value) {
+    ir.SetZFlag(value);
+}
+
+void TranslatorVisitor::SetSFlag(const IR::U1& value) {
+    ir.SetSFlag(value);
+}
+
+void TranslatorVisitor::SetCFlag(const IR::U1& value) {
+    ir.SetCFlag(value);
+}
+
+void TranslatorVisitor::SetOFlag(const IR::U1& value) {
+    ir.SetOFlag(value);
+}
+
+void TranslatorVisitor::ResetZero() {
+    SetZFlag(ir.Imm1(false));
+}
+
+void TranslatorVisitor::ResetSFlag() {
+    SetSFlag(ir.Imm1(false));
+}
+
+void TranslatorVisitor::ResetCFlag() {
+    SetCFlag(ir.Imm1(false));
+}
+
+void TranslatorVisitor::ResetOFlag() {
+    SetOFlag(ir.Imm1(false));
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/impl.h b/src/shader_recompiler/frontend/maxwell/translate/impl/impl.h
new file mode 100755
index 000000000..335e4f24f
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/impl.h
@@ -0,0 +1,387 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include "shader_recompiler/environment.h"
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/ir_emitter.h"
+#include "shader_recompiler/frontend/maxwell/instruction.h"
+
+namespace Shader::Maxwell {
+
+enum class CompareOp : u64 {
+    False,
+    LessThan,
+    Equal,
+    LessThanEqual,
+    GreaterThan,
+    NotEqual,
+    GreaterThanEqual,
+    True,
+};
+
+enum class BooleanOp : u64 {
+    AND,
+    OR,
+    XOR,
+};
+
+enum class PredicateOp : u64 {
+    False,
+    True,
+    Zero,
+    NonZero,
+};
+
+enum class FPCompareOp : u64 {
+    F,
+    LT,
+    EQ,
+    LE,
+    GT,
+    NE,
+    GE,
+    NUM,
+    Nan,
+    LTU,
+    EQU,
+    LEU,
+    GTU,
+    NEU,
+    GEU,
+    T,
+};
+
+class TranslatorVisitor {
+public:
+    explicit TranslatorVisitor(Environment& env_, IR::Block& block) : env{env_}, ir(block) {}
+
+    Environment& env;
+    IR::IREmitter ir;
+
+    void AL2P(u64 insn);
+    void ALD(u64 insn);
+    void AST(u64 insn);
+    void ATOM_cas(u64 insn);
+    void ATOM(u64 insn);
+    void ATOMS_cas(u64 insn);
+    void ATOMS(u64 insn);
+    void B2R(u64 insn);
+    void BAR(u64 insn);
+    void BFE_reg(u64 insn);
+    void BFE_cbuf(u64 insn);
+    void BFE_imm(u64 insn);
+    void BFI_reg(u64 insn);
+    void BFI_rc(u64 insn);
+    void BFI_cr(u64 insn);
+    void BFI_imm(u64 insn);
+    void BPT(u64 insn);
+    void BRA(u64 insn);
+    void BRK(u64 insn);
+    void BRX(u64 insn);
+    void CAL();
+    void CCTL(u64 insn);
+    void CCTLL(u64 insn);
+    void CONT(u64 insn);
+    void CS2R(u64 insn);
+    void CSET(u64 insn);
+    void CSETP(u64 insn);
+    void DADD_reg(u64 insn);
+    void DADD_cbuf(u64 insn);
+    void DADD_imm(u64 insn);
+    void DEPBAR();
+    void DFMA_reg(u64 insn);
+    void DFMA_rc(u64 insn);
+    void DFMA_cr(u64 insn);
+    void DFMA_imm(u64 insn);
+    void DMNMX_reg(u64 insn);
+    void DMNMX_cbuf(u64 insn);
+    void DMNMX_imm(u64 insn);
+    void DMUL_reg(u64 insn);
+    void DMUL_cbuf(u64 insn);
+    void
DMUL_imm(u64 insn); + void DSET_reg(u64 insn); + void DSET_cbuf(u64 insn); + void DSET_imm(u64 insn); + void DSETP_reg(u64 insn); + void DSETP_cbuf(u64 insn); + void DSETP_imm(u64 insn); + void EXIT(); + void F2F_reg(u64 insn); + void F2F_cbuf(u64 insn); + void F2F_imm(u64 insn); + void F2I_reg(u64 insn); + void F2I_cbuf(u64 insn); + void F2I_imm(u64 insn); + void FADD_reg(u64 insn); + void FADD_cbuf(u64 insn); + void FADD_imm(u64 insn); + void FADD32I(u64 insn); + void FCHK_reg(u64 insn); + void FCHK_cbuf(u64 insn); + void FCHK_imm(u64 insn); + void FCMP_reg(u64 insn); + void FCMP_rc(u64 insn); + void FCMP_cr(u64 insn); + void FCMP_imm(u64 insn); + void FFMA_reg(u64 insn); + void FFMA_rc(u64 insn); + void FFMA_cr(u64 insn); + void FFMA_imm(u64 insn); + void FFMA32I(u64 insn); + void FLO_reg(u64 insn); + void FLO_cbuf(u64 insn); + void FLO_imm(u64 insn); + void FMNMX_reg(u64 insn); + void FMNMX_cbuf(u64 insn); + void FMNMX_imm(u64 insn); + void FMUL_reg(u64 insn); + void FMUL_cbuf(u64 insn); + void FMUL_imm(u64 insn); + void FMUL32I(u64 insn); + void FSET_reg(u64 insn); + void FSET_cbuf(u64 insn); + void FSET_imm(u64 insn); + void FSETP_reg(u64 insn); + void FSETP_cbuf(u64 insn); + void FSETP_imm(u64 insn); + void FSWZADD(u64 insn); + void GETCRSPTR(u64 insn); + void GETLMEMBASE(u64 insn); + void HADD2_reg(u64 insn); + void HADD2_cbuf(u64 insn); + void HADD2_imm(u64 insn); + void HADD2_32I(u64 insn); + void HFMA2_reg(u64 insn); + void HFMA2_rc(u64 insn); + void HFMA2_cr(u64 insn); + void HFMA2_imm(u64 insn); + void HFMA2_32I(u64 insn); + void HMUL2_reg(u64 insn); + void HMUL2_cbuf(u64 insn); + void HMUL2_imm(u64 insn); + void HMUL2_32I(u64 insn); + void HSET2_reg(u64 insn); + void HSET2_cbuf(u64 insn); + void HSET2_imm(u64 insn); + void HSETP2_reg(u64 insn); + void HSETP2_cbuf(u64 insn); + void HSETP2_imm(u64 insn); + void I2F_reg(u64 insn); + void I2F_cbuf(u64 insn); + void I2F_imm(u64 insn); + void I2I_reg(u64 insn); + void I2I_cbuf(u64 insn); + void I2I_imm(u64 insn); + void IADD_reg(u64 insn); + void IADD_cbuf(u64 insn); + void IADD_imm(u64 insn); + void IADD3_reg(u64 insn); + void IADD3_cbuf(u64 insn); + void IADD3_imm(u64 insn); + void IADD32I(u64 insn); + void ICMP_reg(u64 insn); + void ICMP_rc(u64 insn); + void ICMP_cr(u64 insn); + void ICMP_imm(u64 insn); + void IDE(u64 insn); + void IDP_reg(u64 insn); + void IDP_imm(u64 insn); + void IMAD_reg(u64 insn); + void IMAD_rc(u64 insn); + void IMAD_cr(u64 insn); + void IMAD_imm(u64 insn); + void IMAD32I(u64 insn); + void IMADSP_reg(u64 insn); + void IMADSP_rc(u64 insn); + void IMADSP_cr(u64 insn); + void IMADSP_imm(u64 insn); + void IMNMX_reg(u64 insn); + void IMNMX_cbuf(u64 insn); + void IMNMX_imm(u64 insn); + void IMUL_reg(u64 insn); + void IMUL_cbuf(u64 insn); + void IMUL_imm(u64 insn); + void IMUL32I(u64 insn); + void IPA(u64 insn); + void ISBERD(u64 insn); + void ISCADD_reg(u64 insn); + void ISCADD_cbuf(u64 insn); + void ISCADD_imm(u64 insn); + void ISCADD32I(u64 insn); + void ISET_reg(u64 insn); + void ISET_cbuf(u64 insn); + void ISET_imm(u64 insn); + void ISETP_reg(u64 insn); + void ISETP_cbuf(u64 insn); + void ISETP_imm(u64 insn); + void JCAL(u64 insn); + void JMP(u64 insn); + void JMX(u64 insn); + void KIL(); + void LD(u64 insn); + void LDC(u64 insn); + void LDG(u64 insn); + void LDL(u64 insn); + void LDS(u64 insn); + void LEA_hi_reg(u64 insn); + void LEA_hi_cbuf(u64 insn); + void LEA_lo_reg(u64 insn); + void LEA_lo_cbuf(u64 insn); + void LEA_lo_imm(u64 insn); + void LEPC(u64 insn); + void LONGJMP(u64 insn); + void LOP_reg(u64 
insn); + void LOP_cbuf(u64 insn); + void LOP_imm(u64 insn); + void LOP3_reg(u64 insn); + void LOP3_cbuf(u64 insn); + void LOP3_imm(u64 insn); + void LOP32I(u64 insn); + void MEMBAR(u64 insn); + void MOV_reg(u64 insn); + void MOV_cbuf(u64 insn); + void MOV_imm(u64 insn); + void MOV32I(u64 insn); + void MUFU(u64 insn); + void NOP(u64 insn); + void OUT_reg(u64 insn); + void OUT_cbuf(u64 insn); + void OUT_imm(u64 insn); + void P2R_reg(u64 insn); + void P2R_cbuf(u64 insn); + void P2R_imm(u64 insn); + void PBK(); + void PCNT(); + void PEXIT(u64 insn); + void PIXLD(u64 insn); + void PLONGJMP(u64 insn); + void POPC_reg(u64 insn); + void POPC_cbuf(u64 insn); + void POPC_imm(u64 insn); + void PRET(u64 insn); + void PRMT_reg(u64 insn); + void PRMT_rc(u64 insn); + void PRMT_cr(u64 insn); + void PRMT_imm(u64 insn); + void PSET(u64 insn); + void PSETP(u64 insn); + void R2B(u64 insn); + void R2P_reg(u64 insn); + void R2P_cbuf(u64 insn); + void R2P_imm(u64 insn); + void RAM(u64 insn); + void RED(u64 insn); + void RET(u64 insn); + void RRO_reg(u64 insn); + void RRO_cbuf(u64 insn); + void RRO_imm(u64 insn); + void RTT(u64 insn); + void S2R(u64 insn); + void SAM(u64 insn); + void SEL_reg(u64 insn); + void SEL_cbuf(u64 insn); + void SEL_imm(u64 insn); + void SETCRSPTR(u64 insn); + void SETLMEMBASE(u64 insn); + void SHF_l_reg(u64 insn); + void SHF_l_imm(u64 insn); + void SHF_r_reg(u64 insn); + void SHF_r_imm(u64 insn); + void SHFL(u64 insn); + void SHL_reg(u64 insn); + void SHL_cbuf(u64 insn); + void SHL_imm(u64 insn); + void SHR_reg(u64 insn); + void SHR_cbuf(u64 insn); + void SHR_imm(u64 insn); + void SSY(); + void ST(u64 insn); + void STG(u64 insn); + void STL(u64 insn); + void STP(u64 insn); + void STS(u64 insn); + void SUATOM(u64 insn); + void SUATOM_cas(u64 insn); + void SULD(u64 insn); + void SURED(u64 insn); + void SUST(u64 insn); + void SYNC(u64 insn); + void TEX(u64 insn); + void TEX_b(u64 insn); + void TEXS(u64 insn); + void TLD(u64 insn); + void TLD_b(u64 insn); + void TLD4(u64 insn); + void TLD4_b(u64 insn); + void TLD4S(u64 insn); + void TLDS(u64 insn); + void TMML(u64 insn); + void TMML_b(u64 insn); + void TXA(u64 insn); + void TXD(u64 insn); + void TXD_b(u64 insn); + void TXQ(u64 insn); + void TXQ_b(u64 insn); + void VABSDIFF(u64 insn); + void VABSDIFF4(u64 insn); + void VADD(u64 insn); + void VMAD(u64 insn); + void VMNMX(u64 insn); + void VOTE(u64 insn); + void VOTE_vtg(u64 insn); + void VSET(u64 insn); + void VSETP(u64 insn); + void VSHL(u64 insn); + void VSHR(u64 insn); + void XMAD_reg(u64 insn); + void XMAD_rc(u64 insn); + void XMAD_cr(u64 insn); + void XMAD_imm(u64 insn); + + [[nodiscard]] IR::U32 X(IR::Reg reg); + [[nodiscard]] IR::U64 L(IR::Reg reg); + [[nodiscard]] IR::F32 F(IR::Reg reg); + [[nodiscard]] IR::F64 D(IR::Reg reg); + + void X(IR::Reg dest_reg, const IR::U32& value); + void L(IR::Reg dest_reg, const IR::U64& value); + void F(IR::Reg dest_reg, const IR::F32& value); + void D(IR::Reg dest_reg, const IR::F64& value); + + [[nodiscard]] IR::U32 GetReg8(u64 insn); + [[nodiscard]] IR::U32 GetReg20(u64 insn); + [[nodiscard]] IR::U32 GetReg39(u64 insn); + [[nodiscard]] IR::F32 GetFloatReg8(u64 insn); + [[nodiscard]] IR::F32 GetFloatReg20(u64 insn); + [[nodiscard]] IR::F32 GetFloatReg39(u64 insn); + [[nodiscard]] IR::F64 GetDoubleReg20(u64 insn); + [[nodiscard]] IR::F64 GetDoubleReg39(u64 insn); + + [[nodiscard]] IR::U32 GetCbuf(u64 insn); + [[nodiscard]] IR::F32 GetFloatCbuf(u64 insn); + [[nodiscard]] IR::F64 GetDoubleCbuf(u64 insn); + [[nodiscard]] IR::U64 GetPackedCbuf(u64 insn); + 
+ [[nodiscard]] IR::U32 GetImm20(u64 insn); + [[nodiscard]] IR::F32 GetFloatImm20(u64 insn); + [[nodiscard]] IR::F64 GetDoubleImm20(u64 insn); + [[nodiscard]] IR::U64 GetPackedImm20(u64 insn); + + [[nodiscard]] IR::U32 GetImm32(u64 insn); + [[nodiscard]] IR::F32 GetFloatImm32(u64 insn); + + void SetZFlag(const IR::U1& value); + void SetSFlag(const IR::U1& value); + void SetCFlag(const IR::U1& value); + void SetOFlag(const IR::U1& value); + + void ResetZero(); + void ResetSFlag(); + void ResetCFlag(); + void ResetOFlag(); +}; + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_add.cpp new file mode 100755 index 000000000..8ffd84867 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_add.cpp @@ -0,0 +1,105 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void IADD(TranslatorVisitor& v, u64 insn, const IR::U32 op_b, bool neg_a, bool po, bool sat, bool x, + bool cc) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_a; + } const iadd{insn}; + + if (sat) { + throw NotImplementedException("IADD SAT"); + } + if (x && po) { + throw NotImplementedException("IADD X+PO"); + } + // Operand A is always read from here, negated if needed + IR::U32 op_a{v.X(iadd.src_a)}; + if (neg_a) { + op_a = v.ir.INeg(op_a); + } + // Add both operands + IR::U32 result{v.ir.IAdd(op_a, op_b)}; + if (x) { + const IR::U32 carry{v.ir.Select(v.ir.GetCFlag(), v.ir.Imm32(1), v.ir.Imm32(0))}; + result = v.ir.IAdd(result, carry); + } + if (po) { + // .PO adds one to the result + result = v.ir.IAdd(result, v.ir.Imm32(1)); + } + if (cc) { + // Store flags + // TODO: Does this grab the result pre-PO or after? + if (po) { + throw NotImplementedException("IADD CC+PO"); + } + // TODO: How does CC behave when X is set? 
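+        // (Background sketch: X chains multi-word additions through the carry
+        // flag, e.g. a 64-bit add is built from two 32-bit adds:
+        //
+        //     IADD.CC R0, R2, R4;  // low word, writes carry out
+        //     IADD.X  R1, R3, R5;  // high word, consumes carry in
+        //
+        // which is why the CC+X combination needs dedicated handling.)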
+ if (x) { + throw NotImplementedException("IADD X+CC"); + } + v.SetZFlag(v.ir.GetZeroFromOp(result)); + v.SetSFlag(v.ir.GetSignFromOp(result)); + v.SetCFlag(v.ir.GetCarryFromOp(result)); + v.SetOFlag(v.ir.GetOverflowFromOp(result)); + } + // Store result + v.X(iadd.dest_reg, result); +} + +void IADD(TranslatorVisitor& v, u64 insn, IR::U32 op_b) { + union { + u64 insn; + BitField<43, 1, u64> x; + BitField<47, 1, u64> cc; + BitField<48, 2, u64> three_for_po; + BitField<48, 1, u64> neg_b; + BitField<49, 1, u64> neg_a; + BitField<50, 1, u64> sat; + } const iadd{insn}; + + const bool po{iadd.three_for_po == 3}; + if (!po && iadd.neg_b != 0) { + op_b = v.ir.INeg(op_b); + } + IADD(v, insn, op_b, iadd.neg_a != 0, po, iadd.sat != 0, iadd.x != 0, iadd.cc != 0); +} +} // Anonymous namespace + +void TranslatorVisitor::IADD_reg(u64 insn) { + IADD(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::IADD_cbuf(u64 insn) { + IADD(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::IADD_imm(u64 insn) { + IADD(*this, insn, GetImm20(insn)); +} + +void TranslatorVisitor::IADD32I(u64 insn) { + union { + u64 raw; + BitField<52, 1, u64> cc; + BitField<53, 1, u64> x; + BitField<54, 1, u64> sat; + BitField<55, 2, u64> three_for_po; + BitField<56, 1, u64> neg_a; + } const iadd32i{insn}; + + const bool po{iadd32i.three_for_po == 3}; + const bool neg_a{!po && iadd32i.neg_a != 0}; + IADD(*this, insn, GetImm32(insn), neg_a, po, iadd32i.sat != 0, iadd32i.x != 0, iadd32i.cc != 0); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_add_three_input.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_add_three_input.cpp new file mode 100755 index 000000000..040cfc10f --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_add_three_input.cpp @@ -0,0 +1,122 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
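+//
+// IADD3 adds three operands in a single instruction, optionally selecting
+// 16-bit halves of each source and shifting the intermediate sum, conceptually
+// (sketch):
+//
+//     result = shift(half(a) + half(b)) + half(c);
+//
+// where shift is none, a right shift by 16 (RS) or a left shift by 16 (LS);
+// the RS path below also models a hardware quirk where the carry of the first
+// add feeds bit 16 of the shifted value.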
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Shift : u64 { + None, + Right, + Left, +}; +enum class Half : u64 { + All, + Lower, + Upper, +}; + +[[nodiscard]] IR::U32 IntegerHalf(IR::IREmitter& ir, const IR::U32& value, Half half) { + constexpr bool is_signed{false}; + switch (half) { + case Half::All: + return value; + case Half::Lower: + return ir.BitFieldExtract(value, ir.Imm32(0), ir.Imm32(16), is_signed); + case Half::Upper: + return ir.BitFieldExtract(value, ir.Imm32(16), ir.Imm32(16), is_signed); + } + throw NotImplementedException("Invalid half"); +} + +[[nodiscard]] IR::U32 IntegerShift(IR::IREmitter& ir, const IR::U32& value, Shift shift) { + switch (shift) { + case Shift::None: + return value; + case Shift::Right: { + // 33-bit RS IADD3 edge case + const IR::U1 edge_case{ir.GetCarryFromOp(value)}; + const IR::U32 shifted{ir.ShiftRightLogical(value, ir.Imm32(16))}; + return IR::U32{ir.Select(edge_case, ir.IAdd(shifted, ir.Imm32(0x10000)), shifted)}; + } + case Shift::Left: + return ir.ShiftLeftLogical(value, ir.Imm32(16)); + } + throw NotImplementedException("Invalid shift"); +} + +void IADD3(TranslatorVisitor& v, u64 insn, IR::U32 op_a, IR::U32 op_b, IR::U32 op_c, + Shift shift = Shift::None) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> x; + BitField<49, 1, u64> neg_c; + BitField<50, 1, u64> neg_b; + BitField<51, 1, u64> neg_a; + } iadd3{insn}; + + if (iadd3.neg_a != 0) { + op_a = v.ir.INeg(op_a); + } + if (iadd3.neg_b != 0) { + op_b = v.ir.INeg(op_b); + } + if (iadd3.neg_c != 0) { + op_c = v.ir.INeg(op_c); + } + IR::U32 lhs_1{v.ir.IAdd(op_a, op_b)}; + if (iadd3.x != 0) { + // TODO: How does RS behave when X is set? + if (shift == Shift::Right) { + throw NotImplementedException("IADD3 X+RS"); + } + const IR::U32 carry{v.ir.Select(v.ir.GetCFlag(), v.ir.Imm32(1), v.ir.Imm32(0))}; + lhs_1 = v.ir.IAdd(lhs_1, carry); + } + const IR::U32 lhs_2{IntegerShift(v.ir, lhs_1, shift)}; + const IR::U32 result{v.ir.IAdd(lhs_2, op_c)}; + + v.X(iadd3.dest_reg, result); + if (iadd3.cc != 0) { + // TODO: How does CC behave when X is set? 
+ if (iadd3.x != 0) { + throw NotImplementedException("IADD3 X+CC"); + } + v.SetZFlag(v.ir.GetZeroFromOp(result)); + v.SetSFlag(v.ir.GetSignFromOp(result)); + v.SetCFlag(v.ir.GetCarryFromOp(result)); + const IR::U1 of_1{v.ir.ILessThan(lhs_1, op_a, false)}; + v.SetOFlag(v.ir.LogicalOr(v.ir.GetOverflowFromOp(result), of_1)); + } +} +} // Anonymous namespace + +void TranslatorVisitor::IADD3_reg(u64 insn) { + union { + u64 insn; + BitField<37, 2, Shift> shift; + BitField<35, 2, Half> half_a; + BitField<33, 2, Half> half_b; + BitField<31, 2, Half> half_c; + } const iadd3{insn}; + + const auto op_a{IntegerHalf(ir, GetReg8(insn), iadd3.half_a)}; + const auto op_b{IntegerHalf(ir, GetReg20(insn), iadd3.half_b)}; + const auto op_c{IntegerHalf(ir, GetReg39(insn), iadd3.half_c)}; + IADD3(*this, insn, op_a, op_b, op_c, iadd3.shift); +} + +void TranslatorVisitor::IADD3_cbuf(u64 insn) { + IADD3(*this, insn, GetReg8(insn), GetCbuf(insn), GetReg39(insn)); +} + +void TranslatorVisitor::IADD3_imm(u64 insn) { + IADD3(*this, insn, GetReg8(insn), GetImm20(insn), GetReg39(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare.cpp new file mode 100755 index 000000000..ba6e01926 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare.cpp @@ -0,0 +1,48 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void ICMP(TranslatorVisitor& v, u64 insn, const IR::U32& src_a, const IR::U32& operand) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<48, 1, u64> is_signed; + BitField<49, 3, CompareOp> compare_op; + } const icmp{insn}; + + const IR::U32 zero{v.ir.Imm32(0)}; + const bool is_signed{icmp.is_signed != 0}; + const IR::U1 cmp_result{IntegerCompare(v.ir, operand, zero, icmp.compare_op, is_signed)}; + + const IR::U32 src_reg{v.X(icmp.src_reg)}; + const IR::U32 result{v.ir.Select(cmp_result, src_reg, src_a)}; + + v.X(icmp.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::ICMP_reg(u64 insn) { + ICMP(*this, insn, GetReg20(insn), GetReg39(insn)); +} + +void TranslatorVisitor::ICMP_rc(u64 insn) { + ICMP(*this, insn, GetReg39(insn), GetCbuf(insn)); +} + +void TranslatorVisitor::ICMP_cr(u64 insn) { + ICMP(*this, insn, GetCbuf(insn), GetReg39(insn)); +} + +void TranslatorVisitor::ICMP_imm(u64 insn) { + ICMP(*this, insn, GetImm20(insn), GetReg39(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare_and_set.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare_and_set.cpp new file mode 100755 index 000000000..34fa7345c --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_compare_and_set.cpp @@ -0,0 +1,123 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
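+//
+// Background for the extended (.X) compare below (sketch): the hardware forms
+// intermediate = a + ~b + carry, the borrow-aware difference of the current
+// word, and folds in the Z flag of the previous word, which is how multi-word
+// comparisons are phrased, e.g. for a two-word unsigned less-than:
+//
+//     bool lt64 = (hi_a < hi_b) || (hi_a == hi_b && lo_a < lo_b);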
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +IR::U1 ExtendedIntegerCompare(IR::IREmitter& ir, const IR::U32& operand_1, const IR::U32& operand_2, + CompareOp compare_op, bool is_signed) { + const IR::U32 zero{ir.Imm32(0)}; + const IR::U32 carry{ir.Select(ir.GetCFlag(), ir.Imm32(1), zero)}; + const IR::U1 z_flag{ir.GetZFlag()}; + const IR::U32 intermediate{ir.IAdd(ir.IAdd(operand_1, ir.BitwiseNot(operand_2)), carry)}; + const IR::U1 flip_logic{is_signed ? ir.Imm1(false) + : ir.LogicalXor(ir.ILessThan(operand_1, zero, true), + ir.ILessThan(operand_2, zero, true))}; + switch (compare_op) { + case CompareOp::False: + return ir.Imm1(false); + case CompareOp::LessThan: + return IR::U1{ir.Select(flip_logic, ir.IGreaterThanEqual(intermediate, zero, true), + ir.ILessThan(intermediate, zero, true))}; + case CompareOp::Equal: + return ir.LogicalAnd(ir.IEqual(intermediate, zero), z_flag); + case CompareOp::LessThanEqual: { + const IR::U1 base_cmp{ir.Select(flip_logic, ir.IGreaterThanEqual(intermediate, zero, true), + ir.ILessThan(intermediate, zero, true))}; + return ir.LogicalOr(base_cmp, ir.LogicalAnd(ir.IEqual(intermediate, zero), z_flag)); + } + case CompareOp::GreaterThan: { + const IR::U1 base_cmp{ir.Select(flip_logic, ir.ILessThanEqual(intermediate, zero, true), + ir.IGreaterThan(intermediate, zero, true))}; + const IR::U1 not_z{ir.LogicalNot(z_flag)}; + return ir.LogicalOr(base_cmp, ir.LogicalAnd(ir.IEqual(intermediate, zero), not_z)); + } + case CompareOp::NotEqual: + return ir.LogicalOr(ir.INotEqual(intermediate, zero), + ir.LogicalAnd(ir.IEqual(intermediate, zero), ir.LogicalNot(z_flag))); + case CompareOp::GreaterThanEqual: { + const IR::U1 base_cmp{ir.Select(flip_logic, ir.ILessThan(intermediate, zero, true), + ir.IGreaterThanEqual(intermediate, zero, true))}; + return ir.LogicalOr(base_cmp, ir.LogicalAnd(ir.IEqual(intermediate, zero), z_flag)); + } + case CompareOp::True: + return ir.Imm1(true); + default: + throw NotImplementedException("Invalid compare op {}", compare_op); + } +} + +IR::U1 IsetCompare(IR::IREmitter& ir, const IR::U32& operand_1, const IR::U32& operand_2, + CompareOp compare_op, bool is_signed, bool x) { + return x ? ExtendedIntegerCompare(ir, operand_1, operand_2, compare_op, is_signed) + : IntegerCompare(ir, operand_1, operand_2, compare_op, is_signed); +} + +void ISET(TranslatorVisitor& v, u64 insn, const IR::U32& src_b) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + BitField<43, 1, u64> x; + BitField<44, 1, u64> bf; + BitField<45, 2, BooleanOp> bop; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> is_signed; + BitField<49, 3, CompareOp> compare_op; + } const iset{insn}; + + const IR::U32 src_a{v.X(iset.src_reg)}; + const bool is_signed{iset.is_signed != 0}; + const IR::U32 zero{v.ir.Imm32(0)}; + const bool x{iset.x != 0}; + const IR::U1 cmp_result{IsetCompare(v.ir, src_a, src_b, iset.compare_op, is_signed, x)}; + + IR::U1 pred{v.ir.GetPred(iset.pred)}; + if (iset.neg_pred != 0) { + pred = v.ir.LogicalNot(pred); + } + const IR::U1 bop_result{PredicateCombine(v.ir, cmp_result, pred, iset.bop)}; + + const IR::U32 one_mask{v.ir.Imm32(-1)}; + const IR::U32 fp_one{v.ir.Imm32(0x3f800000)}; + const IR::U32 pass_result{iset.bf == 0 ? 
one_mask : fp_one};
+    const IR::U32 result{v.ir.Select(bop_result, pass_result, zero)};
+
+    v.X(iset.dest_reg, result);
+    if (iset.cc != 0) {
+        if (x) {
+            throw NotImplementedException("ISET.CC + X");
+        }
+        const IR::U1 is_zero{v.ir.IEqual(result, zero)};
+        v.SetZFlag(is_zero);
+        if (iset.bf != 0) {
+            v.ResetSFlag();
+        } else {
+            v.SetSFlag(v.ir.LogicalNot(is_zero));
+        }
+        v.ResetCFlag();
+        v.ResetOFlag();
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::ISET_reg(u64 insn) {
+    ISET(*this, insn, GetReg20(insn));
+}
+
+void TranslatorVisitor::ISET_cbuf(u64 insn) {
+    ISET(*this, insn, GetCbuf(insn));
+}
+
+void TranslatorVisitor::ISET_imm(u64 insn) {
+    ISET(*this, insn, GetImm20(insn));
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_floating_point_conversion.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_floating_point_conversion.cpp
new file mode 100755
index 000000000..e0e157275
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_floating_point_conversion.cpp
@@ -0,0 +1,180 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/common_encoding.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class FloatFormat : u64 {
+    F16 = 1,
+    F32 = 2,
+    F64 = 3,
+};
+
+enum class IntFormat : u64 {
+    U8 = 0,
+    U16 = 1,
+    U32 = 2,
+    U64 = 3,
+};
+
+union Encoding {
+    u64 raw;
+    BitField<0, 8, IR::Reg> dest_reg;
+    BitField<8, 2, FloatFormat> float_format;
+    BitField<10, 2, IntFormat> int_format;
+    BitField<13, 1, u64> is_signed;
+    BitField<39, 2, FpRounding> fp_rounding;
+    BitField<41, 2, u64> selector;
+    BitField<47, 1, u64> cc;
+    BitField<45, 1, u64> neg;
+    BitField<49, 1, u64> abs;
+};
+
+bool Is64(u64 insn) {
+    return Encoding{insn}.int_format == IntFormat::U64;
+}
+
+int BitSize(FloatFormat format) {
+    switch (format) {
+    case FloatFormat::F16:
+        return 16;
+    case FloatFormat::F32:
+        return 32;
+    case FloatFormat::F64:
+        return 64;
+    }
+    throw NotImplementedException("Invalid float format {}", format);
+}
+
+IR::U32 SmallAbs(TranslatorVisitor& v, const IR::U32& value, int bitsize) {
+    const IR::U32 least_value{v.ir.Imm32(-(1 << (bitsize - 1)))};
+    const IR::U32 mask{v.ir.ShiftRightArithmetic(value, v.ir.Imm32(bitsize - 1))};
+    const IR::U32 absolute{v.ir.BitwiseXor(v.ir.IAdd(value, mask), mask)};
+    const IR::U1 is_least{v.ir.IEqual(value, least_value)};
+    return IR::U32{v.ir.Select(is_least, value, absolute)};
+}
+
+void I2F(TranslatorVisitor& v, u64 insn, IR::U32U64 src) {
+    const Encoding i2f{insn};
+    if (i2f.cc != 0) {
+        throw NotImplementedException("I2F CC");
+    }
+    const bool is_signed{i2f.is_signed != 0};
+    int src_bitsize{};
+    switch (i2f.int_format) {
+    case IntFormat::U8:
+        src = v.ir.BitFieldExtract(src, v.ir.Imm32(static_cast<u32>(i2f.selector) * 8),
+                                   v.ir.Imm32(8), is_signed);
+        if (i2f.abs != 0) {
+            src = SmallAbs(v, src, 8);
+        }
+        src_bitsize = 8;
+        break;
+    case IntFormat::U16:
+        if (i2f.selector == 1 || i2f.selector == 3) {
+            throw NotImplementedException("Invalid U16 selector {}", i2f.selector.Value());
+        }
+        src = v.ir.BitFieldExtract(src, v.ir.Imm32(static_cast<u32>(i2f.selector) * 8),
+                                   v.ir.Imm32(16), is_signed);
+        if (i2f.abs != 0) {
+            src = SmallAbs(v, src, 16);
+        }
+        src_bitsize = 16;
break;
+    case IntFormat::U32:
+    case IntFormat::U64:
+        if (i2f.selector != 0) {
+            throw NotImplementedException("Unexpected selector {}", i2f.selector.Value());
+        }
+        if (i2f.abs != 0 && is_signed) {
+            src = v.ir.IAbs(src);
+        }
+        src_bitsize = i2f.int_format == IntFormat::U64 ? 64 : 32;
+        break;
+    }
+    const int conversion_src_bitsize{i2f.int_format == IntFormat::U64 ? 64 : 32};
+    const int dst_bitsize{BitSize(i2f.float_format)};
+    const IR::FpControl fp_control{
+        .no_contraction = false,
+        .rounding = CastFpRounding(i2f.fp_rounding),
+        .fmz_mode = IR::FmzMode::DontCare,
+    };
+    auto value{v.ir.ConvertIToF(dst_bitsize, conversion_src_bitsize, is_signed, src, fp_control)};
+    if (i2f.neg != 0) {
+        if (i2f.abs != 0 || !is_signed) {
+            // We know the value is positive
+            value = v.ir.FPNeg(value);
+        } else {
+            // Only negate if the input isn't the lowest value
+            IR::U1 is_least;
+            if (src_bitsize == 64) {
+                is_least = v.ir.IEqual(src, v.ir.Imm64(std::numeric_limits<s64>::min()));
+            } else if (src_bitsize == 32) {
+                is_least = v.ir.IEqual(src, v.ir.Imm32(std::numeric_limits<s32>::min()));
+            } else {
+                const IR::U32 least_value{v.ir.Imm32(-(1 << (src_bitsize - 1)))};
+                is_least = v.ir.IEqual(src, least_value);
+            }
+            value = IR::F16F32F64{v.ir.Select(is_least, value, v.ir.FPNeg(value))};
+        }
+    }
+    switch (i2f.float_format) {
+    case FloatFormat::F16: {
+        const IR::F16 zero{v.ir.FPConvert(16, v.ir.Imm32(0.0f))};
+        v.X(i2f.dest_reg, v.ir.PackFloat2x16(v.ir.CompositeConstruct(value, zero)));
+        break;
+    }
+    case FloatFormat::F32:
+        v.F(i2f.dest_reg, value);
+        break;
+    case FloatFormat::F64: {
+        if (!IR::IsAligned(i2f.dest_reg, 2)) {
+            throw NotImplementedException("Unaligned destination {}", i2f.dest_reg.Value());
+        }
+        const IR::Value vector{v.ir.UnpackDouble2x32(value)};
+        for (int i = 0; i < 2; ++i) {
+            v.X(i2f.dest_reg + i, IR::U32{v.ir.CompositeExtract(vector, static_cast<size_t>(i))});
+        }
+        break;
+    }
+    default:
+        throw NotImplementedException("Invalid float format {}", i2f.float_format.Value());
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::I2F_reg(u64 insn) {
+    if (Is64(insn)) {
+        union {
+            u64 raw;
+            BitField<20, 8, IR::Reg> reg;
+        } const value{insn};
+        const IR::Value regs{ir.CompositeConstruct(ir.GetReg(value.reg), ir.GetReg(value.reg + 1))};
+        I2F(*this, insn, ir.PackUint2x32(regs));
+    } else {
+        I2F(*this, insn, GetReg20(insn));
+    }
+}
+
+void TranslatorVisitor::I2F_cbuf(u64 insn) {
+    if (Is64(insn)) {
+        I2F(*this, insn, GetPackedCbuf(insn));
+    } else {
+        I2F(*this, insn, GetCbuf(insn));
+    }
+}
+
+void TranslatorVisitor::I2F_imm(u64 insn) {
+    if (Is64(insn)) {
+        I2F(*this, insn, GetPackedImm20(insn));
+    } else {
+        I2F(*this, insn, GetImm20(insn));
+    }
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_funnel_shift.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_funnel_shift.cpp
new file mode 100755
index 000000000..5feefc0ce
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_funnel_shift.cpp
@@ -0,0 +1,82 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
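+//
+// SHF concatenates two 32-bit registers and shifts across the seam; a left
+// funnel shift by n can be modelled as (sketch):
+//
+//     const u64 packed = (u64{high} << 32) | low;
+//     const u32 result = static_cast<u32>((packed << n) >> 32);  // keep high word
+//
+// which matches the PackUint2x32/UnpackUint2x32 sequence below, where the left
+// variant extracts the upper component and the right variant the lower one.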
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class MaxShift : u64 { + U32, + Undefined, + U64, + S64, +}; + +IR::U64 PackedShift(IR::IREmitter& ir, const IR::U64& packed_int, const IR::U32& safe_shift, + bool right_shift, bool is_signed) { + if (!right_shift) { + return ir.ShiftLeftLogical(packed_int, safe_shift); + } + if (is_signed) { + return ir.ShiftRightArithmetic(packed_int, safe_shift); + } + return ir.ShiftRightLogical(packed_int, safe_shift); +} + +void SHF(TranslatorVisitor& v, u64 insn, const IR::U32& shift, const IR::U32& high_bits, + bool right_shift) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<0, 8, IR::Reg> lo_bits_reg; + BitField<37, 2, MaxShift> max_shift; + BitField<47, 1, u64> cc; + BitField<48, 2, u64> x_mode; + BitField<50, 1, u64> wrap; + } const shf{insn}; + + if (shf.cc != 0) { + throw NotImplementedException("SHF CC"); + } + if (shf.x_mode != 0) { + throw NotImplementedException("SHF X Mode"); + } + if (shf.max_shift == MaxShift::Undefined) { + throw NotImplementedException("SHF Use of undefined MaxShift value"); + } + const IR::U32 low_bits{v.X(shf.lo_bits_reg)}; + const IR::U64 packed_int{v.ir.PackUint2x32(v.ir.CompositeConstruct(low_bits, high_bits))}; + const IR::U32 max_shift{shf.max_shift == MaxShift::U32 ? v.ir.Imm32(32) : v.ir.Imm32(63)}; + const IR::U32 safe_shift{shf.wrap != 0 + ? v.ir.BitwiseAnd(shift, v.ir.ISub(max_shift, v.ir.Imm32(1))) + : v.ir.UMin(shift, max_shift)}; + + const bool is_signed{shf.max_shift == MaxShift::S64}; + const IR::U64 shifted_value{PackedShift(v.ir, packed_int, safe_shift, right_shift, is_signed)}; + const IR::Value unpacked_value{v.ir.UnpackUint2x32(shifted_value)}; + + const IR::U32 result{v.ir.CompositeExtract(unpacked_value, right_shift ? 0 : 1)}; + v.X(shf.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::SHF_l_reg(u64 insn) { + SHF(*this, insn, GetReg20(insn), GetReg39(insn), false); +} + +void TranslatorVisitor::SHF_l_imm(u64 insn) { + SHF(*this, insn, GetImm20(insn), GetReg39(insn), false); +} + +void TranslatorVisitor::SHF_r_reg(u64 insn) { + SHF(*this, insn, GetReg20(insn), GetReg39(insn), true); +} + +void TranslatorVisitor::SHF_r_imm(u64 insn) { + SHF(*this, insn, GetImm20(insn), GetReg39(insn), true); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_minimum_maximum.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_minimum_maximum.cpp new file mode 100755 index 000000000..1badbacc4 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_minimum_maximum.cpp @@ -0,0 +1,64 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void IMNMX(TranslatorVisitor& v, u64 insn, const IR::U32& op_b) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + BitField<43, 2, u64> mode; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> is_signed; + } const imnmx{insn}; + + if (imnmx.cc != 0) { + throw NotImplementedException("IMNMX CC"); + } + + if (imnmx.mode != 0) { + throw NotImplementedException("IMNMX.MODE"); + } + + const IR::U1 pred{v.ir.GetPred(imnmx.pred)}; + const IR::U32 op_a{v.X(imnmx.src_reg)}; + IR::U32 min; + IR::U32 max; + + if (imnmx.is_signed != 0) { + min = IR::U32{v.ir.SMin(op_a, op_b)}; + max = IR::U32{v.ir.SMax(op_a, op_b)}; + } else { + min = IR::U32{v.ir.UMin(op_a, op_b)}; + max = IR::U32{v.ir.UMax(op_a, op_b)}; + } + if (imnmx.neg_pred != 0) { + std::swap(min, max); + } + + const IR::U32 result{v.ir.Select(pred, min, max)}; + v.X(imnmx.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::IMNMX_reg(u64 insn) { + IMNMX(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::IMNMX_cbuf(u64 insn) { + IMNMX(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::IMNMX_imm(u64 insn) { + IMNMX(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_popcount.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_popcount.cpp new file mode 100755 index 000000000..5ece7678d --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_popcount.cpp @@ -0,0 +1,36 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void POPC(TranslatorVisitor& v, u64 insn, const IR::U32& src) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<40, 1, u64> tilde; + } const popc{insn}; + + const IR::U32 operand = popc.tilde == 0 ? src : v.ir.BitwiseNot(src); + const IR::U32 result = v.ir.BitCount(operand); + v.X(popc.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::POPC_reg(u64 insn) { + POPC(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::POPC_cbuf(u64 insn) { + POPC(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::POPC_imm(u64 insn) { + POPC(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_scaled_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_scaled_add.cpp new file mode 100755 index 000000000..044671943 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_scaled_add.cpp @@ -0,0 +1,86 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void ISCADD(TranslatorVisitor& v, u64 insn, IR::U32 op_b, bool cc, bool neg_a, bool neg_b, + u64 scale_imm) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> op_a; + } const iscadd{insn}; + + const bool po{neg_a && neg_b}; + IR::U32 op_a{v.X(iscadd.op_a)}; + if (po) { + // When PO is present, add one + op_b = v.ir.IAdd(op_b, v.ir.Imm32(1)); + } else { + // When PO is not present, the bits are interpreted as negation + if (neg_a) { + op_a = v.ir.INeg(op_a); + } + if (neg_b) { + op_b = v.ir.INeg(op_b); + } + } + // With the operands already processed, scale A + const IR::U32 scale{v.ir.Imm32(static_cast(scale_imm))}; + const IR::U32 scaled_a{v.ir.ShiftLeftLogical(op_a, scale)}; + + const IR::U32 result{v.ir.IAdd(scaled_a, op_b)}; + v.X(iscadd.dest_reg, result); + + if (cc) { + v.SetZFlag(v.ir.GetZeroFromOp(result)); + v.SetSFlag(v.ir.GetSignFromOp(result)); + const IR::U1 carry{v.ir.GetCarryFromOp(result)}; + const IR::U1 overflow{v.ir.GetOverflowFromOp(result)}; + v.SetCFlag(po ? v.ir.LogicalOr(carry, v.ir.GetCarryFromOp(op_b)) : carry); + v.SetOFlag(po ? v.ir.LogicalOr(overflow, v.ir.GetOverflowFromOp(op_b)) : overflow); + } +} + +void ISCADD(TranslatorVisitor& v, u64 insn, IR::U32 op_b) { + union { + u64 raw; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> neg_b; + BitField<49, 1, u64> neg_a; + BitField<39, 5, u64> scale; + } const iscadd{insn}; + + ISCADD(v, insn, op_b, iscadd.cc != 0, iscadd.neg_a != 0, iscadd.neg_b != 0, iscadd.scale); +} + +} // Anonymous namespace + +void TranslatorVisitor::ISCADD_reg(u64 insn) { + ISCADD(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::ISCADD_cbuf(u64 insn) { + ISCADD(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::ISCADD_imm(u64 insn) { + ISCADD(*this, insn, GetImm20(insn)); +} + +void TranslatorVisitor::ISCADD32I(u64 insn) { + union { + u64 raw; + BitField<52, 1, u64> cc; + BitField<53, 5, u64> scale; + } const iscadd{insn}; + + return ISCADD(*this, insn, GetImm32(insn), iscadd.cc != 0, false, false, iscadd.scale); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_set_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_set_predicate.cpp new file mode 100755 index 000000000..7743701d0 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_set_predicate.cpp @@ -0,0 +1,49 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void ISETP(TranslatorVisitor& v, u64 insn, const IR::U32& op_b) { + union { + u64 raw; + BitField<0, 3, IR::Pred> dest_pred_b; + BitField<3, 3, IR::Pred> dest_pred_a; + BitField<8, 8, IR::Reg> src_reg_a; + BitField<39, 3, IR::Pred> bop_pred; + BitField<42, 1, u64> neg_bop_pred; + BitField<45, 2, BooleanOp> bop; + BitField<48, 1, u64> is_signed; + BitField<49, 3, CompareOp> compare_op; + } const isetp{insn}; + + const BooleanOp bop{isetp.bop}; + const CompareOp compare_op{isetp.compare_op}; + const IR::U32 op_a{v.X(isetp.src_reg_a)}; + const IR::U1 comparison{IntegerCompare(v.ir, op_a, op_b, compare_op, isetp.is_signed != 0)}; + const IR::U1 bop_pred{v.ir.GetPred(isetp.bop_pred, isetp.neg_bop_pred != 0)}; + const IR::U1 result_a{PredicateCombine(v.ir, comparison, bop_pred, bop)}; + const IR::U1 result_b{PredicateCombine(v.ir, v.ir.LogicalNot(comparison), bop_pred, bop)}; + v.ir.SetPred(isetp.dest_pred_a, result_a); + v.ir.SetPred(isetp.dest_pred_b, result_b); +} +} // Anonymous namespace + +void TranslatorVisitor::ISETP_reg(u64 insn) { + ISETP(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::ISETP_cbuf(u64 insn) { + ISETP(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::ISETP_imm(u64 insn) { + ISETP(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_left.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_left.cpp new file mode 100755 index 000000000..20af68852 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_left.cpp @@ -0,0 +1,71 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void SHL(TranslatorVisitor& v, u64 insn, const IR::U32& unsafe_shift) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg_a; + BitField<39, 1, u64> w; + BitField<43, 1, u64> x; + BitField<47, 1, u64> cc; + } const shl{insn}; + + if (shl.x != 0) { + throw NotImplementedException("SHL.X"); + } + if (shl.cc != 0) { + throw NotImplementedException("SHL.CC"); + } + const IR::U32 base{v.X(shl.src_reg_a)}; + IR::U32 result; + if (shl.w != 0) { + // When .W is set, the shift value is wrapped. + // To emulate this we just have to wrap it ourselves. + const IR::U32 shift{v.ir.BitwiseAnd(unsafe_shift, v.ir.Imm32(31))}; + result = v.ir.ShiftLeftLogical(base, shift); + } else { + // When .W is not set, the shift value is clamped between 0 and 32. + // To emulate this we have to keep in mind the special case of a shift by 32, which evaluates to 0. + // We can safely evaluate an out of bounds shift according to the SPIR-V specification: + // + // https://www.khronos.org/registry/spir-v/specs/unified1/SPIRV.html#OpShiftLeftLogical + // "Shift is treated as unsigned. The resulting value is undefined if Shift is greater than + // or equal to the bit width of the components of Base."
+ // + // And on the GLASM specification it is also safe to evaluate out of bounds: + // + // https://www.khronos.org/registry/OpenGL/extensions/NV/NV_gpu_program4.txt + // "The results of a shift operation ("<<") are undefined if the value of the second operand + // is negative, or greater than or equal to the number of bits in the first operand." + // + // Emphasis on undefined results in contrast to undefined behavior. + // + const IR::U1 is_safe{v.ir.ILessThan(unsafe_shift, v.ir.Imm32(32), false)}; + const IR::U32 unsafe_result{v.ir.ShiftLeftLogical(base, unsafe_shift)}; + result = IR::U32{v.ir.Select(is_safe, unsafe_result, v.ir.Imm32(0))}; + } + v.X(shl.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::SHL_reg(u64 insn) { + SHL(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::SHL_cbuf(u64 insn) { + SHL(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::SHL_imm(u64 insn) { + SHL(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_right.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_right.cpp new file mode 100755 index 000000000..be00bb605 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_shift_right.cpp @@ -0,0 +1,66 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void SHR(TranslatorVisitor& v, u64 insn, const IR::U32& shift) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg_a; + BitField<39, 1, u64> is_wrapped; + BitField<40, 1, u64> brev; + BitField<43, 1, u64> xmode; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> is_signed; + } const shr{insn}; + + if (shr.xmode != 0) { + throw NotImplementedException("SHR.XMODE"); + } + if (shr.cc != 0) { + throw NotImplementedException("SHR.CC"); + } + + IR::U32 base{v.X(shr.src_reg_a)}; + if (shr.brev == 1) { + base = v.ir.BitReverse(base); + } + IR::U32 result; + const IR::U32 safe_shift = shr.is_wrapped == 0 ? 
shift : v.ir.BitwiseAnd(shift, v.ir.Imm32(31)); + if (shr.is_signed == 1) { + result = IR::U32{v.ir.ShiftRightArithmetic(base, safe_shift)}; + } else { + result = IR::U32{v.ir.ShiftRightLogical(base, safe_shift)}; + } + + if (shr.is_wrapped == 0) { + const IR::U32 zero{v.ir.Imm32(0)}; + const IR::U32 safe_bits{v.ir.Imm32(32)}; + + const IR::U1 is_negative{v.ir.ILessThan(result, zero, true)}; + const IR::U1 is_safe{v.ir.ILessThan(shift, safe_bits, false)}; + const IR::U32 clamped_value{v.ir.Select(is_negative, v.ir.Imm32(-1), zero)}; + result = IR::U32{v.ir.Select(is_safe, result, clamped_value)}; + } + v.X(shr.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::SHR_reg(u64 insn) { + SHR(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::SHR_cbuf(u64 insn) { + SHR(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::SHR_imm(u64 insn) { + SHR(*this, insn, GetImm20(insn)); +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_short_multiply_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_short_multiply_add.cpp new file mode 100755 index 000000000..2932cdc42 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_short_multiply_add.cpp @@ -0,0 +1,135 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class SelectMode : u64 { + Default, + CLO, + CHI, + CSFU, + CBCC, +}; + +enum class Half : u64 { + H0, // Least-significant bits (15:0) + H1, // Most-significant bits (31:16) +}; + +IR::U32 ExtractHalf(TranslatorVisitor& v, const IR::U32& src, Half half, bool is_signed) { + const IR::U32 offset{v.ir.Imm32(half == Half::H1 ? 16 : 0)}; + return v.ir.BitFieldExtract(src, offset, v.ir.Imm32(16), is_signed); +} + +void XMAD(TranslatorVisitor& v, u64 insn, const IR::U32& src_b, const IR::U32& src_c, + SelectMode select_mode, Half half_b, bool psl, bool mrg, bool x) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg_a; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> is_a_signed; + BitField<49, 1, u64> is_b_signed; + BitField<53, 1, Half> half_a; + } const xmad{insn}; + + if (x) { + throw NotImplementedException("XMAD X"); + } + const IR::U32 op_a{ExtractHalf(v, v.X(xmad.src_reg_a), xmad.half_a, xmad.is_a_signed != 0)}; + const IR::U32 op_b{ExtractHalf(v, src_b, half_b, xmad.is_b_signed != 0)}; + + IR::U32 product{v.ir.IMul(op_a, op_b)}; + if (psl) { + // .PSL shifts the product 16 bits + product = v.ir.ShiftLeftLogical(product, v.ir.Imm32(16)); + } + const IR::U32 op_c{[&]() -> IR::U32 { + switch (select_mode) { + case SelectMode::Default: + return src_c; + case SelectMode::CLO: + return ExtractHalf(v, src_c, Half::H0, false); + case SelectMode::CHI: + return ExtractHalf(v, src_c, Half::H1, false); + case SelectMode::CBCC: + return v.ir.IAdd(v.ir.ShiftLeftLogical(src_b, v.ir.Imm32(16)), src_c); + case SelectMode::CSFU: + throw NotImplementedException("XMAD CSFU"); + } + throw NotImplementedException("Invalid XMAD select mode {}", select_mode); + }()}; + IR::U32 result{v.ir.IAdd(product, op_c)}; + if (mrg) { + // .MRG inserts src_b [15:0] into result's [31:16]. 
+ + const IR::U32 lsb_b{ExtractHalf(v, src_b, Half::H0, false)}; + result = v.ir.BitFieldInsert(result, lsb_b, v.ir.Imm32(16), v.ir.Imm32(16)); + } + if (xmad.cc) { + throw NotImplementedException("XMAD CC"); + } + // Store result + v.X(xmad.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::XMAD_reg(u64 insn) { + union { + u64 raw; + BitField<35, 1, Half> half_b; + BitField<36, 1, u64> psl; + BitField<37, 1, u64> mrg; + BitField<38, 1, u64> x; + BitField<50, 3, SelectMode> select_mode; + } const xmad{insn}; + + XMAD(*this, insn, GetReg20(insn), GetReg39(insn), xmad.select_mode, xmad.half_b, xmad.psl != 0, + xmad.mrg != 0, xmad.x != 0); +} + +void TranslatorVisitor::XMAD_rc(u64 insn) { + union { + u64 raw; + BitField<50, 2, SelectMode> select_mode; + BitField<52, 1, Half> half_b; + BitField<54, 1, u64> x; + } const xmad{insn}; + + XMAD(*this, insn, GetReg39(insn), GetCbuf(insn), xmad.select_mode, xmad.half_b, false, false, + xmad.x != 0); +} + +void TranslatorVisitor::XMAD_cr(u64 insn) { + union { + u64 raw; + BitField<50, 2, SelectMode> select_mode; + BitField<52, 1, Half> half_b; + BitField<54, 1, u64> x; + BitField<55, 1, u64> psl; + BitField<56, 1, u64> mrg; + } const xmad{insn}; + + XMAD(*this, insn, GetCbuf(insn), GetReg39(insn), xmad.select_mode, xmad.half_b, xmad.psl != 0, + xmad.mrg != 0, xmad.x != 0); +} + +void TranslatorVisitor::XMAD_imm(u64 insn) { + union { + u64 raw; + BitField<20, 16, u64> src_b; + BitField<36, 1, u64> psl; + BitField<37, 1, u64> mrg; + BitField<38, 1, u64> x; + BitField<50, 3, SelectMode> select_mode; + } const xmad{insn}; + + XMAD(*this, insn, ir.Imm32(static_cast<u32>(xmad.src_b)), GetReg39(insn), xmad.select_mode, + Half::H0, xmad.psl != 0, xmad.mrg != 0, xmad.x != 0); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/integer_to_integer_conversion.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_to_integer_conversion.cpp new file mode 100755 index 000000000..53e8d8923 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/integer_to_integer_conversion.cpp @@ -0,0 +1,126 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class IntegerWidth : u64 { + Byte, + Short, + Word, +}; + +[[nodiscard]] IR::U32 WidthSize(IR::IREmitter& ir, IntegerWidth width) { + switch (width) { + case IntegerWidth::Byte: + return ir.Imm32(8); + case IntegerWidth::Short: + return ir.Imm32(16); + case IntegerWidth::Word: + return ir.Imm32(32); + default: + throw NotImplementedException("Invalid width {}", width); + } +} + +[[nodiscard]] IR::U32 ConvertInteger(IR::IREmitter& ir, const IR::U32& src, + IntegerWidth dst_width) { + const IR::U32 zero{ir.Imm32(0)}; + const IR::U32 count{WidthSize(ir, dst_width)}; + return ir.BitFieldExtract(src, zero, count, false); +} + +[[nodiscard]] IR::U32 SaturateInteger(IR::IREmitter& ir, const IR::U32& src, IntegerWidth dst_width, + bool dst_signed, bool src_signed) { + IR::U32 min{}; + IR::U32 max{}; + const IR::U32 zero{ir.Imm32(0)}; + switch (dst_width) { + case IntegerWidth::Byte: + min = dst_signed && src_signed ? ir.Imm32(0xffffff80) : zero; + max = dst_signed ? ir.Imm32(0x7f) : ir.Imm32(0xff); + break; + case IntegerWidth::Short: + min = dst_signed && src_signed ?
ir.Imm32(0xffff8000) : zero; + max = dst_signed ? ir.Imm32(0x7fff) : ir.Imm32(0xffff); + break; + case IntegerWidth::Word: + min = dst_signed && src_signed ? ir.Imm32(0x80000000) : zero; + max = dst_signed ? ir.Imm32(0x7fffffff) : ir.Imm32(0xffffffff); + break; + default: + throw NotImplementedException("Invalid width {}", dst_width); + } + const IR::U32 value{!dst_signed && src_signed ? ir.SMax(zero, src) : src}; + return dst_signed && src_signed ? ir.SClamp(value, min, max) : ir.UClamp(value, min, max); +} + +void I2I(TranslatorVisitor& v, u64 insn, const IR::U32& src_a) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 2, IntegerWidth> dst_fmt; + BitField<12, 1, u64> dst_fmt_sign; + BitField<10, 2, IntegerWidth> src_fmt; + BitField<13, 1, u64> src_fmt_sign; + BitField<41, 3, u64> selector; + BitField<45, 1, u64> neg; + BitField<47, 1, u64> cc; + BitField<49, 1, u64> abs; + BitField<50, 1, u64> sat; + } const i2i{insn}; + + if (i2i.src_fmt == IntegerWidth::Short && (i2i.selector == 1 || i2i.selector == 3)) { + throw NotImplementedException("16-bit source format incompatible with selector {}", + i2i.selector); + } + if (i2i.src_fmt == IntegerWidth::Word && i2i.selector != 0) { + throw NotImplementedException("32-bit source format incompatible with selector {}", + i2i.selector); + } + + const s32 selector{static_cast<s32>(i2i.selector)}; + const IR::U32 offset{v.ir.Imm32(selector * 8)}; + const IR::U32 count{WidthSize(v.ir, i2i.src_fmt)}; + const bool src_signed{i2i.src_fmt_sign != 0}; + const bool dst_signed{i2i.dst_fmt_sign != 0}; + const bool sat{i2i.sat != 0}; + + IR::U32 src_values{v.ir.BitFieldExtract(src_a, offset, count, src_signed)}; + if (i2i.abs != 0) { + src_values = v.ir.IAbs(src_values); + } + if (i2i.neg != 0) { + src_values = v.ir.INeg(src_values); + } + const IR::U32 result{ + sat ? SaturateInteger(v.ir, src_values, i2i.dst_fmt, dst_signed, src_signed) + : ConvertInteger(v.ir, src_values, i2i.dst_fmt)}; + + v.X(i2i.dest_reg, result); + if (i2i.cc != 0) { + v.SetZFlag(v.ir.GetZeroFromOp(result)); + v.SetSFlag(v.ir.GetSignFromOp(result)); + v.ResetCFlag(); + v.ResetOFlag(); + } +} +} // Anonymous namespace + +void TranslatorVisitor::I2I_reg(u64 insn) { + I2I(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::I2I_cbuf(u64 insn) { + I2I(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::I2I_imm(u64 insn) { + I2I(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/internal_stage_buffer_entry_read.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/internal_stage_buffer_entry_read.cpp new file mode 100755 index 000000000..9b85f8059 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/internal_stage_buffer_entry_read.cpp @@ -0,0 +1,53 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
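A closing note on the I2I translator above: its saturating path is just a clamp against the destination width's bounds. A signed byte destination clamps to [-128, 127], an unsigned one to [0, 255], and a signed source headed to an unsigned destination is floored at zero first. A host-side sketch of the signed-to-signed byte case (illustrative name, not part of the patch):

#include <algorithm>
#include <cstdint>

// I2I with .SAT, S32 source -> S8 destination: clamp to the signed
// 8-bit range; the clamped value stays in a 32-bit register.
int32_t SaturateToS8(int32_t value) {
    return std::clamp<int32_t>(value, -128, 127);
}
// SaturateToS8(300) == 127, SaturateToS8(-500) == -128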
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "common/logging/log.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Mode : u64 { + Default, + Patch, + Prim, + Attr, +}; + +enum class Shift : u64 { + Default, + U16, + B32, +}; + +} // Anonymous namespace + +void TranslatorVisitor::ISBERD(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<31, 1, u64> skew; + BitField<32, 1, u64> o; + BitField<33, 2, Mode> mode; + BitField<47, 2, Shift> shift; + } const isberd{insn}; + + if (isberd.skew != 0) { + throw NotImplementedException("SKEW"); + } + if (isberd.o != 0) { + throw NotImplementedException("O"); + } + if (isberd.mode != Mode::Default) { + throw NotImplementedException("Mode {}", isberd.mode.Value()); + } + if (isberd.shift != Shift::Default) { + throw NotImplementedException("Shift {}", isberd.shift.Value()); + } + LOG_WARNING(Shader, "(STUBBED) called"); + X(isberd.dest_reg, X(isberd.src_reg)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.cpp new file mode 100755 index 000000000..2300088e3 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.cpp @@ -0,0 +1,62 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <utility> + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/load_constant.h" + +namespace Shader::Maxwell { +using namespace LDC; +namespace { +std::pair<IR::U32, IR::U32> Slot(IR::IREmitter& ir, Mode mode, const IR::U32& imm_index, + const IR::U32& reg, const IR::U32& imm) { + switch (mode) { + case Mode::Default: + return {imm_index, ir.IAdd(reg, imm)}; + default: + break; + } + throw NotImplementedException("Mode {}", mode); +} +} // Anonymous namespace + +void TranslatorVisitor::LDC(u64 insn) { + const Encoding ldc{insn}; + const IR::U32 imm_index{ir.Imm32(static_cast<u32>(ldc.index))}; + const IR::U32 reg{X(ldc.src_reg)}; + const IR::U32 imm{ir.Imm32(static_cast<s32>(ldc.offset))}; + const auto [index, offset]{Slot(ir, ldc.mode, imm_index, reg, imm)}; + switch (ldc.size) { + case Size::U8: + X(ldc.dest_reg, IR::U32{ir.GetCbuf(index, offset, 8, false)}); + break; + case Size::S8: + X(ldc.dest_reg, IR::U32{ir.GetCbuf(index, offset, 8, true)}); + break; + case Size::U16: + X(ldc.dest_reg, IR::U32{ir.GetCbuf(index, offset, 16, false)}); + break; + case Size::S16: + X(ldc.dest_reg, IR::U32{ir.GetCbuf(index, offset, 16, true)}); + break; + case Size::B32: + X(ldc.dest_reg, IR::U32{ir.GetCbuf(index, offset, 32, false)}); + break; + case Size::B64: { + if (!IR::IsAligned(ldc.dest_reg, 2)) { + throw NotImplementedException("Unaligned destination register"); + } + const IR::Value vector{ir.GetCbuf(index, offset, 64, false)}; + for (int i = 0; i < 2; ++i) { + X(ldc.dest_reg + i, IR::U32{ir.CompositeExtract(vector, static_cast<size_t>(i))}); + } + break; + } + default: + throw NotImplementedException("Invalid size {}", ldc.size.Value()); + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.h b/src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.h new file mode 100755 index 000000000..3074ea0e3 --- /dev/null +++
b/src/shader_recompiler/frontend/maxwell/translate/impl/load_constant.h @@ -0,0 +1,39 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/reg.h" + +namespace Shader::Maxwell::LDC { + +enum class Mode : u64 { + Default, + IL, + IS, + ISL, +}; + +enum class Size : u64 { + U8, + S8, + U16, + S16, + B32, + B64, +}; + +union Encoding { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<20, 16, s64> offset; + BitField<36, 5, u64> index; + BitField<44, 2, Mode> mode; + BitField<48, 3, Size> size; +}; + +} // namespace Shader::Maxwell::LDC diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/load_effective_address.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/load_effective_address.cpp new file mode 100755 index 000000000..4a0f04e47 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/load_effective_address.cpp @@ -0,0 +1,108 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void LEA_hi(TranslatorVisitor& v, u64 insn, const IR::U32& base, IR::U32 offset_hi, u64 scale, + bool neg, bool x) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> offset_lo_reg; + BitField<47, 1, u64> cc; + BitField<48, 3, IR::Pred> pred; + } const lea{insn}; + + if (x) { + throw NotImplementedException("LEA.HI X"); + } + if (lea.pred != IR::Pred::PT) { + throw NotImplementedException("LEA.HI Pred"); + } + if (lea.cc != 0) { + throw NotImplementedException("LEA.HI CC"); + } + + const IR::U32 offset_lo{v.X(lea.offset_lo_reg)}; + const IR::U64 packed_offset{v.ir.PackUint2x32(v.ir.CompositeConstruct(offset_lo, offset_hi))}; + const IR::U64 offset{neg ? IR::U64{v.ir.INeg(packed_offset)} : packed_offset}; + + const s32 hi_scale{32 - static_cast<s32>(scale)}; + const IR::U64 scaled_offset{v.ir.ShiftRightLogical(offset, v.ir.Imm32(hi_scale))}; + const IR::U32 scaled_offset_w0{v.ir.CompositeExtract(v.ir.UnpackUint2x32(scaled_offset), 0)}; + + IR::U32 result{v.ir.IAdd(base, scaled_offset_w0)}; + v.X(lea.dest_reg, result); +} + +void LEA_lo(TranslatorVisitor& v, u64 insn, const IR::U32& base) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> offset_lo_reg; + BitField<39, 5, u64> scale; + BitField<45, 1, u64> neg; + BitField<46, 1, u64> x; + BitField<47, 1, u64> cc; + BitField<48, 3, IR::Pred> pred; + } const lea{insn}; + if (lea.x != 0) { + throw NotImplementedException("LEA.LO X"); + } + if (lea.pred != IR::Pred::PT) { + throw NotImplementedException("LEA.LO Pred"); + } + if (lea.cc != 0) { + throw NotImplementedException("LEA.LO CC"); + } + + const IR::U32 offset_lo{v.X(lea.offset_lo_reg)}; + const s32 scale{static_cast<s32>(lea.scale)}; + const IR::U32 offset{lea.neg != 0 ?
IR::U32{v.ir.INeg(offset_lo)} : offset_lo}; + const IR::U32 scaled_offset{v.ir.ShiftLeftLogical(offset, v.ir.Imm32(scale))}; + + IR::U32 result{v.ir.IAdd(base, scaled_offset)}; + v.X(lea.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::LEA_hi_reg(u64 insn) { + union { + u64 insn; + BitField<28, 5, u64> scale; + BitField<37, 1, u64> neg; + BitField<38, 1, u64> x; + } const lea{insn}; + + LEA_hi(*this, insn, GetReg20(insn), GetReg39(insn), lea.scale, lea.neg != 0, lea.x != 0); +} + +void TranslatorVisitor::LEA_hi_cbuf(u64 insn) { + union { + u64 insn; + BitField<51, 5, u64> scale; + BitField<56, 1, u64> neg; + BitField<57, 1, u64> x; + } const lea{insn}; + + LEA_hi(*this, insn, GetCbuf(insn), GetReg39(insn), lea.scale, lea.neg != 0, lea.x != 0); +} + +void TranslatorVisitor::LEA_lo_reg(u64 insn) { + LEA_lo(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::LEA_lo_cbuf(u64 insn) { + LEA_lo(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::LEA_lo_imm(u64 insn) { + LEA_lo(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_attribute.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_attribute.cpp new file mode 100755 index 000000000..7d7dcc3cb --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_attribute.cpp @@ -0,0 +1,196 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Size : u64 { + B32, + B64, + B96, + B128, +}; + +enum class InterpolationMode : u64 { + Pass, + Multiply, + Constant, + Sc, +}; + +enum class SampleMode : u64 { + Default, + Centroid, + Offset, +}; + +u32 NumElements(Size size) { + switch (size) { + case Size::B32: + return 1; + case Size::B64: + return 2; + case Size::B96: + return 3; + case Size::B128: + return 4; + } + throw InvalidArgument("Invalid size {}", size); +} + +template <typename F> +void HandleIndexed(TranslatorVisitor& v, IR::Reg index_reg, u32 num_elements, F&& f) { + const IR::U32 index_value{v.X(index_reg)}; + for (u32 element = 0; element < num_elements; ++element) { + const IR::U32 final_offset{ + element == 0 ?
index_value : IR::U32{v.ir.IAdd(index_value, v.ir.Imm32(element * 4U))}}; + f(element, final_offset); + } +} + +} // Anonymous namespace + +void TranslatorVisitor::ALD(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> index_reg; + BitField<20, 10, u64> absolute_offset; + BitField<20, 11, s64> relative_offset; + BitField<39, 8, IR::Reg> vertex_reg; + BitField<32, 1, u64> o; + BitField<31, 1, u64> patch; + BitField<47, 2, Size> size; + } const ald{insn}; + + const u64 offset{ald.absolute_offset.Value()}; + if (offset % 4 != 0) { + throw NotImplementedException("Unaligned absolute offset {}", offset); + } + const IR::U32 vertex{X(ald.vertex_reg)}; + const u32 num_elements{NumElements(ald.size)}; + if (ald.index_reg == IR::Reg::RZ) { + for (u32 element = 0; element < num_elements; ++element) { + if (ald.patch != 0) { + const IR::Patch patch{offset / 4 + element}; + F(ald.dest_reg + element, ir.GetPatch(patch)); + } else { + const IR::Attribute attr{offset / 4 + element}; + F(ald.dest_reg + element, ir.GetAttribute(attr, vertex)); + } + } + return; + } + if (ald.patch != 0) { + throw NotImplementedException("Indirect patch read"); + } + HandleIndexed(*this, ald.index_reg, num_elements, [&](u32 element, IR::U32 final_offset) { + F(ald.dest_reg + element, ir.GetAttributeIndexed(final_offset, vertex)); + }); +} + +void TranslatorVisitor::AST(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> src_reg; + BitField<8, 8, IR::Reg> index_reg; + BitField<20, 10, u64> absolute_offset; + BitField<20, 11, s64> relative_offset; + BitField<31, 1, u64> patch; + BitField<39, 8, IR::Reg> vertex_reg; + BitField<47, 2, Size> size; + } const ast{insn}; + + const u64 offset{ast.absolute_offset.Value()}; + if (offset % 4 != 0) { + throw NotImplementedException("Unaligned absolute offset {}", offset); + } + const IR::U32 vertex{X(ast.vertex_reg)}; + const u32 num_elements{NumElements(ast.size)}; + if (ast.index_reg == IR::Reg::RZ) { + for (u32 element = 0; element < num_elements; ++element) { + if (ast.patch != 0) { + const IR::Patch patch{offset / 4 + element}; + ir.SetPatch(patch, F(ast.src_reg + element)); + } else { + const IR::Attribute attr{offset / 4 + element}; + ir.SetAttribute(attr, F(ast.src_reg + element), vertex); + } + } + return; + } + if (ast.patch != 0) { + throw NotImplementedException("Indexed tessellation patch store"); + } + HandleIndexed(*this, ast.index_reg, num_elements, [&](u32 element, IR::U32 final_offset) { + ir.SetAttributeIndexed(final_offset, F(ast.src_reg + element), vertex); + }); +} + +void TranslatorVisitor::IPA(u64 insn) { + // IPA is the instruction used to read varyings from a fragment shader. + // gl_FragCoord is mapped to the gl_Position attribute. + // It yields unknown results when used outside of the fragment shader stage. + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> index_reg; + BitField<20, 8, IR::Reg> multiplier; + BitField<30, 8, IR::Attribute> attribute; + BitField<38, 1, u64> idx; + BitField<51, 1, u64> sat; + BitField<52, 2, SampleMode> sample_mode; + BitField<54, 2, InterpolationMode> interpolation_mode; + } const ipa{insn}; + + // Indexed IPAs are used for indexed varyings.
+ // For example: + // + // in vec4 colors[4]; + // uniform int idx; + // void main() { + // gl_FragColor = colors[idx]; + // } + const bool is_indexed{ipa.idx != 0 && ipa.index_reg != IR::Reg::RZ}; + const IR::Attribute attribute{ipa.attribute}; + IR::F32 value{is_indexed ? ir.GetAttributeIndexed(X(ipa.index_reg)) + : ir.GetAttribute(attribute)}; + if (IR::IsGeneric(attribute)) { + const ProgramHeader& sph{env.SPH()}; + const u32 attr_index{IR::GenericAttributeIndex(attribute)}; + const u32 element{static_cast<u32>(attribute) % 4}; + const std::array input_map{sph.ps.GenericInputMap(attr_index)}; + const bool is_perspective{input_map[element] == Shader::PixelImap::Perspective}; + if (is_perspective) { + const IR::F32 position_w{ir.GetAttribute(IR::Attribute::PositionW)}; + value = ir.FPMul(value, position_w); + } + } + if (ipa.interpolation_mode == InterpolationMode::Multiply) { + value = ir.FPMul(value, F(ipa.multiplier)); + } + + // Saturated IPAs are generally generated out of clamped varyings. + // For example: clamp(some_varying, 0.0, 1.0) + const bool is_saturated{ipa.sat != 0}; + if (is_saturated) { + if (attribute == IR::Attribute::FrontFace) { + throw NotImplementedException("IPA.SAT on FrontFace"); + } + value = ir.FPSaturate(value); + } + + F(ipa.dest_reg, value); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_local_shared.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_local_shared.cpp new file mode 100755 index 000000000..20df163f2 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_local_shared.cpp @@ -0,0 +1,201 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
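The local and shared memory file below addresses storage in 32-bit words, so sub-word accesses become bitfield operations: the word index is offset / 4, and the ByteOffset/ShortOffset helpers compute the bit position inside the word as (offset << 3) & 24 and (offset << 3) & 16 respectively. A scalar sketch of the byte-load path, assuming local is a plain word-addressed array (illustrative only, not part of the patch):

#include <cstdint>

// LDL.U8 model: extract one byte from word-addressed local memory.
uint32_t LoadLocalU8(const uint32_t* local, uint32_t byte_offset) {
    const uint32_t word = local[byte_offset / 4];
    const uint32_t bit = (byte_offset << 3) & 24; // bit position 0, 8, 16 or 24
    return (word >> bit) & 0xff;                  // zero-extend the byte
}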
+ +#include <utility> + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Size : u64 { + U8, + S8, + U16, + S16, + B32, + B64, + B128, +}; + +IR::U32 Offset(TranslatorVisitor& v, u64 insn) { + union { + u64 raw; + BitField<8, 8, IR::Reg> offset_reg; + BitField<20, 24, u64> absolute_offset; + BitField<20, 24, s64> relative_offset; + } const encoding{insn}; + + if (encoding.offset_reg == IR::Reg::RZ) { + return v.ir.Imm32(static_cast<u32>(encoding.absolute_offset)); + } else { + const s32 relative{static_cast<s32>(encoding.relative_offset.Value())}; + return v.ir.IAdd(v.X(encoding.offset_reg), v.ir.Imm32(relative)); + } +} + +std::pair<IR::U32, IR::U32> WordOffset(TranslatorVisitor& v, u64 insn) { + const IR::U32 offset{Offset(v, insn)}; + if (offset.IsImmediate()) { + return {v.ir.Imm32(offset.U32() / 4), offset}; + } else { + return {v.ir.ShiftRightArithmetic(offset, v.ir.Imm32(2)), offset}; + } +} + +std::pair<int, bool> GetSize(u64 insn) { + union { + u64 raw; + BitField<48, 3, Size> size; + } const encoding{insn}; + + switch (encoding.size) { + case Size::U8: + return {8, false}; + case Size::S8: + return {8, true}; + case Size::U16: + return {16, false}; + case Size::S16: + return {16, true}; + case Size::B32: + return {32, false}; + case Size::B64: + return {64, false}; + case Size::B128: + return {128, false}; + default: + throw NotImplementedException("Invalid size {}", encoding.size.Value()); + } +} + +IR::Reg Reg(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> reg; + } const encoding{insn}; + + return encoding.reg; +} + +IR::U32 ByteOffset(IR::IREmitter& ir, const IR::U32& offset) { + return ir.BitwiseAnd(ir.ShiftLeftLogical(offset, ir.Imm32(3)), ir.Imm32(24)); +} + +IR::U32 ShortOffset(IR::IREmitter& ir, const IR::U32& offset) { + return ir.BitwiseAnd(ir.ShiftLeftLogical(offset, ir.Imm32(3)), ir.Imm32(16)); +} +} // Anonymous namespace + +void TranslatorVisitor::LDL(u64 insn) { + const auto [word_offset, offset]{WordOffset(*this, insn)}; + const IR::Reg dest{Reg(insn)}; + const auto [bit_size, is_signed]{GetSize(insn)}; + switch (bit_size) { + case 8: { + const IR::U32 bit{ByteOffset(ir, offset)}; + X(dest, ir.BitFieldExtract(ir.LoadLocal(word_offset), bit, ir.Imm32(8), is_signed)); + break; + } + case 16: { + const IR::U32 bit{ShortOffset(ir, offset)}; + X(dest, ir.BitFieldExtract(ir.LoadLocal(word_offset), bit, ir.Imm32(16), is_signed)); + break; + } + case 32: + case 64: + case 128: + if (!IR::IsAligned(dest, static_cast<size_t>(bit_size / 32))) { + throw NotImplementedException("Unaligned destination register {}", dest); + } + X(dest, ir.LoadLocal(word_offset)); + for (int i = 1; i < bit_size / 32; ++i) { + X(dest + i, ir.LoadLocal(ir.IAdd(word_offset, ir.Imm32(i)))); + } + break; + } +} + +void TranslatorVisitor::LDS(u64 insn) { + const IR::U32 offset{Offset(*this, insn)}; + const IR::Reg dest{Reg(insn)}; + const auto [bit_size, is_signed]{GetSize(insn)}; + const IR::Value value{ir.LoadShared(bit_size, is_signed, offset)}; + switch (bit_size) { + case 8: + case 16: + case 32: + X(dest, IR::U32{value}); + break; + case 64: + case 128: + if (!IR::IsAligned(dest, static_cast<size_t>(bit_size / 32))) { + throw NotImplementedException("Unaligned destination register {}", dest); + } + for (int element = 0; element < bit_size / 32; ++element) { + X(dest + element, IR::U32{ir.CompositeExtract(value, static_cast<size_t>(element))}); + } + break; + } +} + +void TranslatorVisitor::STL(u64 insn) { + const auto
[word_offset, offset]{WordOffset(*this, insn)}; + const IR::Reg reg{Reg(insn)}; + const IR::U32 src{X(reg)}; + const int bit_size{GetSize(insn).first}; + switch (bit_size) { + case 8: { + const IR::U32 bit{ByteOffset(ir, offset)}; + const IR::U32 value{ir.BitFieldInsert(ir.LoadLocal(word_offset), src, bit, ir.Imm32(8))}; + ir.WriteLocal(word_offset, value); + break; + } + case 16: { + const IR::U32 bit{ShortOffset(ir, offset)}; + const IR::U32 value{ir.BitFieldInsert(ir.LoadLocal(word_offset), src, bit, ir.Imm32(16))}; + ir.WriteLocal(word_offset, value); + break; + } + case 32: + case 64: + case 128: + if (!IR::IsAligned(reg, static_cast<size_t>(bit_size / 32))) { + throw NotImplementedException("Unaligned source register"); + } + ir.WriteLocal(word_offset, src); + for (int i = 1; i < bit_size / 32; ++i) { + ir.WriteLocal(ir.IAdd(word_offset, ir.Imm32(i)), X(reg + i)); + } + break; + } +} + +void TranslatorVisitor::STS(u64 insn) { + const IR::U32 offset{Offset(*this, insn)}; + const IR::Reg reg{Reg(insn)}; + const int bit_size{GetSize(insn).first}; + switch (bit_size) { + case 8: + case 16: + case 32: + ir.WriteShared(bit_size, offset, X(reg)); + break; + case 64: + if (!IR::IsAligned(reg, 2)) { + throw NotImplementedException("Unaligned source register {}", reg); + } + ir.WriteShared(64, offset, ir.CompositeConstruct(X(reg), X(reg + 1))); + break; + case 128: { + if (!IR::IsAligned(reg, 4)) { + throw NotImplementedException("Unaligned source register {}", reg); + } + const IR::Value vector{ir.CompositeConstruct(X(reg), X(reg + 1), X(reg + 2), X(reg + 3))}; + ir.WriteShared(128, offset, vector); + break; + } + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_memory.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_memory.cpp new file mode 100755 index 000000000..36c5cff2f --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/load_store_memory.cpp @@ -0,0 +1,184 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class LoadSize : u64 { + U8, // Zero-extend + S8, // Sign-extend + U16, // Zero-extend + S16, // Sign-extend + B32, + B64, + B128, + U128, // ??? +}; + +enum class StoreSize : u64 { + U8, // Zero-extend + S8, // Sign-extend + U16, // Zero-extend + S16, // Sign-extend + B32, + B64, + B128, +}; + +// See Table 27 in https://docs.nvidia.com/cuda/parallel-thread-execution/index.html +enum class LoadCache : u64 { + CA, // Cache at all levels, likely to be accessed again + CG, // Cache at global level (cache in L2 and below, not L1) + CI, // ???
+ +CV, // Don't cache and fetch again (consider cached system memory lines stale, fetch again) +}; + +// See Table 28 in https://docs.nvidia.com/cuda/parallel-thread-execution/index.html +enum class StoreCache : u64 { + WB, // Cache write-back all coherent levels + CG, // Cache at global level + CS, // Cache streaming, likely to be accessed once + WT, // Cache write-through (to system memory) +}; + +IR::U64 Address(TranslatorVisitor& v, u64 insn) { + union { + u64 raw; + BitField<8, 8, IR::Reg> addr_reg; + BitField<20, 24, s64> addr_offset; + BitField<20, 24, u64> rz_addr_offset; + BitField<45, 1, u64> e; + } const mem{insn}; + + const IR::U64 address{[&]() -> IR::U64 { + if (mem.e == 0) { + // LDG/STG without .E uses a 32-bit pointer, zero-extend it + return v.ir.UConvert(64, v.X(mem.addr_reg)); + } + if (!IR::IsAligned(mem.addr_reg, 2)) { + throw NotImplementedException("Unaligned address register"); + } + // Pack two registers to build the 64-bit address + return v.ir.PackUint2x32(v.ir.CompositeConstruct(v.X(mem.addr_reg), v.X(mem.addr_reg + 1))); + }()}; + const u64 addr_offset{[&]() -> u64 { + if (mem.addr_reg == IR::Reg::RZ) { + // When RZ is used, the address is an absolute address + return static_cast<u64>(mem.rz_addr_offset.Value()); + } else { + return static_cast<u64>(mem.addr_offset.Value()); + } + }()}; + // Apply the offset + return v.ir.IAdd(address, v.ir.Imm64(addr_offset)); +} +} // Anonymous namespace + +void TranslatorVisitor::LDG(u64 insn) { + // LDG loads global memory into registers + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<46, 2, LoadCache> cache; + BitField<48, 3, LoadSize> size; + } const ldg{insn}; + + // Pointer to load data from + const IR::U64 address{Address(*this, insn)}; + const IR::Reg dest_reg{ldg.dest_reg}; + switch (ldg.size) { + case LoadSize::U8: + X(dest_reg, ir.LoadGlobalU8(address)); + break; + case LoadSize::S8: + X(dest_reg, ir.LoadGlobalS8(address)); + break; + case LoadSize::U16: + X(dest_reg, ir.LoadGlobalU16(address)); + break; + case LoadSize::S16: + X(dest_reg, ir.LoadGlobalS16(address)); + break; + case LoadSize::B32: + X(dest_reg, ir.LoadGlobal32(address)); + break; + case LoadSize::B64: { + if (!IR::IsAligned(dest_reg, 2)) { + throw NotImplementedException("Unaligned data registers"); + } + const IR::Value vector{ir.LoadGlobal64(address)}; + for (int i = 0; i < 2; ++i) { + X(dest_reg + i, IR::U32{ir.CompositeExtract(vector, static_cast<size_t>(i))}); + } + break; + } + case LoadSize::B128: + case LoadSize::U128: { + if (!IR::IsAligned(dest_reg, 4)) { + throw NotImplementedException("Unaligned data registers"); + } + const IR::Value vector{ir.LoadGlobal128(address)}; + for (int i = 0; i < 4; ++i) { + X(dest_reg + i, IR::U32{ir.CompositeExtract(vector, static_cast<size_t>(i))}); + } + break; + } + default: + throw NotImplementedException("Invalid LDG size {}", ldg.size.Value()); + } +} + +void TranslatorVisitor::STG(u64 insn) { + // STG stores registers into global memory.
+ union { + u64 raw; + BitField<0, 8, IR::Reg> data_reg; + BitField<46, 2, StoreCache> cache; + BitField<48, 3, StoreSize> size; + } const stg{insn}; + + // Pointer to store data into + const IR::U64 address{Address(*this, insn)}; + const IR::Reg data_reg{stg.data_reg}; + switch (stg.size) { + case StoreSize::U8: + ir.WriteGlobalU8(address, X(data_reg)); + break; + case StoreSize::S8: + ir.WriteGlobalS8(address, X(data_reg)); + break; + case StoreSize::U16: + ir.WriteGlobalU16(address, X(data_reg)); + break; + case StoreSize::S16: + ir.WriteGlobalS16(address, X(data_reg)); + break; + case StoreSize::B32: + ir.WriteGlobal32(address, X(data_reg)); + break; + case StoreSize::B64: { + if (!IR::IsAligned(data_reg, 2)) { + throw NotImplementedException("Unaligned data registers"); + } + const IR::Value vector{ir.CompositeConstruct(X(data_reg), X(data_reg + 1))}; + ir.WriteGlobal64(address, vector); + break; + } + case StoreSize::B128: + if (!IR::IsAligned(data_reg, 4)) { + throw NotImplementedException("Unaligned data registers"); + } + const IR::Value vector{ + ir.CompositeConstruct(X(data_reg), X(data_reg + 1), X(data_reg + 2), X(data_reg + 3))}; + ir.WriteGlobal128(address, vector); + break; + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation.cpp new file mode 100755 index 000000000..92cd27ed4 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation.cpp @@ -0,0 +1,116 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include <optional> + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class LogicalOp : u64 { + AND, + OR, + XOR, + PASS_B, +}; + +[[nodiscard]] IR::U32 LogicalOperation(IR::IREmitter& ir, const IR::U32& operand_1, + const IR::U32& operand_2, LogicalOp op) { + switch (op) { + case LogicalOp::AND: + return ir.BitwiseAnd(operand_1, operand_2); + case LogicalOp::OR: + return ir.BitwiseOr(operand_1, operand_2); + case LogicalOp::XOR: + return ir.BitwiseXor(operand_1, operand_2); + case LogicalOp::PASS_B: + return operand_2; + default: + throw NotImplementedException("Invalid Logical operation {}", op); + } +} + +void LOP(TranslatorVisitor& v, u64 insn, IR::U32 op_b, bool x, bool cc, bool inv_a, bool inv_b, + LogicalOp bit_op, std::optional<PredicateOp> pred_op = std::nullopt, + IR::Pred dest_pred = IR::Pred::PT) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + } const lop{insn}; + + if (x) { + throw NotImplementedException("X"); + } + IR::U32 op_a{v.X(lop.src_reg)}; + if (inv_a != 0) { + op_a = v.ir.BitwiseNot(op_a); + } + if (inv_b != 0) { + op_b = v.ir.BitwiseNot(op_b); + } + + const IR::U32 result{LogicalOperation(v.ir, op_a, op_b, bit_op)}; + if (pred_op) { + const IR::U1 pred_result{PredicateOperation(v.ir, result, *pred_op)}; + v.ir.SetPred(dest_pred, pred_result); + } + if (cc) { + if (bit_op == LogicalOp::PASS_B) { + v.SetZFlag(v.ir.IEqual(result, v.ir.Imm32(0))); + v.SetSFlag(v.ir.ILessThan(result, v.ir.Imm32(0), true)); + } else { + v.SetZFlag(v.ir.GetZeroFromOp(result)); + v.SetSFlag(v.ir.GetSignFromOp(result)); + } + v.ResetCFlag(); + v.ResetOFlag(); + } + v.X(lop.dest_reg, result); +}
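The LOP worker above reduces every two-input logic operation to AND/OR/XOR/PASS_B after optionally complementing each source; the decoders that follow only differ in where they pull those control bits from. A scalar model of the datapath (illustrative, not part of the patch):

#include <cstdint>
#include <stdexcept>

enum class Lop { And, Or, Xor, PassB };

// LOP model: optional one's complement of each operand, then the bit op;
// PASS_B forwards the (possibly inverted) second operand unchanged.
uint32_t LopModel(uint32_t a, uint32_t b, Lop op, bool inv_a, bool inv_b) {
    if (inv_a) a = ~a;
    if (inv_b) b = ~b;
    switch (op) {
    case Lop::And:   return a & b;
    case Lop::Or:    return a | b;
    case Lop::Xor:   return a ^ b;
    case Lop::PassB: return b;
    }
    throw std::logic_error("invalid LOP");
}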
+void LOP(TranslatorVisitor& v, u64 insn, const IR::U32& op_b) { + union { + u64 insn; + BitField<39, 1, u64> inv_a; + BitField<40, 1, u64> inv_b; + BitField<41, 2, LogicalOp> bit_op; + BitField<43, 1, u64> x; + BitField<44, 2, PredicateOp> pred_op; + BitField<47, 1, u64> cc; + BitField<48, 3, IR::Pred> dest_pred; + } const lop{insn}; + + LOP(v, insn, op_b, lop.x != 0, lop.cc != 0, lop.inv_a != 0, lop.inv_b != 0, lop.bit_op, + lop.pred_op, lop.dest_pred); +} +} // Anonymous namespace + +void TranslatorVisitor::LOP_reg(u64 insn) { + LOP(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::LOP_cbuf(u64 insn) { + LOP(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::LOP_imm(u64 insn) { + LOP(*this, insn, GetImm20(insn)); +} + +void TranslatorVisitor::LOP32I(u64 insn) { + union { + u64 raw; + BitField<53, 2, LogicalOp> bit_op; + BitField<57, 1, u64> x; + BitField<52, 1, u64> cc; + BitField<55, 1, u64> inv_a; + BitField<56, 1, u64> inv_b; + } const lop32i{insn}; + + LOP(*this, insn, GetImm32(insn), lop32i.x != 0, lop32i.cc != 0, lop32i.inv_a != 0, + lop32i.inv_b != 0, lop32i.bit_op); +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation_three_input.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation_three_input.cpp new file mode 100755 index 000000000..e0fe47912 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/logic_operation_three_input.cpp @@ -0,0 +1,122 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +// https://forums.developer.nvidia.com/t/reverse-lut-for-lop3-lut/110651 +// Emulate GPU's LOP3.LUT (three-input logic op with 8-bit truth table) +IR::U32 ApplyLUT(IR::IREmitter& ir, const IR::U32& a, const IR::U32& b, const IR::U32& c, + u64 ttbl) { + IR::U32 r{ir.Imm32(0)}; + const IR::U32 not_a{ir.BitwiseNot(a)}; + const IR::U32 not_b{ir.BitwiseNot(b)}; + const IR::U32 not_c{ir.BitwiseNot(c)}; + if (ttbl & 0x01) { + // r |= ~a & ~b & ~c; + const auto lhs{ir.BitwiseAnd(not_a, not_b)}; + const auto rhs{ir.BitwiseAnd(lhs, not_c)}; + r = ir.BitwiseOr(r, rhs); + } + if (ttbl & 0x02) { + // r |= ~a & ~b & c; + const auto lhs{ir.BitwiseAnd(not_a, not_b)}; + const auto rhs{ir.BitwiseAnd(lhs, c)}; + r = ir.BitwiseOr(r, rhs); + } + if (ttbl & 0x04) { + // r |= ~a & b & ~c; + const auto lhs{ir.BitwiseAnd(not_a, b)}; + const auto rhs{ir.BitwiseAnd(lhs, not_c)}; + r = ir.BitwiseOr(r, rhs); + } + if (ttbl & 0x08) { + // r |= ~a & b & c; + const auto lhs{ir.BitwiseAnd(not_a, b)}; + const auto rhs{ir.BitwiseAnd(lhs, c)}; + r = ir.BitwiseOr(r, rhs); + } + if (ttbl & 0x10) { + // r |= a & ~b & ~c; + const auto lhs{ir.BitwiseAnd(a, not_b)}; + const auto rhs{ir.BitwiseAnd(lhs, not_c)}; + r = ir.BitwiseOr(r, rhs); + } + if (ttbl & 0x20) { + // r |= a & ~b & c; + const auto lhs{ir.BitwiseAnd(a, not_b)}; + const auto rhs{ir.BitwiseAnd(lhs, c)}; + r = ir.BitwiseOr(r, rhs); + } + if (ttbl & 0x40) { + // r |= a & b & ~c; + const auto lhs{ir.BitwiseAnd(a, b)}; + const auto rhs{ir.BitwiseAnd(lhs, not_c)}; + r = ir.BitwiseOr(r, rhs); + } + if (ttbl & 0x80) { + // r |= a & b & c; + const auto lhs{ir.BitwiseAnd(a, b)}; + const auto rhs{ir.BitwiseAnd(lhs, c)}; 
+ r = ir.BitwiseOr(r, rhs); + } + return r; +} + +IR::U32 LOP3(TranslatorVisitor& v, u64 insn, const IR::U32& op_b, const IR::U32& op_c, u64 lut) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<47, 1, u64> cc; + } const lop3{insn}; + + if (lop3.cc != 0) { + throw NotImplementedException("LOP3 CC"); + } + + const IR::U32 op_a{v.X(lop3.src_reg)}; + const IR::U32 result{ApplyLUT(v.ir, op_a, op_b, op_c, lut)}; + v.X(lop3.dest_reg, result); + return result; +} + +u64 GetLut48(u64 insn) { + union { + u64 raw; + BitField<48, 8, u64> lut; + } const lut{insn}; + return lut.lut; +} +} // Anonymous namespace + +void TranslatorVisitor::LOP3_reg(u64 insn) { + union { + u64 insn; + BitField<28, 8, u64> lut; + BitField<38, 1, u64> x; + BitField<36, 2, PredicateOp> pred_op; + BitField<48, 3, IR::Pred> pred; + } const lop3{insn}; + + if (lop3.x != 0) { + throw NotImplementedException("LOP3 X"); + } + const IR::U32 result{LOP3(*this, insn, GetReg20(insn), GetReg39(insn), lop3.lut)}; + const IR::U1 pred_result{PredicateOperation(ir, result, lop3.pred_op)}; + ir.SetPred(lop3.pred, pred_result); +} + +void TranslatorVisitor::LOP3_cbuf(u64 insn) { + LOP3(*this, insn, GetCbuf(insn), GetReg39(insn), GetLut48(insn)); +} + +void TranslatorVisitor::LOP3_imm(u64 insn) { + LOP3(*this, insn, GetImm20(insn), GetReg39(insn), GetLut48(insn)); +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/move_predicate_to_register.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/move_predicate_to_register.cpp new file mode 100755 index 000000000..4324fd443 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/move_predicate_to_register.cpp @@ -0,0 +1,66 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Mode : u64 { + PR, + CC, +}; +} // Anonymous namespace + +void TranslatorVisitor::P2R_reg(u64) { + throw NotImplementedException("P2R (reg)"); +} + +void TranslatorVisitor::P2R_cbuf(u64) { + throw NotImplementedException("P2R (cbuf)"); +} + +void TranslatorVisitor::P2R_imm(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src; + BitField<40, 1, Mode> mode; + BitField<41, 2, u64> byte_selector; + } const p2r{insn}; + + const u32 mask{GetImm20(insn).U32()}; + const bool pr_mode{p2r.mode == Mode::PR}; + const u32 num_items{pr_mode ? 
7U : 4U}; + const u32 offset{static_cast<u32>(p2r.byte_selector) * 8}; + IR::U32 insert{ir.Imm32(0)}; + for (u32 index = 0; index < num_items; ++index) { + if (((mask >> index) & 1) == 0) { + continue; + } + const IR::U1 cond{[this, index, pr_mode] { + if (pr_mode) { + return ir.GetPred(IR::Pred{index}); + } + switch (index) { + case 0: + return ir.GetZFlag(); + case 1: + return ir.GetSFlag(); + case 2: + return ir.GetCFlag(); + case 3: + return ir.GetOFlag(); + } + throw LogicError("Unreachable P2R index"); + }()}; + const IR::U32 bit{ir.Select(cond, ir.Imm32(1U << (index + offset)), ir.Imm32(0))}; + insert = ir.BitwiseOr(insert, bit); + } + const IR::U32 masked_out{ir.BitwiseAnd(X(p2r.src), ir.Imm32(~(mask << offset)))}; + X(p2r.dest_reg, ir.BitwiseOr(masked_out, insert)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/move_register.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/move_register.cpp new file mode 100755 index 000000000..6bb08db8a --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/move_register.cpp @@ -0,0 +1,44 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void MOV(TranslatorVisitor& v, u64 insn, const IR::U32& src, bool is_mov32i = false) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<39, 4, u64> mask; + BitField<12, 4, u64> mov32i_mask; + } const mov{insn}; + + if ((is_mov32i ? mov.mov32i_mask : mov.mask) != 0xf) { + throw NotImplementedException("Non-full move mask"); + } + v.X(mov.dest_reg, src); +} +} // Anonymous namespace + +void TranslatorVisitor::MOV_reg(u64 insn) { + MOV(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::MOV_cbuf(u64 insn) { + MOV(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::MOV_imm(u64 insn) { + MOV(*this, insn, GetImm20(insn)); +} + +void TranslatorVisitor::MOV32I(u64 insn) { + MOV(*this, insn, GetImm32(insn), true); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/move_register_to_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/move_register_to_predicate.cpp new file mode 100755 index 000000000..eda5f177b --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/move_register_to_predicate.cpp @@ -0,0 +1,71 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
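R2P, implemented below, is the inverse of the P2R translator above: bits of a source register byte are scattered into the predicate file (or the ZSCO flags), but only where the mask operand has a one; everything else keeps its previous value. A scalar sketch over a seven-entry predicate array (hypothetical types and name, not part of the patch):

#include <array>
#include <cstdint>

// R2P.PR model: mask bit i selects whether predicate i is overwritten
// with bit i of the source byte chosen by byte_selector.
void R2pModel(std::array<bool, 7>& preds, uint32_t src, uint32_t mask,
              uint32_t byte_selector) {
    const uint32_t byte = (src >> (byte_selector * 8)) & 0xff;
    for (uint32_t i = 0; i < preds.size(); ++i) {
        if ((mask >> i) & 1) {
            preds[i] = ((byte >> i) & 1) != 0;
        }
    }
}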
+ +#include "common/bit_field.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Mode : u64 { + PR, + CC, +}; + +void SetFlag(IR::IREmitter& ir, const IR::U1& inv_mask_bit, const IR::U1& src_bit, u32 index) { + switch (index) { + case 0: + return ir.SetZFlag(IR::U1{ir.Select(inv_mask_bit, ir.GetZFlag(), src_bit)}); + case 1: + return ir.SetSFlag(IR::U1{ir.Select(inv_mask_bit, ir.GetSFlag(), src_bit)}); + case 2: + return ir.SetCFlag(IR::U1{ir.Select(inv_mask_bit, ir.GetCFlag(), src_bit)}); + case 3: + return ir.SetOFlag(IR::U1{ir.Select(inv_mask_bit, ir.GetOFlag(), src_bit)}); + default: + throw LogicError("Unreachable R2P index"); + } +} + +void R2P(TranslatorVisitor& v, u64 insn, const IR::U32& mask) { + union { + u64 raw; + BitField<8, 8, IR::Reg> src_reg; + BitField<40, 1, Mode> mode; + BitField<41, 2, u64> byte_selector; + } const r2p{insn}; + const IR::U32 src{v.X(r2p.src_reg)}; + const IR::U32 count{v.ir.Imm32(1)}; + const bool pr_mode{r2p.mode == Mode::PR}; + const u32 num_items{pr_mode ? 7U : 4U}; + const u32 offset_base{static_cast<u32>(r2p.byte_selector) * 8}; + for (u32 index = 0; index < num_items; ++index) { + const IR::U32 offset{v.ir.Imm32(offset_base + index)}; + const IR::U1 src_zero{v.ir.GetZeroFromOp(v.ir.BitFieldExtract(src, offset, count, false))}; + const IR::U1 src_bit{v.ir.LogicalNot(src_zero)}; + const IR::U32 mask_bfe{v.ir.BitFieldExtract(mask, v.ir.Imm32(index), count, false)}; + const IR::U1 inv_mask_bit{v.ir.GetZeroFromOp(mask_bfe)}; + if (pr_mode) { + const IR::Pred pred{index}; + v.ir.SetPred(pred, IR::U1{v.ir.Select(inv_mask_bit, v.ir.GetPred(pred), src_bit)}); + } else { + SetFlag(v.ir, inv_mask_bit, src_bit, index); + } + } +} +} // Anonymous namespace + +void TranslatorVisitor::R2P_reg(u64 insn) { + R2P(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::R2P_cbuf(u64 insn) { + R2P(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::R2P_imm(u64 insn) { + R2P(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/move_special_register.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/move_special_register.cpp new file mode 100755 index 000000000..20cb2674e --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/move_special_register.cpp @@ -0,0 +1,181 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
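Most entries in the Read() switch below map a special register directly onto an IR intrinsic; SR_TID is the notable exception, packing all three local thread-id components into one word with two BitFieldInserts. A scalar sketch of the resulting layout, assuming each component fits its field (illustrative name, not part of the patch):

#include <cstdint>

// SR_TID model: x in bits [15:0], y in bits [23:16], z in bits [31:26].
uint32_t PackTid(uint32_t x, uint32_t y, uint32_t z) {
    uint32_t packed = x & 0xffff;  // x occupies the low 16 bits
    packed |= (y & 0xff) << 16;    // y occupies bits [23:16]
    packed |= (z & 0x3f) << 26;    // z occupies bits [31:26]
    return packed;
}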
+ +#include "common/bit_cast.h" +#include "common/bit_field.h" +#include "common/common_types.h" +#include "common/logging/log.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class SpecialRegister : u64 { + SR_LANEID = 0, + SR_CLOCK = 1, + SR_VIRTCFG = 2, + SR_VIRTID = 3, + SR_PM0 = 4, + SR_PM1 = 5, + SR_PM2 = 6, + SR_PM3 = 7, + SR_PM4 = 8, + SR_PM5 = 9, + SR_PM6 = 10, + SR_PM7 = 11, + SR12 = 12, + SR13 = 13, + SR14 = 14, + SR_ORDERING_TICKET = 15, + SR_PRIM_TYPE = 16, + SR_INVOCATION_ID = 17, + SR_Y_DIRECTION = 18, + SR_THREAD_KILL = 19, + SM_SHADER_TYPE = 20, + SR_DIRECTCBEWRITEADDRESSLOW = 21, + SR_DIRECTCBEWRITEADDRESSHIGH = 22, + SR_DIRECTCBEWRITEENABLE = 23, + SR_MACHINE_ID_0 = 24, + SR_MACHINE_ID_1 = 25, + SR_MACHINE_ID_2 = 26, + SR_MACHINE_ID_3 = 27, + SR_AFFINITY = 28, + SR_INVOCATION_INFO = 29, + SR_WSCALEFACTOR_XY = 30, + SR_WSCALEFACTOR_Z = 31, + SR_TID = 32, + SR_TID_X = 33, + SR_TID_Y = 34, + SR_TID_Z = 35, + SR_CTA_PARAM = 36, + SR_CTAID_X = 37, + SR_CTAID_Y = 38, + SR_CTAID_Z = 39, + SR_NTID = 40, + SR_CirQueueIncrMinusOne = 41, + SR_NLATC = 42, + SR43 = 43, + SR_SM_SPA_VERSION = 44, + SR_MULTIPASSSHADERINFO = 45, + SR_LWINHI = 46, + SR_SWINHI = 47, + SR_SWINLO = 48, + SR_SWINSZ = 49, + SR_SMEMSZ = 50, + SR_SMEMBANKS = 51, + SR_LWINLO = 52, + SR_LWINSZ = 53, + SR_LMEMLOSZ = 54, + SR_LMEMHIOFF = 55, + SR_EQMASK = 56, + SR_LTMASK = 57, + SR_LEMASK = 58, + SR_GTMASK = 59, + SR_GEMASK = 60, + SR_REGALLOC = 61, + SR_BARRIERALLOC = 62, + SR63 = 63, + SR_GLOBALERRORSTATUS = 64, + SR65 = 65, + SR_WARPERRORSTATUS = 66, + SR_WARPERRORSTATUSCLEAR = 67, + SR68 = 68, + SR69 = 69, + SR70 = 70, + SR71 = 71, + SR_PM_HI0 = 72, + SR_PM_HI1 = 73, + SR_PM_HI2 = 74, + SR_PM_HI3 = 75, + SR_PM_HI4 = 76, + SR_PM_HI5 = 77, + SR_PM_HI6 = 78, + SR_PM_HI7 = 79, + SR_CLOCKLO = 80, + SR_CLOCKHI = 81, + SR_GLOBALTIMERLO = 82, + SR_GLOBALTIMERHI = 83, + SR84 = 84, + SR85 = 85, + SR86 = 86, + SR87 = 87, + SR88 = 88, + SR89 = 89, + SR90 = 90, + SR91 = 91, + SR92 = 92, + SR93 = 93, + SR94 = 94, + SR95 = 95, + SR_HWTASKID = 96, + SR_CIRCULARQUEUEENTRYINDEX = 97, + SR_CIRCULARQUEUEENTRYADDRESSLOW = 98, + SR_CIRCULARQUEUEENTRYADDRESSHIGH = 99, +}; + +[[nodiscard]] IR::U32 Read(IR::IREmitter& ir, SpecialRegister special_register) { + switch (special_register) { + case SpecialRegister::SR_INVOCATION_ID: + return ir.InvocationId(); + case SpecialRegister::SR_THREAD_KILL: + return IR::U32{ir.Select(ir.IsHelperInvocation(), ir.Imm32(-1), ir.Imm32(0))}; + case SpecialRegister::SR_INVOCATION_INFO: + LOG_WARNING(Shader, "(STUBBED) SR_INVOCATION_INFO"); + return ir.Imm32(0x00ff'0000); + case SpecialRegister::SR_TID: { + const IR::Value tid{ir.LocalInvocationId()}; + return ir.BitFieldInsert(ir.BitFieldInsert(IR::U32{ir.CompositeExtract(tid, 0)}, + IR::U32{ir.CompositeExtract(tid, 1)}, + ir.Imm32(16), ir.Imm32(8)), + IR::U32{ir.CompositeExtract(tid, 2)}, ir.Imm32(26), ir.Imm32(6)); + } + case SpecialRegister::SR_TID_X: + return ir.LocalInvocationIdX(); + case SpecialRegister::SR_TID_Y: + return ir.LocalInvocationIdY(); + case SpecialRegister::SR_TID_Z: + return ir.LocalInvocationIdZ(); + case SpecialRegister::SR_CTAID_X: + return ir.WorkgroupIdX(); + case SpecialRegister::SR_CTAID_Y: + return ir.WorkgroupIdY(); + case SpecialRegister::SR_CTAID_Z: + return ir.WorkgroupIdZ(); + case SpecialRegister::SR_WSCALEFACTOR_XY: + LOG_WARNING(Shader, "(STUBBED) SR_WSCALEFACTOR_XY"); + return ir.Imm32(Common::BitCast<u32>(1.0f)); + case SpecialRegister::SR_WSCALEFACTOR_Z: + LOG_WARNING(Shader, "(STUBBED) SR_WSCALEFACTOR_Z"); +
return ir.Imm32(Common::BitCast(1.0f)); + case SpecialRegister::SR_LANEID: + return ir.LaneId(); + case SpecialRegister::SR_EQMASK: + return ir.SubgroupEqMask(); + case SpecialRegister::SR_LTMASK: + return ir.SubgroupLtMask(); + case SpecialRegister::SR_LEMASK: + return ir.SubgroupLeMask(); + case SpecialRegister::SR_GTMASK: + return ir.SubgroupGtMask(); + case SpecialRegister::SR_GEMASK: + return ir.SubgroupGeMask(); + case SpecialRegister::SR_Y_DIRECTION: + return ir.BitCast(ir.YDirection()); + case SpecialRegister::SR_AFFINITY: + LOG_WARNING(Shader, "(STUBBED) SR_AFFINITY"); + return ir.Imm32(0); // This is the default value hardware returns. + default: + throw NotImplementedException("S2R special register {}", special_register); + } +} +} // Anonymous namespace + +void TranslatorVisitor::S2R(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<20, 8, SpecialRegister> src_reg; + } const s2r{insn}; + + X(s2r.dest_reg, Read(ir, s2r.src_reg)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/not_implemented.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/not_implemented.cpp new file mode 100755 index 000000000..7e26ab359 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/not_implemented.cpp @@ -0,0 +1,283 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/opcodes.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { + +[[noreturn]] static void ThrowNotImplemented(Opcode opcode) { + throw NotImplementedException("Instruction {} is not implemented", opcode); +} + +void TranslatorVisitor::ATOM_cas(u64) { + ThrowNotImplemented(Opcode::ATOM_cas); +} + +void TranslatorVisitor::ATOMS_cas(u64) { + ThrowNotImplemented(Opcode::ATOMS_cas); +} + +void TranslatorVisitor::B2R(u64) { + ThrowNotImplemented(Opcode::B2R); +} + +void TranslatorVisitor::BPT(u64) { + ThrowNotImplemented(Opcode::BPT); +} + +void TranslatorVisitor::BRA(u64) { + ThrowNotImplemented(Opcode::BRA); +} + +void TranslatorVisitor::BRK(u64) { + ThrowNotImplemented(Opcode::BRK); +} + +void TranslatorVisitor::CAL() { + // CAL is a no-op +} + +void TranslatorVisitor::CCTL(u64) { + ThrowNotImplemented(Opcode::CCTL); +} + +void TranslatorVisitor::CCTLL(u64) { + ThrowNotImplemented(Opcode::CCTLL); +} + +void TranslatorVisitor::CONT(u64) { + ThrowNotImplemented(Opcode::CONT); +} + +void TranslatorVisitor::CS2R(u64) { + ThrowNotImplemented(Opcode::CS2R); +} + +void TranslatorVisitor::FCHK_reg(u64) { + ThrowNotImplemented(Opcode::FCHK_reg); +} + +void TranslatorVisitor::FCHK_cbuf(u64) { + ThrowNotImplemented(Opcode::FCHK_cbuf); +} + +void TranslatorVisitor::FCHK_imm(u64) { + ThrowNotImplemented(Opcode::FCHK_imm); +} + +void TranslatorVisitor::GETCRSPTR(u64) { + ThrowNotImplemented(Opcode::GETCRSPTR); +} + +void TranslatorVisitor::GETLMEMBASE(u64) { + ThrowNotImplemented(Opcode::GETLMEMBASE); +} + +void TranslatorVisitor::IDE(u64) { + ThrowNotImplemented(Opcode::IDE); +} + +void TranslatorVisitor::IDP_reg(u64) { + ThrowNotImplemented(Opcode::IDP_reg); +} + +void TranslatorVisitor::IDP_imm(u64) { + ThrowNotImplemented(Opcode::IDP_imm); +} + +void TranslatorVisitor::IMAD_reg(u64) { + ThrowNotImplemented(Opcode::IMAD_reg); +} + +void TranslatorVisitor::IMAD_rc(u64) { + 
ThrowNotImplemented(Opcode::IMAD_rc); +} + +void TranslatorVisitor::IMAD_cr(u64) { + ThrowNotImplemented(Opcode::IMAD_cr); +} + +void TranslatorVisitor::IMAD_imm(u64) { + ThrowNotImplemented(Opcode::IMAD_imm); +} + +void TranslatorVisitor::IMAD32I(u64) { + ThrowNotImplemented(Opcode::IMAD32I); +} + +void TranslatorVisitor::IMADSP_reg(u64) { + ThrowNotImplemented(Opcode::IMADSP_reg); +} + +void TranslatorVisitor::IMADSP_rc(u64) { + ThrowNotImplemented(Opcode::IMADSP_rc); +} + +void TranslatorVisitor::IMADSP_cr(u64) { + ThrowNotImplemented(Opcode::IMADSP_cr); +} + +void TranslatorVisitor::IMADSP_imm(u64) { + ThrowNotImplemented(Opcode::IMADSP_imm); +} + +void TranslatorVisitor::IMUL_reg(u64) { + ThrowNotImplemented(Opcode::IMUL_reg); +} + +void TranslatorVisitor::IMUL_cbuf(u64) { + ThrowNotImplemented(Opcode::IMUL_cbuf); +} + +void TranslatorVisitor::IMUL_imm(u64) { + ThrowNotImplemented(Opcode::IMUL_imm); +} + +void TranslatorVisitor::IMUL32I(u64) { + ThrowNotImplemented(Opcode::IMUL32I); +} + +void TranslatorVisitor::JCAL(u64) { + ThrowNotImplemented(Opcode::JCAL); +} + +void TranslatorVisitor::JMP(u64) { + ThrowNotImplemented(Opcode::JMP); +} + +void TranslatorVisitor::KIL() { + // KIL is a no-op +} + +void TranslatorVisitor::LD(u64) { + ThrowNotImplemented(Opcode::LD); +} + +void TranslatorVisitor::LEPC(u64) { + ThrowNotImplemented(Opcode::LEPC); +} + +void TranslatorVisitor::LONGJMP(u64) { + ThrowNotImplemented(Opcode::LONGJMP); +} + +void TranslatorVisitor::NOP(u64) { + // NOP is No-Op. +} + +void TranslatorVisitor::PBK() { + // PBK is a no-op +} + +void TranslatorVisitor::PCNT() { + // PCNT is a no-op +} + +void TranslatorVisitor::PEXIT(u64) { + ThrowNotImplemented(Opcode::PEXIT); +} + +void TranslatorVisitor::PLONGJMP(u64) { + ThrowNotImplemented(Opcode::PLONGJMP); +} + +void TranslatorVisitor::PRET(u64) { + ThrowNotImplemented(Opcode::PRET); +} + +void TranslatorVisitor::PRMT_reg(u64) { + ThrowNotImplemented(Opcode::PRMT_reg); +} + +void TranslatorVisitor::PRMT_rc(u64) { + ThrowNotImplemented(Opcode::PRMT_rc); +} + +void TranslatorVisitor::PRMT_cr(u64) { + ThrowNotImplemented(Opcode::PRMT_cr); +} + +void TranslatorVisitor::PRMT_imm(u64) { + ThrowNotImplemented(Opcode::PRMT_imm); +} + +void TranslatorVisitor::R2B(u64) { + ThrowNotImplemented(Opcode::R2B); +} + +void TranslatorVisitor::RAM(u64) { + ThrowNotImplemented(Opcode::RAM); +} + +void TranslatorVisitor::RET(u64) { + ThrowNotImplemented(Opcode::RET); +} + +void TranslatorVisitor::RTT(u64) { + ThrowNotImplemented(Opcode::RTT); +} + +void TranslatorVisitor::SAM(u64) { + ThrowNotImplemented(Opcode::SAM); +} + +void TranslatorVisitor::SETCRSPTR(u64) { + ThrowNotImplemented(Opcode::SETCRSPTR); +} + +void TranslatorVisitor::SETLMEMBASE(u64) { + ThrowNotImplemented(Opcode::SETLMEMBASE); +} + +void TranslatorVisitor::SSY() { + // SSY is a no-op +} + +void TranslatorVisitor::ST(u64) { + ThrowNotImplemented(Opcode::ST); +} + +void TranslatorVisitor::STP(u64) { + ThrowNotImplemented(Opcode::STP); +} + +void TranslatorVisitor::SUATOM_cas(u64) { + ThrowNotImplemented(Opcode::SUATOM_cas); +} + +void TranslatorVisitor::SYNC(u64) { + ThrowNotImplemented(Opcode::SYNC); +} + +void TranslatorVisitor::TXA(u64) { + ThrowNotImplemented(Opcode::TXA); +} + +void TranslatorVisitor::VABSDIFF(u64) { + ThrowNotImplemented(Opcode::VABSDIFF); +} + +void TranslatorVisitor::VABSDIFF4(u64) { + ThrowNotImplemented(Opcode::VABSDIFF4); +} + +void TranslatorVisitor::VADD(u64) { + ThrowNotImplemented(Opcode::VADD); +} + +void TranslatorVisitor::VSET(u64) { + 
ThrowNotImplemented(Opcode::VSET); +} +void TranslatorVisitor::VSHL(u64) { + ThrowNotImplemented(Opcode::VSHL); +} + +void TranslatorVisitor::VSHR(u64) { + ThrowNotImplemented(Opcode::VSHR); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/output_geometry.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/output_geometry.cpp new file mode 100755 index 000000000..01cfad88d --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/output_geometry.cpp @@ -0,0 +1,45 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +void OUT(TranslatorVisitor& v, u64 insn, IR::U32 stream_index) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> output_reg; // Not needed on host + BitField<39, 1, u64> emit; + BitField<40, 1, u64> cut; + } const out{insn}; + + stream_index = v.ir.BitwiseAnd(stream_index, v.ir.Imm32(0b11)); + + if (out.emit != 0) { + v.ir.EmitVertex(stream_index); + } + if (out.cut != 0) { + v.ir.EndPrimitive(stream_index); + } + // Host doesn't need the output register, but we can write to it to avoid undefined reads + v.X(out.dest_reg, v.ir.Imm32(0)); +} +} // Anonymous namespace + +void TranslatorVisitor::OUT_reg(u64 insn) { + OUT(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::OUT_cbuf(u64 insn) { + OUT(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::OUT_imm(u64 insn) { + OUT(*this, insn, GetImm20(insn)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/pixel_load.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/pixel_load.cpp new file mode 100755 index 000000000..b4767afb5 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/pixel_load.cpp @@ -0,0 +1,46 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
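The OUT translation above reduces to two single-bit tests plus a two-bit stream mask. As a host-side illustration only (a minimal sketch with hypothetical names, not part of the patch; the real translator emits IR rather than printing):

#include <cstdint>
#include <cstdio>

// Sketch: bit 39 requests EmitVertex, bit 40 EndPrimitive, and the stream
// index is masked to two bits, mirroring OUT() above. DecodeOut is a
// hypothetical helper, not yuzu code.
void DecodeOut(std::uint64_t insn, std::uint32_t stream_index) {
    stream_index &= 0b11; // stream index is taken modulo 4, as in the translator
    if ((insn >> 39) & 1) {
        std::printf("EmitVertex(stream=%u)\n", stream_index);
    }
    if ((insn >> 40) & 1) {
        std::printf("EndPrimitive(stream=%u)\n", stream_index);
    }
}

int main() {
    DecodeOut(std::uint64_t{1} << 39, 6); // prints: EmitVertex(stream=2)
}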
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Mode : u64 { + Default, + CovMask, + Covered, + Offset, + CentroidOffset, + MyIndex, +}; +} // Anonymous namespace + +void TranslatorVisitor::PIXLD(u64 insn) { + union { + u64 raw; + BitField<31, 3, Mode> mode; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> addr_reg; + BitField<20, 8, s64> addr_offset; + BitField<45, 3, IR::Pred> dest_pred; + } const pixld{insn}; + + if (pixld.dest_pred != IR::Pred::PT) { + throw NotImplementedException("Destination predicate"); + } + if (pixld.addr_reg != IR::Reg::RZ || pixld.addr_offset != 0) { + throw NotImplementedException("Non-zero source register"); + } + switch (pixld.mode) { + case Mode::MyIndex: + X(pixld.dest_reg, ir.SampleId()); + break; + default: + throw NotImplementedException("Mode {}", pixld.mode.Value()); + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_predicate.cpp new file mode 100755 index 000000000..75d1fa8c1 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_predicate.cpp @@ -0,0 +1,38 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +void TranslatorVisitor::PSETP(u64 insn) { + union { + u64 raw; + BitField<0, 3, IR::Pred> dest_pred_b; + BitField<3, 3, IR::Pred> dest_pred_a; + BitField<12, 3, IR::Pred> pred_a; + BitField<15, 1, u64> neg_pred_a; + BitField<24, 2, BooleanOp> bop_1; + BitField<29, 3, IR::Pred> pred_b; + BitField<32, 1, u64> neg_pred_b; + BitField<39, 3, IR::Pred> pred_c; + BitField<42, 1, u64> neg_pred_c; + BitField<45, 2, BooleanOp> bop_2; + } const pset{insn}; + + const IR::U1 pred_a{ir.GetPred(pset.pred_a, pset.neg_pred_a != 0)}; + const IR::U1 pred_b{ir.GetPred(pset.pred_b, pset.neg_pred_b != 0)}; + const IR::U1 pred_c{ir.GetPred(pset.pred_c, pset.neg_pred_c != 0)}; + + const IR::U1 lhs_a{PredicateCombine(ir, pred_a, pred_b, pset.bop_1)}; + const IR::U1 lhs_b{PredicateCombine(ir, ir.LogicalNot(pred_a), pred_b, pset.bop_1)}; + const IR::U1 result_a{PredicateCombine(ir, lhs_a, pred_c, pset.bop_2)}; + const IR::U1 result_b{PredicateCombine(ir, lhs_b, pred_c, pset.bop_2)}; + + ir.SetPred(pset.dest_pred_a, result_a); + ir.SetPred(pset.dest_pred_b, result_b); +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_register.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_register.cpp new file mode 100755 index 000000000..b02789874 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/predicate_set_register.cpp @@ -0,0 +1,53 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +void TranslatorVisitor::PSET(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<12, 3, IR::Pred> pred_a; + BitField<15, 1, u64> neg_pred_a; + BitField<24, 2, BooleanOp> bop_1; + BitField<29, 3, IR::Pred> pred_b; + BitField<32, 1, u64> neg_pred_b; + BitField<39, 3, IR::Pred> pred_c; + BitField<42, 1, u64> neg_pred_c; + BitField<44, 1, u64> bf; + BitField<45, 2, BooleanOp> bop_2; + BitField<47, 1, u64> cc; + } const pset{insn}; + + const IR::U1 pred_a{ir.GetPred(pset.pred_a, pset.neg_pred_a != 0)}; + const IR::U1 pred_b{ir.GetPred(pset.pred_b, pset.neg_pred_b != 0)}; + const IR::U1 pred_c{ir.GetPred(pset.pred_c, pset.neg_pred_c != 0)}; + + const IR::U1 res_1{PredicateCombine(ir, pred_a, pred_b, pset.bop_1)}; + const IR::U1 res_2{PredicateCombine(ir, res_1, pred_c, pset.bop_2)}; + + const IR::U32 true_result{pset.bf != 0 ? ir.Imm32(0x3f800000) : ir.Imm32(-1)}; + const IR::U32 zero{ir.Imm32(0)}; + + const IR::U32 result{ir.Select(res_2, true_result, zero)}; + + X(pset.dest_reg, result); + if (pset.cc != 0) { + const IR::U1 is_zero{ir.IEqual(result, zero)}; + SetZFlag(is_zero); + if (pset.bf != 0) { + ResetSFlag(); + } else { + SetSFlag(ir.LogicalNot(is_zero)); + } + ResetOFlag(); + ResetCFlag(); + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/select_source_with_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/select_source_with_predicate.cpp new file mode 100755 index 000000000..93baa75a9 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/select_source_with_predicate.cpp @@ -0,0 +1,44 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { + +void SEL(TranslatorVisitor& v, u64 insn, const IR::U32& src) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<39, 3, IR::Pred> pred; + BitField<42, 1, u64> neg_pred; + } const sel{insn}; + + const IR::U1 pred = v.ir.GetPred(sel.pred); + IR::U32 op_a{v.X(sel.src_reg)}; + IR::U32 op_b{src}; + if (sel.neg_pred != 0) { + std::swap(op_a, op_b); + } + const IR::U32 result{v.ir.Select(pred, op_a, op_b)}; + + v.X(sel.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::SEL_reg(u64 insn) { + SEL(*this, insn, GetReg20(insn)); +} + +void TranslatorVisitor::SEL_cbuf(u64 insn) { + SEL(*this, insn, GetCbuf(insn)); +} + +void TranslatorVisitor::SEL_imm(u64 insn) { + SEL(*this, insn, GetImm20(insn)); +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/surface_atomic_operations.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/surface_atomic_operations.cpp new file mode 100755 index 000000000..44144f154 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/surface_atomic_operations.cpp @@ -0,0 +1,208 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include +#include + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Type : u64 { + _1D, + BUFFER_1D, + ARRAY_1D, + _2D, + ARRAY_2D, + _3D, +}; + +enum class Size : u64 { + U32, + S32, + U64, + S64, + F32FTZRN, + F16x2FTZRN, + SD32, + SD64, +}; + +enum class AtomicOp : u64 { + ADD, + MIN, + MAX, + INC, + DEC, + AND, + OR, + XOR, + EXCH, +}; + +enum class Clamp : u64 { + IGN, + Default, + TRAP, +}; + +TextureType GetType(Type type) { + switch (type) { + case Type::_1D: + return TextureType::Color1D; + case Type::BUFFER_1D: + return TextureType::Buffer; + case Type::ARRAY_1D: + return TextureType::ColorArray1D; + case Type::_2D: + return TextureType::Color2D; + case Type::ARRAY_2D: + return TextureType::ColorArray2D; + case Type::_3D: + return TextureType::Color3D; + } + throw NotImplementedException("Invalid type {}", type); +} + +IR::Value MakeCoords(TranslatorVisitor& v, IR::Reg reg, Type type) { + const auto array{[&](int index) { + return v.ir.BitFieldExtract(v.X(reg + index), v.ir.Imm32(0), v.ir.Imm32(16)); + }}; + switch (type) { + case Type::_1D: + case Type::BUFFER_1D: + return v.X(reg); + case Type::_2D: + return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1)); + case Type::_3D: + return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1), v.X(reg + 2)); + default: + break; + } + throw NotImplementedException("Invalid type {}", type); +} + +IR::Value ApplyAtomicOp(IR::IREmitter& ir, const IR::U32& handle, const IR::Value& coords, + const IR::Value& op_b, IR::TextureInstInfo info, AtomicOp op, + bool is_signed) { + switch (op) { + case AtomicOp::ADD: + return ir.ImageAtomicIAdd(handle, coords, op_b, info); + case AtomicOp::MIN: + return ir.ImageAtomicIMin(handle, coords, op_b, is_signed, info); + case AtomicOp::MAX: + return ir.ImageAtomicIMax(handle, coords, op_b, is_signed, info); + case AtomicOp::INC: + return ir.ImageAtomicInc(handle, coords, op_b, info); + case AtomicOp::DEC: + return ir.ImageAtomicDec(handle, coords, op_b, info); + case AtomicOp::AND: + return ir.ImageAtomicAnd(handle, coords, op_b, info); + case AtomicOp::OR: + return ir.ImageAtomicOr(handle, coords, op_b, info); + case AtomicOp::XOR: + return ir.ImageAtomicXor(handle, coords, op_b, info); + case AtomicOp::EXCH: + return ir.ImageAtomicExchange(handle, coords, op_b, info); + default: + throw NotImplementedException("Atomic Operation {}", op); + } +} + +ImageFormat Format(Size size) { + switch (size) { + case Size::U32: + case Size::S32: + case Size::SD32: + return ImageFormat::R32_UINT; + default: + break; + } + throw NotImplementedException("Invalid size {}", size); +} + +bool IsSizeInt32(Size size) { + switch (size) { + case Size::U32: + case Size::S32: + case Size::SD32: + return true; + default: + return false; + } +} + +void ImageAtomOp(TranslatorVisitor& v, IR::Reg dest_reg, IR::Reg operand_reg, IR::Reg coord_reg, + IR::Reg bindless_reg, AtomicOp op, Clamp clamp, Size size, Type type, + u64 bound_offset, bool is_bindless, bool write_result) { + if (clamp != Clamp::IGN) { + throw NotImplementedException("Clamp {}", clamp); + } + if (!IsSizeInt32(size)) { + throw NotImplementedException("Size {}", size); + } + const bool is_signed{size == Size::S32}; + const ImageFormat format{Format(size)}; + const TextureType tex_type{GetType(type)}; + const IR::Value coords{MakeCoords(v, coord_reg, type)}; + + const IR::U32 
handle{is_bindless != 0 ? v.X(bindless_reg)
+                              : v.ir.Imm32(static_cast<u32>(bound_offset * 4))};
+    IR::TextureInstInfo info{};
+    info.type.Assign(tex_type);
+    info.image_format.Assign(format);
+
+    // TODO: float/64-bit operand
+    const IR::Value op_b{v.X(operand_reg)};
+    const IR::Value color{ApplyAtomicOp(v.ir, handle, coords, op_b, info, op, is_signed)};
+
+    if (write_result) {
+        v.X(dest_reg, IR::U32{color});
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::SUATOM(u64 insn) {
+    union {
+        u64 raw;
+        BitField<54, 1, u64> is_bindless;
+        BitField<29, 4, AtomicOp> op;
+        BitField<33, 3, Type> type;
+        BitField<51, 3, Size> size;
+        BitField<49, 2, Clamp> clamp;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> coord_reg;
+        BitField<20, 8, IR::Reg> operand_reg;
+        BitField<36, 13, u64> bound_offset;    // !is_bindless
+        BitField<39, 8, IR::Reg> bindless_reg; // is_bindless
+    } const suatom{insn};
+
+    ImageAtomOp(*this, suatom.dest_reg, suatom.operand_reg, suatom.coord_reg, suatom.bindless_reg,
+                suatom.op, suatom.clamp, suatom.size, suatom.type, suatom.bound_offset,
+                suatom.is_bindless != 0, true);
+}
+
+void TranslatorVisitor::SURED(u64 insn) {
+    // TODO: confirm offsets
+    union {
+        u64 raw;
+        BitField<51, 1, u64> is_bound;
+        BitField<21, 3, AtomicOp> op;
+        BitField<33, 3, Type> type;
+        BitField<20, 3, Size> size;
+        BitField<49, 2, Clamp> clamp;
+        BitField<0, 8, IR::Reg> operand_reg;
+        BitField<8, 8, IR::Reg> coord_reg;
+        BitField<36, 13, u64> bound_offset;    // is_bound
+        BitField<39, 8, IR::Reg> bindless_reg; // !is_bound
+    } const sured{insn};
+    ImageAtomOp(*this, IR::Reg::RZ, sured.operand_reg, sured.coord_reg, sured.bindless_reg,
+                sured.op, sured.clamp, sured.size, sured.type, sured.bound_offset,
+                sured.is_bound == 0, false);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/surface_load_store.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/surface_load_store.cpp
new file mode 100755
index 000000000..7dc793ad7
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/surface_load_store.cpp
@@ -0,0 +1,281 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <array>
+#include <bit>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class Type : u64 {
+    _1D,
+    BUFFER_1D,
+    ARRAY_1D,
+    _2D,
+    ARRAY_2D,
+    _3D,
+};
+
+constexpr unsigned R = 1 << 0;
+constexpr unsigned G = 1 << 1;
+constexpr unsigned B = 1 << 2;
+constexpr unsigned A = 1 << 3;
+
+constexpr std::array MASK{
+    0U,            //
+    R,             //
+    G,             //
+    R | G,         //
+    B,             //
+    R | B,         //
+    G | B,         //
+    R | G | B,     //
+    A,             //
+    R | A,         //
+    G | A,         //
+    R | G | A,     //
+    B | A,         //
+    R | B | A,     //
+    G | B | A,     //
+    R | G | B | A, //
+};
+
+enum class Size : u64 {
+    U8,
+    S8,
+    U16,
+    S16,
+    B32,
+    B64,
+    B128,
+};
+
+enum class Clamp : u64 {
+    IGN,
+    Default,
+    TRAP,
+};
+
+// https://docs.nvidia.com/cuda/parallel-thread-execution/index.html#cache-operators
+enum class LoadCache : u64 {
+    CA, // Cache at all levels, likely to be accessed again
+    CG, // Cache at global level (L2 and below, not L1)
+    CI, // ???
+    CV, // Don't cache and fetch again (volatile)
+};
+
+enum class StoreCache : u64 {
+    WB, // Cache write-back all coherent levels
+    CG, // Cache at global level (L2 and below, not L1)
+    CS, // Cache streaming, likely to be accessed once
+    WT, // Cache write-through (to system memory, volatile?)
+};
+
+ImageFormat Format(Size size) {
+    switch (size) {
+    case Size::U8:
+        return ImageFormat::R8_UINT;
+    case Size::S8:
+        return ImageFormat::R8_SINT;
+    case Size::U16:
+        return ImageFormat::R16_UINT;
+    case Size::S16:
+        return ImageFormat::R16_SINT;
+    case Size::B32:
+        return ImageFormat::R32_UINT;
+    case Size::B64:
+        return ImageFormat::R32G32_UINT;
+    case Size::B128:
+        return ImageFormat::R32G32B32A32_UINT;
+    }
+    throw NotImplementedException("Invalid size {}", size);
+}
+
+int SizeInRegs(Size size) {
+    switch (size) {
+    case Size::U8:
+    case Size::S8:
+    case Size::U16:
+    case Size::S16:
+    case Size::B32:
+        return 1;
+    case Size::B64:
+        return 2;
+    case Size::B128:
+        return 4;
+    }
+    throw NotImplementedException("Invalid size {}", size);
+}
+
+TextureType GetType(Type type) {
+    switch (type) {
+    case Type::_1D:
+        return TextureType::Color1D;
+    case Type::BUFFER_1D:
+        return TextureType::Buffer;
+    case Type::ARRAY_1D:
+        return TextureType::ColorArray1D;
+    case Type::_2D:
+        return TextureType::Color2D;
+    case Type::ARRAY_2D:
+        return TextureType::ColorArray2D;
+    case Type::_3D:
+        return TextureType::Color3D;
+    }
+    throw NotImplementedException("Invalid type {}", type);
+}
+
+IR::Value MakeCoords(TranslatorVisitor& v, IR::Reg reg, Type type) {
+    const auto array{[&](int index) {
+        return v.ir.BitFieldExtract(v.X(reg + index), v.ir.Imm32(0), v.ir.Imm32(16));
+    }};
+    switch (type) {
+    case Type::_1D:
+    case Type::BUFFER_1D:
+        return v.X(reg);
+    case Type::ARRAY_1D:
+        return v.ir.CompositeConstruct(v.X(reg), array(1));
+    case Type::_2D:
+        return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1));
+    case Type::ARRAY_2D:
+        return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1), array(2));
+    case Type::_3D:
+        return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1), v.X(reg + 2));
+    }
+    throw NotImplementedException("Invalid type {}", type);
+}
+
+unsigned SwizzleMask(u64 swizzle) {
+    if (swizzle == 0 || swizzle >= MASK.size()) {
+        throw NotImplementedException("Invalid swizzle {}", swizzle);
+    }
+    return MASK[swizzle];
+}
+
+IR::Value MakeColor(IR::IREmitter& ir, IR::Reg reg, int num_regs) {
+    std::array<IR::Value, 4> colors;
+    for (int i = 0; i < num_regs; ++i) {
+        colors[i] = ir.GetReg(reg + i);
+    }
+    for (int i = num_regs; i < 4; ++i) {
+        colors[i] = ir.Imm32(0);
+    }
+    return ir.CompositeConstruct(colors[0], colors[1], colors[2], colors[3]);
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::SULD(u64 insn) {
+    union {
+        u64 raw;
+        BitField<51, 1, u64> is_bound;
+        BitField<52, 1, u64> d;
+        BitField<23, 1, u64> ba;
+        BitField<33, 3, Type> type;
+        BitField<24, 2, LoadCache> cache;
+        BitField<20, 3, Size> size;            // .D
+        BitField<20, 4, u64> swizzle;          // .P
+        BitField<49, 2, Clamp> clamp;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> coord_reg;
+        BitField<36, 13, u64> bound_offset;    // is_bound
+        BitField<39, 8, IR::Reg> bindless_reg; // !is_bound
+    } const suld{insn};
+
+    if (suld.clamp != Clamp::IGN) {
+        throw NotImplementedException("Clamp {}", suld.clamp.Value());
+    }
+    if (suld.cache != LoadCache::CA && suld.cache != LoadCache::CG) {
+        throw NotImplementedException("Cache {}", suld.cache.Value());
+    }
+    const bool is_typed{suld.d != 0};
+    if (is_typed && suld.ba != 0) {
+        throw 
NotImplementedException("BA"); + } + + const ImageFormat format{is_typed ? Format(suld.size) : ImageFormat::Typeless}; + const TextureType type{GetType(suld.type)}; + const IR::Value coords{MakeCoords(*this, suld.coord_reg, suld.type)}; + const IR::U32 handle{suld.is_bound != 0 ? ir.Imm32(static_cast(suld.bound_offset * 4)) + : X(suld.bindless_reg)}; + IR::TextureInstInfo info{}; + info.type.Assign(type); + info.image_format.Assign(format); + + const IR::Value result{ir.ImageRead(handle, coords, info)}; + IR::Reg dest_reg{suld.dest_reg}; + if (is_typed) { + const int num_regs{SizeInRegs(suld.size)}; + for (int i = 0; i < num_regs; ++i) { + X(dest_reg + i, IR::U32{ir.CompositeExtract(result, i)}); + } + } else { + const unsigned mask{SwizzleMask(suld.swizzle)}; + const int bits{std::popcount(mask)}; + if (!IR::IsAligned(dest_reg, bits == 3 ? 4 : bits)) { + throw NotImplementedException("Unaligned destination register"); + } + for (unsigned component = 0; component < 4; ++component) { + if (((mask >> component) & 1) == 0) { + continue; + } + X(dest_reg, IR::U32{ir.CompositeExtract(result, component)}); + ++dest_reg; + } + } +} + +void TranslatorVisitor::SUST(u64 insn) { + union { + u64 raw; + BitField<51, 1, u64> is_bound; + BitField<52, 1, u64> d; + BitField<23, 1, u64> ba; + BitField<33, 3, Type> type; + BitField<24, 2, StoreCache> cache; + BitField<20, 3, Size> size; // .D + BitField<20, 4, u64> swizzle; // .P + BitField<49, 2, Clamp> clamp; + BitField<0, 8, IR::Reg> data_reg; + BitField<8, 8, IR::Reg> coord_reg; + BitField<36, 13, u64> bound_offset; // is_bound + BitField<39, 8, IR::Reg> bindless_reg; // !is_bound + } const sust{insn}; + + if (sust.clamp != Clamp::IGN) { + throw NotImplementedException("Clamp {}", sust.clamp.Value()); + } + if (sust.cache != StoreCache::WB && sust.cache != StoreCache::CG) { + throw NotImplementedException("Cache {}", sust.cache.Value()); + } + const bool is_typed{sust.d != 0}; + if (is_typed && sust.ba != 0) { + throw NotImplementedException("BA"); + } + const ImageFormat format{is_typed ? Format(sust.size) : ImageFormat::Typeless}; + const TextureType type{GetType(sust.type)}; + const IR::Value coords{MakeCoords(*this, sust.coord_reg, sust.type)}; + const IR::U32 handle{sust.is_bound != 0 ? ir.Imm32(static_cast(sust.bound_offset * 4)) + : X(sust.bindless_reg)}; + IR::TextureInstInfo info{}; + info.type.Assign(type); + info.image_format.Assign(format); + + IR::Value color; + if (is_typed) { + color = MakeColor(ir, sust.data_reg, SizeInRegs(sust.size)); + } else { + const unsigned mask{SwizzleMask(sust.swizzle)}; + if (mask != 0xf) { + throw NotImplementedException("Non-full mask"); + } + color = MakeColor(ir, sust.data_reg, 4); + } + ir.ImageWrite(handle, coords, color, info); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch.cpp new file mode 100755 index 000000000..0046b5edd --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch.cpp @@ -0,0 +1,236 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+
+#include <optional>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class Blod : u64 {
+    None,
+    LZ,
+    LB,
+    LL,
+    INVALIDBLOD4,
+    INVALIDBLOD5,
+    LBA,
+    LLA,
+};
+
+enum class TextureType : u64 {
+    _1D,
+    ARRAY_1D,
+    _2D,
+    ARRAY_2D,
+    _3D,
+    ARRAY_3D,
+    CUBE,
+    ARRAY_CUBE,
+};
+
+Shader::TextureType GetType(TextureType type) {
+    switch (type) {
+    case TextureType::_1D:
+        return Shader::TextureType::Color1D;
+    case TextureType::ARRAY_1D:
+        return Shader::TextureType::ColorArray1D;
+    case TextureType::_2D:
+        return Shader::TextureType::Color2D;
+    case TextureType::ARRAY_2D:
+        return Shader::TextureType::ColorArray2D;
+    case TextureType::_3D:
+        return Shader::TextureType::Color3D;
+    case TextureType::ARRAY_3D:
+        throw NotImplementedException("3D array texture type");
+    case TextureType::CUBE:
+        return Shader::TextureType::ColorCube;
+    case TextureType::ARRAY_CUBE:
+        return Shader::TextureType::ColorArrayCube;
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+IR::Value MakeCoords(TranslatorVisitor& v, IR::Reg reg, TextureType type) {
+    const auto read_array{[&]() -> IR::F32 { return v.ir.ConvertUToF(32, 16, v.X(reg)); }};
+    switch (type) {
+    case TextureType::_1D:
+        return v.F(reg);
+    case TextureType::ARRAY_1D:
+        return v.ir.CompositeConstruct(v.F(reg + 1), read_array());
+    case TextureType::_2D:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1));
+    case TextureType::ARRAY_2D:
+        return v.ir.CompositeConstruct(v.F(reg + 1), v.F(reg + 2), read_array());
+    case TextureType::_3D:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1), v.F(reg + 2));
+    case TextureType::ARRAY_3D:
+        throw NotImplementedException("3D array texture type");
+    case TextureType::CUBE:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1), v.F(reg + 2));
+    case TextureType::ARRAY_CUBE:
+        return v.ir.CompositeConstruct(v.F(reg + 1), v.F(reg + 2), v.F(reg + 3), read_array());
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+IR::F32 MakeLod(TranslatorVisitor& v, IR::Reg& reg, Blod blod) {
+    switch (blod) {
+    case Blod::None:
+        return v.ir.Imm32(0.0f);
+    case Blod::LZ:
+        return v.ir.Imm32(0.0f);
+    case Blod::LB:
+    case Blod::LL:
+    case Blod::LBA:
+    case Blod::LLA:
+        return v.F(reg++);
+    case Blod::INVALIDBLOD4:
+    case Blod::INVALIDBLOD5:
+        break;
+    }
+    throw NotImplementedException("Invalid blod {}", blod);
+}
+
+IR::Value MakeOffset(TranslatorVisitor& v, IR::Reg& reg, TextureType type) {
+    const IR::U32 value{v.X(reg++)};
+    switch (type) {
+    case TextureType::_1D:
+    case TextureType::ARRAY_1D:
+        return v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(4), true);
+    case TextureType::_2D:
+    case TextureType::ARRAY_2D:
+        return v.ir.CompositeConstruct(
+            v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(4), true),
+            v.ir.BitFieldExtract(value, v.ir.Imm32(4), v.ir.Imm32(4), true));
+    case TextureType::_3D:
+    case TextureType::ARRAY_3D:
+        return v.ir.CompositeConstruct(
+            v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(4), true),
+            v.ir.BitFieldExtract(value, v.ir.Imm32(4), v.ir.Imm32(4), true),
+            v.ir.BitFieldExtract(value, v.ir.Imm32(8), v.ir.Imm32(4), true));
+    case TextureType::CUBE:
+    case TextureType::ARRAY_CUBE:
+        throw NotImplementedException("Illegal offset on CUBE sample");
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+bool 
HasExplicitLod(Blod blod) {
+    switch (blod) {
+    case Blod::LL:
+    case Blod::LLA:
+    case Blod::LZ:
+        return true;
+    default:
+        return false;
+    }
+}
+
+void Impl(TranslatorVisitor& v, u64 insn, bool aoffi, Blod blod, bool lc,
+          std::optional<u32> cbuf_offset) {
+    union {
+        u64 raw;
+        BitField<35, 1, u64> ndv;
+        BitField<49, 1, u64> nodep;
+        BitField<50, 1, u64> dc;
+        BitField<51, 3, IR::Pred> sparse_pred;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> coord_reg;
+        BitField<20, 8, IR::Reg> meta_reg;
+        BitField<28, 3, TextureType> type;
+        BitField<31, 4, u64> mask;
+    } const tex{insn};
+
+    if (lc) {
+        throw NotImplementedException("LC");
+    }
+    const IR::Value coords{MakeCoords(v, tex.coord_reg, tex.type)};
+
+    IR::Reg meta_reg{tex.meta_reg};
+    IR::Value handle;
+    IR::Value offset;
+    IR::F32 dref;
+    IR::F32 lod_clamp;
+    if (cbuf_offset) {
+        handle = v.ir.Imm32(*cbuf_offset);
+    } else {
+        handle = v.X(meta_reg++);
+    }
+    const IR::F32 lod{MakeLod(v, meta_reg, blod)};
+    if (aoffi) {
+        offset = MakeOffset(v, meta_reg, tex.type);
+    }
+    if (tex.dc != 0) {
+        dref = v.F(meta_reg++);
+    }
+    IR::TextureInstInfo info{};
+    info.type.Assign(GetType(tex.type));
+    info.is_depth.Assign(tex.dc != 0 ? 1 : 0);
+    info.has_bias.Assign(blod == Blod::LB || blod == Blod::LBA ? 1 : 0);
+    info.has_lod_clamp.Assign(lc ? 1 : 0);
+
+    const IR::Value sample{[&]() -> IR::Value {
+        if (tex.dc == 0) {
+            if (HasExplicitLod(blod)) {
+                return v.ir.ImageSampleExplicitLod(handle, coords, lod, offset, info);
+            } else {
+                return v.ir.ImageSampleImplicitLod(handle, coords, lod, offset, lod_clamp, info);
+            }
+        }
+        if (HasExplicitLod(blod)) {
+            return v.ir.ImageSampleDrefExplicitLod(handle, coords, dref, lod, offset, info);
+        } else {
+            return v.ir.ImageSampleDrefImplicitLod(handle, coords, dref, lod, offset, lod_clamp,
+                                                   info);
+        }
+    }()};
+
+    IR::Reg dest_reg{tex.dest_reg};
+    for (int element = 0; element < 4; ++element) {
+        if (((tex.mask >> element) & 1) == 0) {
+            continue;
+        }
+        IR::F32 value;
+        if (tex.dc != 0) {
+            value = element < 3 ? IR::F32{sample} : v.ir.Imm32(1.0f);
+        } else {
+            value = IR::F32{v.ir.CompositeExtract(sample, static_cast<size_t>(element))};
+        }
+        v.F(dest_reg, value);
+        ++dest_reg;
+    }
+    if (tex.sparse_pred != IR::Pred::PT) {
+        v.ir.SetPred(tex.sparse_pred, v.ir.LogicalNot(v.ir.GetSparseFromOp(sample)));
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::TEX(u64 insn) {
+    union {
+        u64 raw;
+        BitField<54, 1, u64> aoffi;
+        BitField<55, 3, Blod> blod;
+        BitField<58, 1, u64> lc;
+        BitField<36, 13, u64> cbuf_offset;
+    } const tex{insn};
+
+    Impl(*this, insn, tex.aoffi != 0, tex.blod, tex.lc != 0,
+         static_cast<u32>(tex.cbuf_offset * 4));
+}
+
+void TranslatorVisitor::TEX_b(u64 insn) {
+    union {
+        u64 raw;
+        BitField<36, 1, u64> aoffi;
+        BitField<37, 3, Blod> blod;
+        BitField<40, 1, u64> lc;
+    } const tex{insn};
+
+    Impl(*this, insn, tex.aoffi != 0, tex.blod, tex.lc != 0, std::nullopt);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch_swizzled.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch_swizzled.cpp
new file mode 100755
index 000000000..154e7f1a1
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_fetch_swizzled.cpp
@@ -0,0 +1,266 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
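The TEX translation above consumes its extra operands from consecutive registers starting at meta_reg: an optional bindless handle, then an LOD or bias register for LB/LL/LBA/LLA, then a packed AOFFI offset register, then the depth-compare reference. A sketch of that register budget (hypothetical helper, not part of the patch):

#include <cstdio>

// Same Blod encoding as texture_fetch.cpp above.
enum class Blod { None, LZ, LB, LL, INVALIDBLOD4, INVALIDBLOD5, LBA, LLA };

// MakeLod above reads one register only for the LB/LL/LBA/LLA variants.
bool ConsumesLodRegister(Blod blod) {
    return blod == Blod::LB || blod == Blod::LL || blod == Blod::LBA || blod == Blod::LLA;
}

// Number of registers read starting at meta_reg, excluding the bindless
// handle (hypothetical accounting helper for illustration).
int MetaRegisterCount(Blod blod, bool aoffi, bool dc) {
    return (ConsumesLodRegister(blod) ? 1 : 0) + (aoffi ? 1 : 0) + (dc ? 1 : 0);
}

int main() {
    // TEX.LL with AOFFI and depth compare reads three meta registers.
    std::printf("%d\n", MetaRegisterCount(Blod::LL, true, true)); // prints 3
}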
+
+#include <array>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class Precision : u64 {
+    F16,
+    F32,
+};
+
+union Encoding {
+    u64 raw;
+    BitField<59, 1, Precision> precision;
+    BitField<53, 4, u64> encoding;
+    BitField<49, 1, u64> nodep;
+    BitField<28, 8, IR::Reg> dest_reg_b;
+    BitField<0, 8, IR::Reg> dest_reg_a;
+    BitField<8, 8, IR::Reg> src_reg_a;
+    BitField<20, 8, IR::Reg> src_reg_b;
+    BitField<36, 13, u64> cbuf_offset;
+    BitField<50, 3, u64> swizzle;
+};
+
+constexpr unsigned R = 1;
+constexpr unsigned G = 2;
+constexpr unsigned B = 4;
+constexpr unsigned A = 8;
+
+constexpr std::array RG_LUT{
+    R,     //
+    G,     //
+    B,     //
+    A,     //
+    R | G, //
+    R | A, //
+    G | A, //
+    B | A, //
+};
+
+constexpr std::array RGBA_LUT{
+    R | G | B,     //
+    R | G | A,     //
+    R | B | A,     //
+    G | B | A,     //
+    R | G | B | A, //
+};
+
+void CheckAlignment(IR::Reg reg, size_t alignment) {
+    if (!IR::IsAligned(reg, alignment)) {
+        throw NotImplementedException("Unaligned source register {}", reg);
+    }
+}
+
+template <typename... Args>
+IR::Value Composite(TranslatorVisitor& v, Args... regs) {
+    return v.ir.CompositeConstruct(v.F(regs)...);
+}
+
+IR::F32 ReadArray(TranslatorVisitor& v, const IR::U32& value) {
+    return v.ir.ConvertUToF(32, 16, v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(16)));
+}
+
+IR::Value Sample(TranslatorVisitor& v, u64 insn) {
+    const Encoding texs{insn};
+    const IR::U32 handle{v.ir.Imm32(static_cast<u32>(texs.cbuf_offset * 4))};
+    const IR::F32 zero{v.ir.Imm32(0.0f)};
+    const IR::Reg reg_a{texs.src_reg_a};
+    const IR::Reg reg_b{texs.src_reg_b};
+    IR::TextureInstInfo info{};
+    if (texs.precision == Precision::F16) {
+        info.relaxed_precision.Assign(1);
+    }
+    switch (texs.encoding) {
+    case 0: // 1D.LZ
+        info.type.Assign(TextureType::Color1D);
+        return v.ir.ImageSampleExplicitLod(handle, v.F(reg_a), zero, {}, info);
+    case 1: // 2D
+        info.type.Assign(TextureType::Color2D);
+        return v.ir.ImageSampleImplicitLod(handle, Composite(v, reg_a, reg_b), {}, {}, {}, info);
+    case 2: // 2D.LZ
+        info.type.Assign(TextureType::Color2D);
+        return v.ir.ImageSampleExplicitLod(handle, Composite(v, reg_a, reg_b), zero, {}, info);
+    case 3: // 2D.LL
+        CheckAlignment(reg_a, 2);
+        info.type.Assign(TextureType::Color2D);
+        return v.ir.ImageSampleExplicitLod(handle, Composite(v, reg_a, reg_a + 1), v.F(reg_b), {},
+                                           info);
+    case 4: // 2D.DC
+        CheckAlignment(reg_a, 2);
+        info.type.Assign(TextureType::Color2D);
+        info.is_depth.Assign(1);
+        return v.ir.ImageSampleDrefImplicitLod(handle, Composite(v, reg_a, reg_a + 1), v.F(reg_b),
+                                               {}, {}, {}, info);
+    case 5: // 2D.LL.DC
+        CheckAlignment(reg_a, 2);
+        CheckAlignment(reg_b, 2);
+        info.type.Assign(TextureType::Color2D);
+        info.is_depth.Assign(1);
+        return v.ir.ImageSampleDrefExplicitLod(handle, Composite(v, reg_a, reg_a + 1),
+                                               v.F(reg_b + 1), v.F(reg_b), {}, info);
+    case 6: // 2D.LZ.DC
+        CheckAlignment(reg_a, 2);
+        info.type.Assign(TextureType::Color2D);
+        info.is_depth.Assign(1);
+        return v.ir.ImageSampleDrefExplicitLod(handle, Composite(v, reg_a, reg_a + 1), v.F(reg_b),
+                                               zero, {}, info);
+    case 7: // ARRAY_2D
+        CheckAlignment(reg_a, 2);
+        info.type.Assign(TextureType::ColorArray2D);
+        return v.ir.ImageSampleImplicitLod(
+            handle, v.ir.CompositeConstruct(v.F(reg_a + 1), v.F(reg_b), ReadArray(v, v.X(reg_a))),
+            {}, {}, {}, info);
+    case 8: // ARRAY_2D.LZ
+        CheckAlignment(reg_a, 2);
+        
info.type.Assign(TextureType::ColorArray2D); + return v.ir.ImageSampleExplicitLod( + handle, v.ir.CompositeConstruct(v.F(reg_a + 1), v.F(reg_b), ReadArray(v, v.X(reg_a))), + zero, {}, info); + case 9: // ARRAY_2D.LZ.DC + CheckAlignment(reg_a, 2); + CheckAlignment(reg_b, 2); + info.type.Assign(TextureType::ColorArray2D); + info.is_depth.Assign(1); + return v.ir.ImageSampleDrefExplicitLod( + handle, v.ir.CompositeConstruct(v.F(reg_a + 1), v.F(reg_b), ReadArray(v, v.X(reg_a))), + v.F(reg_b + 1), zero, {}, info); + case 10: // 3D + CheckAlignment(reg_a, 2); + info.type.Assign(TextureType::Color3D); + return v.ir.ImageSampleImplicitLod(handle, Composite(v, reg_a, reg_a + 1, reg_b), {}, {}, + {}, info); + case 11: // 3D.LZ + CheckAlignment(reg_a, 2); + info.type.Assign(TextureType::Color3D); + return v.ir.ImageSampleExplicitLod(handle, Composite(v, reg_a, reg_a + 1, reg_b), zero, {}, + info); + case 12: // CUBE + CheckAlignment(reg_a, 2); + info.type.Assign(TextureType::ColorCube); + return v.ir.ImageSampleImplicitLod(handle, Composite(v, reg_a, reg_a + 1, reg_b), {}, {}, + {}, info); + case 13: // CUBE.LL + CheckAlignment(reg_a, 2); + CheckAlignment(reg_b, 2); + info.type.Assign(TextureType::ColorCube); + return v.ir.ImageSampleExplicitLod(handle, Composite(v, reg_a, reg_a + 1, reg_b), + v.F(reg_b + 1), {}, info); + default: + throw NotImplementedException("Illegal encoding {}", texs.encoding.Value()); + } +} + +unsigned Swizzle(u64 insn) { + const Encoding texs{insn}; + const size_t encoding{texs.swizzle}; + if (texs.dest_reg_b == IR::Reg::RZ) { + if (encoding >= RG_LUT.size()) { + throw NotImplementedException("Illegal RG encoding {}", encoding); + } + return RG_LUT[encoding]; + } else { + if (encoding >= RGBA_LUT.size()) { + throw NotImplementedException("Illegal RGBA encoding {}", encoding); + } + return RGBA_LUT[encoding]; + } +} + +IR::F32 Extract(TranslatorVisitor& v, const IR::Value& sample, unsigned component) { + const bool is_shadow{sample.Type() == IR::Type::F32}; + if (is_shadow) { + const bool is_alpha{component == 3}; + return is_alpha ? 
v.ir.Imm32(1.0f) : IR::F32{sample};
+    } else {
+        return IR::F32{v.ir.CompositeExtract(sample, component)};
+    }
+}
+
+IR::Reg RegStoreComponent32(u64 insn, unsigned index) {
+    const Encoding texs{insn};
+    switch (index) {
+    case 0:
+        return texs.dest_reg_a;
+    case 1:
+        CheckAlignment(texs.dest_reg_a, 2);
+        return texs.dest_reg_a + 1;
+    case 2:
+        return texs.dest_reg_b;
+    case 3:
+        CheckAlignment(texs.dest_reg_b, 2);
+        return texs.dest_reg_b + 1;
+    }
+    throw LogicError("Invalid store index {}", index);
+}
+
+void Store32(TranslatorVisitor& v, u64 insn, const IR::Value& sample) {
+    const unsigned swizzle{Swizzle(insn)};
+    unsigned store_index{0};
+    for (unsigned component = 0; component < 4; ++component) {
+        if (((swizzle >> component) & 1) == 0) {
+            continue;
+        }
+        const IR::Reg dest{RegStoreComponent32(insn, store_index)};
+        v.F(dest, Extract(v, sample, component));
+        ++store_index;
+    }
+}
+
+IR::U32 Pack(TranslatorVisitor& v, const IR::F32& lhs, const IR::F32& rhs) {
+    return v.ir.PackHalf2x16(v.ir.CompositeConstruct(lhs, rhs));
+}
+
+void Store16(TranslatorVisitor& v, u64 insn, const IR::Value& sample) {
+    const unsigned swizzle{Swizzle(insn)};
+    unsigned store_index{0};
+    std::array<IR::F32, 4> swizzled;
+    for (unsigned component = 0; component < 4; ++component) {
+        if (((swizzle >> component) & 1) == 0) {
+            continue;
+        }
+        swizzled[store_index] = Extract(v, sample, component);
+        ++store_index;
+    }
+    const IR::F32 zero{v.ir.Imm32(0.0f)};
+    const Encoding texs{insn};
+    switch (store_index) {
+    case 1:
+        v.X(texs.dest_reg_a, Pack(v, swizzled[0], zero));
+        break;
+    case 2:
+    case 3:
+    case 4:
+        v.X(texs.dest_reg_a, Pack(v, swizzled[0], swizzled[1]));
+        switch (store_index) {
+        case 2:
+            break;
+        case 3:
+            v.X(texs.dest_reg_b, Pack(v, swizzled[2], zero));
+            break;
+        case 4:
+            v.X(texs.dest_reg_b, Pack(v, swizzled[2], swizzled[3]));
+            break;
+        }
+        break;
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::TEXS(u64 insn) {
+    const IR::Value sample{Sample(*this, insn)};
+    if (Encoding{insn}.precision == Precision::F32) {
+        Store32(*this, insn, sample);
+    } else {
+        Store16(*this, insn, sample);
+    }
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather.cpp
new file mode 100755
index 000000000..218cbc1a8
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather.cpp
@@ -0,0 +1,208 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
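The .F16 path of TEXS above stores pairs of components as two IEEE half floats packed into one 32-bit register via PackHalf2x16. A self-contained sketch of that packing (a truncating, flush-to-zero float-to-half conversion for illustration only; the emulator performs this in IR, and the helper names here are hypothetical):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Minimal float -> half conversion: truncates the mantissa, flushes
// subnormals to zero and saturates overflow to infinity.
std::uint16_t FloatToHalf(float f) {
    std::uint32_t x;
    std::memcpy(&x, &f, sizeof(x));
    const std::uint32_t sign = (x >> 16) & 0x8000u;
    const std::int32_t exp = static_cast<std::int32_t>((x >> 23) & 0xffu) - 127 + 15;
    const std::uint32_t mant = x & 0x7fffffu;
    if (exp <= 0) {
        return static_cast<std::uint16_t>(sign); // flush to zero
    }
    if (exp >= 31) {
        return static_cast<std::uint16_t>(sign | 0x7c00u); // overflow -> infinity
    }
    return static_cast<std::uint16_t>(sign | (exp << 10) | (mant >> 13));
}

// Equivalent of PackHalf2x16: low half in bits 0-15, high half in bits 16-31.
std::uint32_t PackHalf2x16(float lo, float hi) {
    return FloatToHalf(lo) | (static_cast<std::uint32_t>(FloatToHalf(hi)) << 16);
}

int main() {
    std::printf("0x%08x\n", PackHalf2x16(1.0f, 0.0f)); // prints 0x00003c00
}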
+
+#include <utility>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+
+enum class TextureType : u64 {
+    _1D,
+    ARRAY_1D,
+    _2D,
+    ARRAY_2D,
+    _3D,
+    ARRAY_3D,
+    CUBE,
+    ARRAY_CUBE,
+};
+
+enum class OffsetType : u64 {
+    None = 0,
+    AOFFI,
+    PTP,
+    Invalid,
+};
+
+enum class ComponentType : u64 {
+    R = 0,
+    G = 1,
+    B = 2,
+    A = 3,
+};
+
+Shader::TextureType GetType(TextureType type) {
+    switch (type) {
+    case TextureType::_1D:
+        return Shader::TextureType::Color1D;
+    case TextureType::ARRAY_1D:
+        return Shader::TextureType::ColorArray1D;
+    case TextureType::_2D:
+        return Shader::TextureType::Color2D;
+    case TextureType::ARRAY_2D:
+        return Shader::TextureType::ColorArray2D;
+    case TextureType::_3D:
+        return Shader::TextureType::Color3D;
+    case TextureType::ARRAY_3D:
+        throw NotImplementedException("3D array texture type");
+    case TextureType::CUBE:
+        return Shader::TextureType::ColorCube;
+    case TextureType::ARRAY_CUBE:
+        return Shader::TextureType::ColorArrayCube;
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+IR::Value MakeCoords(TranslatorVisitor& v, IR::Reg reg, TextureType type) {
+    const auto read_array{[&]() -> IR::F32 { return v.ir.ConvertUToF(32, 16, v.X(reg)); }};
+    switch (type) {
+    case TextureType::_1D:
+        return v.F(reg);
+    case TextureType::ARRAY_1D:
+        return v.ir.CompositeConstruct(v.F(reg + 1), read_array());
+    case TextureType::_2D:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1));
+    case TextureType::ARRAY_2D:
+        return v.ir.CompositeConstruct(v.F(reg + 1), v.F(reg + 2), read_array());
+    case TextureType::_3D:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1), v.F(reg + 2));
+    case TextureType::ARRAY_3D:
+        throw NotImplementedException("3D array texture type");
+    case TextureType::CUBE:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1), v.F(reg + 2));
+    case TextureType::ARRAY_CUBE:
+        return v.ir.CompositeConstruct(v.F(reg + 1), v.F(reg + 2), v.F(reg + 3), read_array());
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+IR::Value MakeOffset(TranslatorVisitor& v, IR::Reg& reg, TextureType type) {
+    const IR::U32 value{v.X(reg++)};
+    switch (type) {
+    case TextureType::_1D:
+    case TextureType::ARRAY_1D:
+        return v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(6), true);
+    case TextureType::_2D:
+    case TextureType::ARRAY_2D:
+        return v.ir.CompositeConstruct(
+            v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(6), true),
+            v.ir.BitFieldExtract(value, v.ir.Imm32(8), v.ir.Imm32(6), true));
+    case TextureType::_3D:
+    case TextureType::ARRAY_3D:
+        return v.ir.CompositeConstruct(
+            v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(6), true),
+            v.ir.BitFieldExtract(value, v.ir.Imm32(8), v.ir.Imm32(6), true),
+            v.ir.BitFieldExtract(value, v.ir.Imm32(16), v.ir.Imm32(6), true));
+    case TextureType::CUBE:
+    case TextureType::ARRAY_CUBE:
+        throw NotImplementedException("Illegal offset on CUBE sample");
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+std::pair<IR::Value, IR::Value> MakeOffsetPTP(TranslatorVisitor& v, IR::Reg& reg) {
+    const IR::U32 value1{v.X(reg++)};
+    const IR::U32 value2{v.X(reg++)};
+    const IR::U32 bitsize{v.ir.Imm32(6)};
+    const auto make_vector{[&v, &bitsize](const IR::U32& value) {
+        return v.ir.CompositeConstruct(v.ir.BitFieldExtract(value, v.ir.Imm32(0), bitsize, true),
+                                       
v.ir.BitFieldExtract(value, v.ir.Imm32(8), bitsize, true),
+                                       v.ir.BitFieldExtract(value, v.ir.Imm32(16), bitsize, true),
+                                       v.ir.BitFieldExtract(value, v.ir.Imm32(24), bitsize, true));
+    }};
+    return {make_vector(value1), make_vector(value2)};
+}
+
+void Impl(TranslatorVisitor& v, u64 insn, ComponentType component_type, OffsetType offset_type,
+          bool is_bindless) {
+    union {
+        u64 raw;
+        BitField<35, 1, u64> ndv;
+        BitField<49, 1, u64> nodep;
+        BitField<50, 1, u64> dc;
+        BitField<51, 3, IR::Pred> sparse_pred;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> coord_reg;
+        BitField<20, 8, IR::Reg> meta_reg;
+        BitField<28, 3, TextureType> type;
+        BitField<31, 4, u64> mask;
+        BitField<36, 13, u64> cbuf_offset;
+    } const tld4{insn};
+
+    const IR::Value coords{MakeCoords(v, tld4.coord_reg, tld4.type)};
+
+    IR::Reg meta_reg{tld4.meta_reg};
+    IR::Value handle;
+    IR::Value offset;
+    IR::Value offset2;
+    IR::F32 dref;
+    if (!is_bindless) {
+        handle = v.ir.Imm32(static_cast<u32>(tld4.cbuf_offset.Value() * 4));
+    } else {
+        handle = v.X(meta_reg++);
+    }
+    switch (offset_type) {
+    case OffsetType::None:
+        break;
+    case OffsetType::AOFFI:
+        offset = MakeOffset(v, meta_reg, tld4.type);
+        break;
+    case OffsetType::PTP:
+        std::tie(offset, offset2) = MakeOffsetPTP(v, meta_reg);
+        break;
+    default:
+        throw NotImplementedException("Invalid offset type {}", offset_type);
+    }
+    if (tld4.dc != 0) {
+        dref = v.F(meta_reg++);
+    }
+    IR::TextureInstInfo info{};
+    info.type.Assign(GetType(tld4.type));
+    info.is_depth.Assign(tld4.dc != 0 ? 1 : 0);
+    info.gather_component.Assign(static_cast<u32>(component_type));
+    const IR::Value sample{[&] {
+        if (tld4.dc == 0) {
+            return v.ir.ImageGather(handle, coords, offset, offset2, info);
+        }
+        return v.ir.ImageGatherDref(handle, coords, offset, offset2, dref, info);
+    }()};
+
+    IR::Reg dest_reg{tld4.dest_reg};
+    for (size_t element = 0; element < 4; ++element) {
+        if (((tld4.mask >> element) & 1) == 0) {
+            continue;
+        }
+        v.F(dest_reg, IR::F32{v.ir.CompositeExtract(sample, element)});
+        ++dest_reg;
+    }
+    if (tld4.sparse_pred != IR::Pred::PT) {
+        v.ir.SetPred(tld4.sparse_pred, v.ir.LogicalNot(v.ir.GetSparseFromOp(sample)));
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::TLD4(u64 insn) {
+    union {
+        u64 raw;
+        BitField<56, 2, ComponentType> component;
+        BitField<54, 2, OffsetType> offset;
+    } const tld4{insn};
+    Impl(*this, insn, tld4.component, tld4.offset, false);
+}
+
+void TranslatorVisitor::TLD4_b(u64 insn) {
+    union {
+        u64 raw;
+        BitField<38, 2, ComponentType> component;
+        BitField<36, 2, OffsetType> offset;
+    } const tld4{insn};
+    Impl(*this, insn, tld4.component, tld4.offset, true);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather_swizzled.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather_swizzled.cpp
new file mode 100755
index 000000000..34efa2d50
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gather_swizzled.cpp
@@ -0,0 +1,134 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
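MakeOffsetPTP above unpacks eight signed 6-bit gather offsets, four per source register at bits 0, 8, 16 and 24. A host-side sketch of one register's unpack (hypothetical helpers mirroring the signed BitFieldExtract, not part of the patch):

#include <array>
#include <cstdint>
#include <cstdio>

// Hypothetical host mirror of BitFieldExtract(value, lsb, 6, /*signed=*/true).
int SignExtract6(std::uint32_t value, unsigned lsb) {
    const std::uint32_t shifted = value << (26u - lsb); // place the field at the top
    return static_cast<std::int32_t>(shifted) >> 26;    // arithmetic shift sign-extends
}

// One PTP source register carries four signed 6-bit offsets at bits 0/8/16/24.
std::array<int, 4> UnpackPtp(std::uint32_t reg) {
    return {SignExtract6(reg, 0), SignExtract6(reg, 8), SignExtract6(reg, 16),
            SignExtract6(reg, 24)};
}

int main() {
    for (const int offset : UnpackPtp(0x00082001u)) {
        std::printf("%d ", offset); // prints: 1 -32 8 0
    }
    std::printf("\n");
}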
+
+#include <array>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class Precision : u64 {
+    F32,
+    F16,
+};
+
+enum class ComponentType : u64 {
+    R = 0,
+    G = 1,
+    B = 2,
+    A = 3,
+};
+
+union Encoding {
+    u64 raw;
+    BitField<55, 1, Precision> precision;
+    BitField<52, 2, ComponentType> component_type;
+    BitField<51, 1, u64> aoffi;
+    BitField<50, 1, u64> dc;
+    BitField<49, 1, u64> nodep;
+    BitField<28, 8, IR::Reg> dest_reg_b;
+    BitField<0, 8, IR::Reg> dest_reg_a;
+    BitField<8, 8, IR::Reg> src_reg_a;
+    BitField<20, 8, IR::Reg> src_reg_b;
+    BitField<36, 13, u64> cbuf_offset;
+};
+
+void CheckAlignment(IR::Reg reg, size_t alignment) {
+    if (!IR::IsAligned(reg, alignment)) {
+        throw NotImplementedException("Unaligned source register {}", reg);
+    }
+}
+
+IR::Value MakeOffset(TranslatorVisitor& v, IR::Reg reg) {
+    const IR::U32 value{v.X(reg)};
+    return v.ir.CompositeConstruct(v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(6), true),
+                                   v.ir.BitFieldExtract(value, v.ir.Imm32(8), v.ir.Imm32(6), true));
+}
+
+IR::Value Sample(TranslatorVisitor& v, u64 insn) {
+    const Encoding tld4s{insn};
+    const IR::U32 handle{v.ir.Imm32(static_cast<u32>(tld4s.cbuf_offset * 4))};
+    const IR::Reg reg_a{tld4s.src_reg_a};
+    const IR::Reg reg_b{tld4s.src_reg_b};
+    IR::TextureInstInfo info{};
+    if (tld4s.precision == Precision::F16) {
+        info.relaxed_precision.Assign(1);
+    }
+    info.gather_component.Assign(static_cast<u32>(tld4s.component_type.Value()));
+    info.type.Assign(Shader::TextureType::Color2D);
+    info.is_depth.Assign(tld4s.dc != 0 ? 1 : 0);
+    IR::Value coords;
+    if (tld4s.aoffi != 0) {
+        CheckAlignment(reg_a, 2);
+        coords = v.ir.CompositeConstruct(v.F(reg_a), v.F(reg_a + 1));
+        IR::Value offset = MakeOffset(v, reg_b);
+        if (tld4s.dc != 0) {
+            CheckAlignment(reg_b, 2);
+            IR::F32 dref = v.F(reg_b + 1);
+            return v.ir.ImageGatherDref(handle, coords, offset, {}, dref, info);
+        }
+        return v.ir.ImageGather(handle, coords, offset, {}, info);
+    }
+    if (tld4s.dc != 0) {
+        CheckAlignment(reg_a, 2);
+        coords = v.ir.CompositeConstruct(v.F(reg_a), v.F(reg_a + 1));
+        IR::F32 dref = v.F(reg_b);
+        return v.ir.ImageGatherDref(handle, coords, {}, {}, dref, info);
+    }
+    coords = v.ir.CompositeConstruct(v.F(reg_a), v.F(reg_b));
+    return v.ir.ImageGather(handle, coords, {}, {}, info);
+}
+
+IR::Reg RegStoreComponent32(u64 insn, size_t index) {
+    const Encoding tlds4{insn};
+    switch (index) {
+    case 0:
+        return tlds4.dest_reg_a;
+    case 1:
+        CheckAlignment(tlds4.dest_reg_a, 2);
+        return tlds4.dest_reg_a + 1;
+    case 2:
+        return tlds4.dest_reg_b;
+    case 3:
+        CheckAlignment(tlds4.dest_reg_b, 2);
+        return tlds4.dest_reg_b + 1;
+    }
+    throw LogicError("Invalid store index {}", index);
+}
+
+void Store32(TranslatorVisitor& v, u64 insn, const IR::Value& sample) {
+    for (size_t component = 0; component < 4; ++component) {
+        const IR::Reg dest{RegStoreComponent32(insn, component)};
+        v.F(dest, IR::F32{v.ir.CompositeExtract(sample, component)});
+    }
+}
+
+IR::U32 Pack(TranslatorVisitor& v, const IR::F32& lhs, const IR::F32& rhs) {
+    return v.ir.PackHalf2x16(v.ir.CompositeConstruct(lhs, rhs));
+}
+
+void Store16(TranslatorVisitor& v, u64 insn, const IR::Value& sample) {
+    std::array<IR::F32, 4> swizzled;
+    for (size_t component = 0; component < 4; ++component) {
+        swizzled[component] = IR::F32{v.ir.CompositeExtract(sample, component)};
+    }
+    const Encoding tld4s{insn};
+    
v.X(tld4s.dest_reg_a, Pack(v, swizzled[0], swizzled[1])); + v.X(tld4s.dest_reg_b, Pack(v, swizzled[2], swizzled[3])); +} +} // Anonymous namespace + +void TranslatorVisitor::TLD4S(u64 insn) { + const IR::Value sample{Sample(*this, insn)}; + if (Encoding{insn}.precision == Precision::F32) { + Store32(*this, insn, sample); + } else { + Store16(*this, insn, sample); + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gradient.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gradient.cpp new file mode 100755 index 000000000..c3fe3ffda --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_gradient.cpp @@ -0,0 +1,182 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { + +enum class TextureType : u64 { + _1D, + ARRAY_1D, + _2D, + ARRAY_2D, + _3D, + ARRAY_3D, + CUBE, + ARRAY_CUBE, +}; + +Shader::TextureType GetType(TextureType type) { + switch (type) { + case TextureType::_1D: + return Shader::TextureType::Color1D; + case TextureType::ARRAY_1D: + return Shader::TextureType::ColorArray1D; + case TextureType::_2D: + return Shader::TextureType::Color2D; + case TextureType::ARRAY_2D: + return Shader::TextureType::ColorArray2D; + case TextureType::_3D: + return Shader::TextureType::Color3D; + case TextureType::ARRAY_3D: + throw NotImplementedException("3D array texture type"); + case TextureType::CUBE: + return Shader::TextureType::ColorCube; + case TextureType::ARRAY_CUBE: + return Shader::TextureType::ColorArrayCube; + } + throw NotImplementedException("Invalid texture type {}", type); +} + +IR::Value MakeOffset(TranslatorVisitor& v, IR::Reg reg, bool has_lod_clamp) { + const IR::U32 value{v.X(reg)}; + const u32 base{has_lod_clamp ? 12U : 16U}; + return v.ir.CompositeConstruct( + v.ir.BitFieldExtract(value, v.ir.Imm32(base), v.ir.Imm32(4), true), + v.ir.BitFieldExtract(value, v.ir.Imm32(base + 4), v.ir.Imm32(4), true)); +} + +void Impl(TranslatorVisitor& v, u64 insn, bool is_bindless) { + union { + u64 raw; + BitField<49, 1, u64> nodep; + BitField<35, 1, u64> aoffi; + BitField<50, 1, u64> lc; + BitField<51, 3, IR::Pred> sparse_pred; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> coord_reg; + BitField<20, 8, IR::Reg> derivate_reg; + BitField<28, 3, TextureType> type; + BitField<31, 4, u64> mask; + BitField<36, 13, u64> cbuf_offset; + } const txd{insn}; + + const bool has_lod_clamp = txd.lc != 0; + if (has_lod_clamp) { + throw NotImplementedException("TXD.LC - CLAMP is not implemented"); + } + + IR::Value coords; + u32 num_derivates{}; + IR::Reg base_reg{txd.coord_reg}; + IR::Reg last_reg; + IR::Value handle; + if (is_bindless) { + handle = v.X(base_reg++); + } else { + handle = v.ir.Imm32(static_cast(txd.cbuf_offset.Value() * 4)); + } + + const auto read_array{[&]() -> IR::F32 { + const IR::U32 base{v.ir.Imm32(0)}; + const IR::U32 count{v.ir.Imm32(has_lod_clamp ? 
12 : 16)}; + const IR::U32 array_index{v.ir.BitFieldExtract(v.X(last_reg), base, count)}; + return v.ir.ConvertUToF(32, 16, array_index); + }}; + switch (txd.type) { + case TextureType::_1D: { + coords = v.F(base_reg); + num_derivates = 1; + last_reg = base_reg + 1; + break; + } + case TextureType::ARRAY_1D: { + last_reg = base_reg + 1; + coords = v.ir.CompositeConstruct(v.F(base_reg), read_array()); + num_derivates = 1; + break; + } + case TextureType::_2D: { + last_reg = base_reg + 2; + coords = v.ir.CompositeConstruct(v.F(base_reg), v.F(base_reg + 1)); + num_derivates = 2; + break; + } + case TextureType::ARRAY_2D: { + last_reg = base_reg + 2; + coords = v.ir.CompositeConstruct(v.F(base_reg), v.F(base_reg + 1), read_array()); + num_derivates = 2; + break; + } + default: + throw NotImplementedException("Invalid texture type"); + } + + const IR::Reg derivate_reg{txd.derivate_reg}; + IR::Value derivates; + switch (num_derivates) { + case 1: { + derivates = v.ir.CompositeConstruct(v.F(derivate_reg), v.F(derivate_reg + 1)); + break; + } + case 2: { + derivates = v.ir.CompositeConstruct(v.F(derivate_reg), v.F(derivate_reg + 1), + v.F(derivate_reg + 2), v.F(derivate_reg + 3)); + break; + } + default: + throw NotImplementedException("Invalid texture type"); + } + + IR::Value offset; + if (txd.aoffi != 0) { + offset = MakeOffset(v, last_reg, has_lod_clamp); + } + + IR::F32 lod_clamp; + if (has_lod_clamp) { + // Lod Clamp is a Fixed Point 4.8, we need to transform it to float. + // to convert a fixed point, float(value) / float(1 << fixed_point) + // in this case the fixed_point is 8. + const IR::F32 conv4_8fixp_f{v.ir.Imm32(static_cast(1U << 8))}; + const IR::F32 fixp_lc{v.ir.ConvertUToF( + 32, 16, v.ir.BitFieldExtract(v.X(last_reg), v.ir.Imm32(20), v.ir.Imm32(12)))}; + lod_clamp = v.ir.FPMul(fixp_lc, conv4_8fixp_f); + } + + IR::TextureInstInfo info{}; + info.type.Assign(GetType(txd.type)); + info.num_derivates.Assign(num_derivates); + info.has_lod_clamp.Assign(has_lod_clamp ? 1 : 0); + const IR::Value sample{v.ir.ImageGradient(handle, coords, derivates, offset, lod_clamp, info)}; + + IR::Reg dest_reg{txd.dest_reg}; + for (size_t element = 0; element < 4; ++element) { + if (((txd.mask >> element) & 1) == 0) { + continue; + } + v.F(dest_reg, IR::F32{v.ir.CompositeExtract(sample, element)}); + ++dest_reg; + } + if (txd.sparse_pred != IR::Pred::PT) { + v.ir.SetPred(txd.sparse_pred, v.ir.LogicalNot(v.ir.GetSparseFromOp(sample))); + } +} +} // Anonymous namespace + +void TranslatorVisitor::TXD(u64 insn) { + Impl(*this, insn, false); +} + +void TranslatorVisitor::TXD_b(u64 insn) { + Impl(*this, insn, true); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_load.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_load.cpp new file mode 100755 index 000000000..983058303 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_load.cpp @@ -0,0 +1,165 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
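The TXD translator above converts the 4.8 fixed-point LOD clamp field exactly as its comment says: take the 12-bit field at bit 20 of the last coordinate register and divide by float(1 << 8). A minimal standalone sketch of that conversion, assuming an unsigned field with 4 integer and 8 fraction bits (the helper name is illustrative only):

#include <cstdint>

// Fixed point 4.8 -> float, per the comment in the TXD translator above.
// Assumption: 'raw' holds the last source register and the field is unsigned.
float FixedPoint4_8ToFloat(uint32_t raw) {
    const uint32_t field = (raw >> 20) & 0xfffu; // 12-bit field starting at bit 20
    return static_cast<float>(field) / static_cast<float>(1u << 8);
}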
+ +#include + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { + +enum class TextureType : u64 { + _1D, + ARRAY_1D, + _2D, + ARRAY_2D, + _3D, + ARRAY_3D, + CUBE, + ARRAY_CUBE, +}; + +Shader::TextureType GetType(TextureType type) { + switch (type) { + case TextureType::_1D: + return Shader::TextureType::Color1D; + case TextureType::ARRAY_1D: + return Shader::TextureType::ColorArray1D; + case TextureType::_2D: + return Shader::TextureType::Color2D; + case TextureType::ARRAY_2D: + return Shader::TextureType::ColorArray2D; + case TextureType::_3D: + return Shader::TextureType::Color3D; + case TextureType::ARRAY_3D: + throw NotImplementedException("3D array texture type"); + case TextureType::CUBE: + return Shader::TextureType::ColorCube; + case TextureType::ARRAY_CUBE: + return Shader::TextureType::ColorArrayCube; + } + throw NotImplementedException("Invalid texture type {}", type); +} + +IR::Value MakeCoords(TranslatorVisitor& v, IR::Reg reg, TextureType type) { + const auto read_array{ + [&]() -> IR::U32 { return v.ir.BitFieldExtract(v.X(reg), v.ir.Imm32(0), v.ir.Imm32(16)); }}; + switch (type) { + case TextureType::_1D: + return v.X(reg); + case TextureType::ARRAY_1D: + return v.ir.CompositeConstruct(v.X(reg + 1), read_array()); + case TextureType::_2D: + return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1)); + case TextureType::ARRAY_2D: + return v.ir.CompositeConstruct(v.X(reg + 1), v.X(reg + 2), read_array()); + case TextureType::_3D: + return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1), v.X(reg + 2)); + case TextureType::ARRAY_3D: + throw NotImplementedException("3D array texture type"); + case TextureType::CUBE: + return v.ir.CompositeConstruct(v.X(reg), v.X(reg + 1), v.X(reg + 2)); + case TextureType::ARRAY_CUBE: + return v.ir.CompositeConstruct(v.X(reg + 1), v.X(reg + 2), v.X(reg + 3), read_array()); + } + throw NotImplementedException("Invalid texture type {}", type); +} + +IR::Value MakeOffset(TranslatorVisitor& v, IR::Reg& reg, TextureType type) { + const IR::U32 value{v.X(reg++)}; + switch (type) { + case TextureType::_1D: + case TextureType::ARRAY_1D: + return v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(4), true); + case TextureType::_2D: + case TextureType::ARRAY_2D: + return v.ir.CompositeConstruct( + v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(4), true), + v.ir.BitFieldExtract(value, v.ir.Imm32(4), v.ir.Imm32(4), true)); + case TextureType::_3D: + case TextureType::ARRAY_3D: + return v.ir.CompositeConstruct( + v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(4), true), + v.ir.BitFieldExtract(value, v.ir.Imm32(4), v.ir.Imm32(4), true), + v.ir.BitFieldExtract(value, v.ir.Imm32(8), v.ir.Imm32(4), true)); + case TextureType::CUBE: + case TextureType::ARRAY_CUBE: + throw NotImplementedException("Illegal offset on CUBE sample"); + } + throw NotImplementedException("Invalid texture type {}", type); +} + +void Impl(TranslatorVisitor& v, u64 insn, bool is_bindless) { + union { + u64 raw; + BitField<49, 1, u64> nodep; + BitField<55, 1, u64> lod; + BitField<50, 1, u64> multisample; + BitField<35, 1, u64> aoffi; + BitField<54, 1, u64> clamp; + BitField<51, 3, IR::Pred> sparse_pred; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> coord_reg; + BitField<20, 8, IR::Reg> meta_reg; + BitField<28, 3, TextureType> type; + BitField<31, 4, u64> mask; + BitField<36, 13, u64> 
cbuf_offset;
+    } const tld{insn};
+
+    const IR::Value coords{MakeCoords(v, tld.coord_reg, tld.type)};
+
+    IR::Reg meta_reg{tld.meta_reg};
+    IR::Value handle;
+    IR::Value offset;
+    IR::U32 lod;
+    IR::U32 multisample;
+    if (is_bindless) {
+        handle = v.X(meta_reg++);
+    } else {
+        handle = v.ir.Imm32(static_cast<u32>(tld.cbuf_offset.Value() * 4));
+    }
+    if (tld.lod != 0) {
+        lod = v.X(meta_reg++);
+    } else {
+        lod = v.ir.Imm32(0U);
+    }
+    if (tld.aoffi != 0) {
+        offset = MakeOffset(v, meta_reg, tld.type);
+    }
+    if (tld.multisample != 0) {
+        multisample = v.X(meta_reg++);
+    }
+    if (tld.clamp != 0) {
+        throw NotImplementedException("TLD.CL - CLAMP is not implemented");
+    }
+    IR::TextureInstInfo info{};
+    info.type.Assign(GetType(tld.type));
+    const IR::Value sample{v.ir.ImageFetch(handle, coords, offset, lod, multisample, info)};
+
+    IR::Reg dest_reg{tld.dest_reg};
+    for (size_t element = 0; element < 4; ++element) {
+        if (((tld.mask >> element) & 1) == 0) {
+            continue;
+        }
+        v.F(dest_reg, IR::F32{v.ir.CompositeExtract(sample, element)});
+        ++dest_reg;
+    }
+    if (tld.sparse_pred != IR::Pred::PT) {
+        v.ir.SetPred(tld.sparse_pred, v.ir.LogicalNot(v.ir.GetSparseFromOp(sample)));
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::TLD(u64 insn) {
+    Impl(*this, insn, false);
+}
+
+void TranslatorVisitor::TLD_b(u64 insn) {
+    Impl(*this, insn, true);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_load_swizzled.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_load_swizzled.cpp
new file mode 100755
index 000000000..5dd7e31b2
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_load_swizzled.cpp
@@ -0,0 +1,242 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
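One subtlety in the TLD translator above: the operands packed after the handle are consumed strictly in order (LOD, then the AOFFI word, then the multisample index), each advancing meta_reg only when its flag is set. The signed 4-bit texel offsets decode as in this sketch, which mirrors the signed BitFieldExtract calls of MakeOffset for the 2D case (helper names are illustrative):

#include <cstdint>

// Sign-extend a 4-bit value, as the signed BitFieldExtract calls above do in IR.
int32_t SignExtend4(uint32_t v) {
    return static_cast<int32_t>(v << 28) >> 28; // arithmetic shift back down (well-defined in C++20)
}

// Decode the packed AOFFI register into 2D texel offsets: x in bits [3:0], y in bits [7:4].
void DecodeOffset2D(uint32_t reg, int32_t& x, int32_t& y) {
    x = SignExtend4(reg & 0xfu);
    y = SignExtend4((reg >> 4) & 0xfu);
}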
+ +#include + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class Precision : u64 { + F16, + F32, +}; + +constexpr unsigned R = 1; +constexpr unsigned G = 2; +constexpr unsigned B = 4; +constexpr unsigned A = 8; + +constexpr std::array RG_LUT{ + R, // + G, // + B, // + A, // + R | G, // + R | A, // + G | A, // + B | A, // +}; + +constexpr std::array RGBA_LUT{ + R | G | B, // + R | G | A, // + R | B | A, // + G | B | A, // + R | G | B | A, // +}; + +union Encoding { + u64 raw; + BitField<59, 1, Precision> precision; + BitField<54, 1, u64> aoffi; + BitField<53, 1, u64> lod; + BitField<55, 1, u64> ms; + BitField<49, 1, u64> nodep; + BitField<28, 8, IR::Reg> dest_reg_b; + BitField<0, 8, IR::Reg> dest_reg_a; + BitField<8, 8, IR::Reg> src_reg_a; + BitField<20, 8, IR::Reg> src_reg_b; + BitField<36, 13, u64> cbuf_offset; + BitField<50, 3, u64> swizzle; + BitField<53, 4, u64> encoding; +}; + +void CheckAlignment(IR::Reg reg, size_t alignment) { + if (!IR::IsAligned(reg, alignment)) { + throw NotImplementedException("Unaligned source register {}", reg); + } +} + +IR::Value MakeOffset(TranslatorVisitor& v, IR::Reg reg) { + const IR::U32 value{v.X(reg)}; + return v.ir.CompositeConstruct(v.ir.BitFieldExtract(value, v.ir.Imm32(0), v.ir.Imm32(4), true), + v.ir.BitFieldExtract(value, v.ir.Imm32(4), v.ir.Imm32(4), true)); +} + +IR::Value Sample(TranslatorVisitor& v, u64 insn) { + const Encoding tlds{insn}; + const IR::U32 handle{v.ir.Imm32(static_cast(tlds.cbuf_offset * 4))}; + const IR::Reg reg_a{tlds.src_reg_a}; + const IR::Reg reg_b{tlds.src_reg_b}; + IR::Value coords; + IR::U32 lod{v.ir.Imm32(0U)}; + IR::Value offsets; + IR::U32 multisample; + Shader::TextureType texture_type{}; + switch (tlds.encoding) { + case 0: + texture_type = Shader::TextureType::Color1D; + coords = v.X(reg_a); + break; + case 1: + texture_type = Shader::TextureType::Color1D; + coords = v.X(reg_a); + lod = v.X(reg_b); + break; + case 2: + texture_type = Shader::TextureType::Color2D; + coords = v.ir.CompositeConstruct(v.X(reg_a), v.X(reg_b)); + break; + case 4: + CheckAlignment(reg_a, 2); + texture_type = Shader::TextureType::Color2D; + coords = v.ir.CompositeConstruct(v.X(reg_a), v.X(reg_a + 1)); + offsets = MakeOffset(v, reg_b); + break; + case 5: + CheckAlignment(reg_a, 2); + texture_type = Shader::TextureType::Color2D; + coords = v.ir.CompositeConstruct(v.X(reg_a), v.X(reg_a + 1)); + lod = v.X(reg_b); + break; + case 6: + CheckAlignment(reg_a, 2); + texture_type = Shader::TextureType::Color2D; + coords = v.ir.CompositeConstruct(v.X(reg_a), v.X(reg_a + 1)); + multisample = v.X(reg_b); + break; + case 7: + CheckAlignment(reg_a, 2); + texture_type = Shader::TextureType::Color3D; + coords = v.ir.CompositeConstruct(v.X(reg_a), v.X(reg_a + 1), v.X(reg_b)); + break; + case 8: { + CheckAlignment(reg_b, 2); + const IR::U32 array{v.ir.BitFieldExtract(v.X(reg_a), v.ir.Imm32(0), v.ir.Imm32(16))}; + texture_type = Shader::TextureType::ColorArray2D; + coords = v.ir.CompositeConstruct(v.X(reg_b), v.X(reg_b + 1), array); + break; + } + case 12: + CheckAlignment(reg_a, 2); + CheckAlignment(reg_b, 2); + texture_type = Shader::TextureType::Color2D; + coords = v.ir.CompositeConstruct(v.X(reg_a), v.X(reg_a + 1)); + lod = v.X(reg_b); + offsets = MakeOffset(v, reg_b + 1); + break; + default: + throw NotImplementedException("Illegal encoding {}", 
tlds.encoding.Value()); + } + IR::TextureInstInfo info{}; + if (tlds.precision == Precision::F16) { + info.relaxed_precision.Assign(1); + } + info.type.Assign(texture_type); + return v.ir.ImageFetch(handle, coords, offsets, lod, multisample, info); +} + +unsigned Swizzle(u64 insn) { + const Encoding tlds{insn}; + const size_t encoding{tlds.swizzle}; + if (tlds.dest_reg_b == IR::Reg::RZ) { + if (encoding >= RG_LUT.size()) { + throw NotImplementedException("Illegal RG encoding {}", encoding); + } + return RG_LUT[encoding]; + } else { + if (encoding >= RGBA_LUT.size()) { + throw NotImplementedException("Illegal RGBA encoding {}", encoding); + } + return RGBA_LUT[encoding]; + } +} + +IR::F32 Extract(TranslatorVisitor& v, const IR::Value& sample, unsigned component) { + return IR::F32{v.ir.CompositeExtract(sample, component)}; +} + +IR::Reg RegStoreComponent32(u64 insn, unsigned index) { + const Encoding tlds{insn}; + switch (index) { + case 0: + return tlds.dest_reg_a; + case 1: + CheckAlignment(tlds.dest_reg_a, 2); + return tlds.dest_reg_a + 1; + case 2: + return tlds.dest_reg_b; + case 3: + CheckAlignment(tlds.dest_reg_b, 2); + return tlds.dest_reg_b + 1; + } + throw LogicError("Invalid store index {}", index); +} + +void Store32(TranslatorVisitor& v, u64 insn, const IR::Value& sample) { + const unsigned swizzle{Swizzle(insn)}; + unsigned store_index{0}; + for (unsigned component = 0; component < 4; ++component) { + if (((swizzle >> component) & 1) == 0) { + continue; + } + const IR::Reg dest{RegStoreComponent32(insn, store_index)}; + v.F(dest, Extract(v, sample, component)); + ++store_index; + } +} + +IR::U32 Pack(TranslatorVisitor& v, const IR::F32& lhs, const IR::F32& rhs) { + return v.ir.PackHalf2x16(v.ir.CompositeConstruct(lhs, rhs)); +} + +void Store16(TranslatorVisitor& v, u64 insn, const IR::Value& sample) { + const unsigned swizzle{Swizzle(insn)}; + unsigned store_index{0}; + std::array swizzled; + for (unsigned component = 0; component < 4; ++component) { + if (((swizzle >> component) & 1) == 0) { + continue; + } + swizzled[store_index] = Extract(v, sample, component); + ++store_index; + } + const IR::F32 zero{v.ir.Imm32(0.0f)}; + const Encoding tlds{insn}; + switch (store_index) { + case 1: + v.X(tlds.dest_reg_a, Pack(v, swizzled[0], zero)); + break; + case 2: + case 3: + case 4: + v.X(tlds.dest_reg_a, Pack(v, swizzled[0], swizzled[1])); + switch (store_index) { + case 2: + break; + case 3: + v.X(tlds.dest_reg_b, Pack(v, swizzled[2], zero)); + break; + case 4: + v.X(tlds.dest_reg_b, Pack(v, swizzled[2], swizzled[3])); + break; + } + break; + } +} +} // Anonymous namespace + +void TranslatorVisitor::TLDS(u64 insn) { + const IR::Value sample{Sample(*this, insn)}; + if (Encoding{insn}.precision == Precision::F32) { + Store32(*this, insn, sample); + } else { + Store16(*this, insn, sample); + } +} +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_mipmap_level.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_mipmap_level.cpp new file mode 100755 index 000000000..aea3c0e62 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_mipmap_level.cpp @@ -0,0 +1,131 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
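TLDS picks its component mask from one of two lookup tables: RG_LUT when dest_reg_b is RZ (at most two stored results) and RGBA_LUT otherwise (three or four). A standalone model of that selection, with the LUT entries written out as bitmasks (R=1, G=2, B=4, A=8); the function name is illustrative:

#include <array>
#include <cstdint>
#include <stdexcept>

// Bitmask LUTs matching RG_LUT and RGBA_LUT above: R=1, G=2, B=4, A=8.
constexpr std::array<unsigned, 8> rg_lut{1, 2, 4, 8, 1 | 2, 1 | 8, 2 | 8, 4 | 8};
constexpr std::array<unsigned, 5> rgba_lut{1 | 2 | 4, 1 | 2 | 8, 1 | 4 | 8, 2 | 4 | 8, 15};

unsigned SwizzleMask(uint64_t swizzle, bool dest_reg_b_is_rz) {
    if (dest_reg_b_is_rz) {
        if (swizzle >= rg_lut.size()) {
            throw std::out_of_range{"illegal RG encoding"};
        }
        return rg_lut[swizzle];
    }
    if (swizzle >= rgba_lut.size()) {
        throw std::out_of_range{"illegal RGBA encoding"};
    }
    return rgba_lut[swizzle];
}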
+
+#include
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+
+enum class TextureType : u64 {
+    _1D,
+    ARRAY_1D,
+    _2D,
+    ARRAY_2D,
+    _3D,
+    ARRAY_3D,
+    CUBE,
+    ARRAY_CUBE,
+};
+
+Shader::TextureType GetType(TextureType type) {
+    switch (type) {
+    case TextureType::_1D:
+        return Shader::TextureType::Color1D;
+    case TextureType::ARRAY_1D:
+        return Shader::TextureType::ColorArray1D;
+    case TextureType::_2D:
+        return Shader::TextureType::Color2D;
+    case TextureType::ARRAY_2D:
+        return Shader::TextureType::ColorArray2D;
+    case TextureType::_3D:
+        return Shader::TextureType::Color3D;
+    case TextureType::ARRAY_3D:
+        throw NotImplementedException("3D array texture type");
+    case TextureType::CUBE:
+        return Shader::TextureType::ColorCube;
+    case TextureType::ARRAY_CUBE:
+        return Shader::TextureType::ColorArrayCube;
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+IR::Value MakeCoords(TranslatorVisitor& v, IR::Reg reg, TextureType type) {
+    // The ISA reads an array component here, but high-level shading languages
+    // do not need it, so we drop that information.
+    switch (type) {
+    case TextureType::_1D:
+        return v.F(reg);
+    case TextureType::ARRAY_1D:
+        return v.F(reg + 1);
+    case TextureType::_2D:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1));
+    case TextureType::ARRAY_2D:
+        return v.ir.CompositeConstruct(v.F(reg + 1), v.F(reg + 2));
+    case TextureType::_3D:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1), v.F(reg + 2));
+    case TextureType::ARRAY_3D:
+        throw NotImplementedException("3D array texture type");
+    case TextureType::CUBE:
+        return v.ir.CompositeConstruct(v.F(reg), v.F(reg + 1), v.F(reg + 2));
+    case TextureType::ARRAY_CUBE:
+        return v.ir.CompositeConstruct(v.F(reg + 1), v.F(reg + 2), v.F(reg + 3));
+    }
+    throw NotImplementedException("Invalid texture type {}", type);
+}
+
+void Impl(TranslatorVisitor& v, u64 insn, bool is_bindless) {
+    union {
+        u64 raw;
+        BitField<49, 1, u64> nodep;
+        BitField<35, 1, u64> ndv;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> coord_reg;
+        BitField<20, 8, IR::Reg> meta_reg;
+        BitField<28, 3, TextureType> type;
+        BitField<31, 4, u64> mask;
+        BitField<36, 13, u64> cbuf_offset;
+    } const tmml{insn};
+
+    if ((tmml.mask & 0b1100) != 0) {
+        throw NotImplementedException("TMML BA results are not implemented");
+    }
+    const IR::Value coords{MakeCoords(v, tmml.coord_reg, tmml.type)};
+
+    IR::U32 handle;
+    IR::Reg meta_reg{tmml.meta_reg};
+    if (is_bindless) {
+        handle = v.X(meta_reg++);
+    } else {
+        handle = v.ir.Imm32(static_cast<u32>(tmml.cbuf_offset.Value() * 4));
+    }
+    IR::TextureInstInfo info{};
+    info.type.Assign(GetType(tmml.type));
+    const IR::Value sample{v.ir.ImageQueryLod(handle, coords, info)};
+
+    IR::Reg dest_reg{tmml.dest_reg};
+    for (size_t element = 0; element < 4; ++element) {
+        if (((tmml.mask >> element) & 1) == 0) {
+            continue;
+        }
+        IR::F32 value{v.ir.CompositeExtract(sample, element)};
+        if (element < 2) {
+            IR::U32 casted_value;
+            if (element == 0) {
+                casted_value = v.ir.ConvertFToU(32, value);
+            } else {
+                casted_value = v.ir.ConvertFToS(16, value);
+            }
+            v.X(dest_reg, v.ir.ShiftLeftLogical(casted_value, v.ir.Imm32(8)));
+        } else {
+            v.F(dest_reg, value);
+        }
+        ++dest_reg;
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::TMML(u64 insn) {
+    Impl(*this, insn, false);
+}
+
+void TranslatorVisitor::TMML_b(u64 insn) {
+    Impl(*this, insn, true);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/texture_query.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_query.cpp
new file mode 100755
index 000000000..0459e5473
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/texture_query.cpp
@@ -0,0 +1,76 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <optional>
+
+#include "common/bit_field.h"
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/modifiers.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h"
+
+namespace Shader::Maxwell {
+namespace {
+enum class Mode : u64 {
+    Dimension = 1,
+    TextureType = 2,
+    SamplePos = 5,
+};
+
+IR::Value Query(TranslatorVisitor& v, const IR::U32& handle, Mode mode, IR::Reg src_reg) {
+    switch (mode) {
+    case Mode::Dimension: {
+        const IR::U32 lod{v.X(src_reg)};
+        return v.ir.ImageQueryDimension(handle, lod);
+    }
+    case Mode::TextureType:
+    case Mode::SamplePos:
+    default:
+        throw NotImplementedException("Mode {}", mode);
+    }
+}
+
+void Impl(TranslatorVisitor& v, u64 insn, std::optional<u32> cbuf_offset) {
+    union {
+        u64 raw;
+        BitField<49, 1, u64> nodep;
+        BitField<0, 8, IR::Reg> dest_reg;
+        BitField<8, 8, IR::Reg> src_reg;
+        BitField<22, 3, Mode> mode;
+        BitField<31, 4, u64> mask;
+    } const txq{insn};
+
+    IR::Reg src_reg{txq.src_reg};
+    IR::U32 handle;
+    if (cbuf_offset) {
+        handle = v.ir.Imm32(*cbuf_offset);
+    } else {
+        handle = v.X(src_reg);
+        ++src_reg;
+    }
+    const IR::Value query{Query(v, handle, txq.mode, src_reg)};
+    IR::Reg dest_reg{txq.dest_reg};
+    for (int element = 0; element < 4; ++element) {
+        if (((txq.mask >> element) & 1) == 0) {
+            continue;
+        }
+        v.X(dest_reg, IR::U32{v.ir.CompositeExtract(query, static_cast<size_t>(element))});
+        ++dest_reg;
+    }
+}
+} // Anonymous namespace
+
+void TranslatorVisitor::TXQ(u64 insn) {
+    union {
+        u64 raw;
+        BitField<36, 13, u64> cbuf_offset;
+    } const txq{insn};
+
+    Impl(*this, insn, static_cast<u32>(txq.cbuf_offset * 4));
+}
+
+void TranslatorVisitor::TXQ_b(u64 insn) {
+    Impl(*this, insn, std::nullopt);
+}
+
+} // namespace Shader::Maxwell
diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.cpp
new file mode 100755
index 000000000..e1f4174cf
--- /dev/null
+++ b/src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.cpp
@@ -0,0 +1,30 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/maxwell/translate/impl/video_helper.h"
+
+namespace Shader::Maxwell {
+
+IR::U32 ExtractVideoOperandValue(IR::IREmitter& ir, const IR::U32& value, VideoWidth width,
+                                 u32 selector, bool is_signed) {
+    switch (width) {
+    case VideoWidth::Byte:
+    case VideoWidth::Unknown:
+        return ir.BitFieldExtract(value, ir.Imm32(selector * 8), ir.Imm32(8), is_signed);
+    case VideoWidth::Short:
+        return ir.BitFieldExtract(value, ir.Imm32(selector * 16), ir.Imm32(16), is_signed);
+    case VideoWidth::Word:
+        return value;
+    default:
+        throw NotImplementedException("Unknown VideoWidth {}", width);
+    }
+}
+
+VideoWidth GetVideoSourceWidth(VideoWidth width, bool is_immediate) {
+    // Immediates must be in 16-bit format.
+    return is_immediate ?
VideoWidth::Short : width; +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.h b/src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.h new file mode 100755 index 000000000..40c0b907c --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/video_helper.h @@ -0,0 +1,23 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +enum class VideoWidth : u64 { + Byte, + Unknown, + Short, + Word, +}; + +[[nodiscard]] IR::U32 ExtractVideoOperandValue(IR::IREmitter& ir, const IR::U32& value, + VideoWidth width, u32 selector, bool is_signed); + +[[nodiscard]] VideoWidth GetVideoSourceWidth(VideoWidth width, bool is_immediate); + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/video_minimum_maximum.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/video_minimum_maximum.cpp new file mode 100755 index 000000000..78869601f --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/video_minimum_maximum.cpp @@ -0,0 +1,92 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/video_helper.h" + +namespace Shader::Maxwell { +namespace { +enum class VideoMinMaxOps : u64 { + MRG_16H, + MRG_16L, + MRG_8B0, + MRG_8B2, + ACC, + MIN, + MAX, +}; + +[[nodiscard]] IR::U32 ApplyVideoMinMaxOp(IR::IREmitter& ir, const IR::U32& lhs, const IR::U32& rhs, + VideoMinMaxOps op, bool is_signed) { + switch (op) { + case VideoMinMaxOps::MIN: + return ir.IMin(lhs, rhs, is_signed); + case VideoMinMaxOps::MAX: + return ir.IMax(lhs, rhs, is_signed); + default: + throw NotImplementedException("VMNMX op {}", op); + } +} +} // Anonymous namespace + +void TranslatorVisitor::VMNMX(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<20, 16, u64> src_b_imm; + BitField<28, 2, u64> src_b_selector; + BitField<29, 2, VideoWidth> src_b_width; + BitField<36, 2, u64> src_a_selector; + BitField<37, 2, VideoWidth> src_a_width; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> src_a_sign; + BitField<49, 1, u64> src_b_sign; + BitField<50, 1, u64> is_src_b_reg; + BitField<51, 3, VideoMinMaxOps> op; + BitField<54, 1, u64> dest_sign; + BitField<55, 1, u64> sat; + BitField<56, 1, u64> mx; + } const vmnmx{insn}; + + if (vmnmx.cc != 0) { + throw NotImplementedException("VMNMX CC"); + } + if (vmnmx.sat != 0) { + throw NotImplementedException("VMNMX SAT"); + } + // Selectors were shown to default to 2 in unit tests + if (vmnmx.src_a_selector != 2) { + throw NotImplementedException("VMNMX Selector {}", vmnmx.src_a_selector.Value()); + } + if (vmnmx.src_b_selector != 2) { + throw NotImplementedException("VMNMX Selector {}", vmnmx.src_b_selector.Value()); + } + if (vmnmx.src_a_width != VideoWidth::Word) { + throw NotImplementedException("VMNMX Source Width {}", vmnmx.src_a_width.Value()); + } + + const bool is_b_imm{vmnmx.is_src_b_reg == 0}; + const IR::U32 src_a{GetReg8(insn)}; + const IR::U32 src_b{is_b_imm ? 
ir.Imm32(static_cast(vmnmx.src_b_imm)) : GetReg20(insn)}; + const IR::U32 src_c{GetReg39(insn)}; + + const VideoWidth a_width{vmnmx.src_a_width}; + const VideoWidth b_width{GetVideoSourceWidth(vmnmx.src_b_width, is_b_imm)}; + + const bool src_a_signed{vmnmx.src_a_sign != 0}; + const bool src_b_signed{vmnmx.src_b_sign != 0}; + const IR::U32 op_a{ExtractVideoOperandValue(ir, src_a, a_width, 0, src_a_signed)}; + const IR::U32 op_b{ExtractVideoOperandValue(ir, src_b, b_width, 0, src_b_signed)}; + + // First operation's sign is only dependent on operand b's sign + const bool op_1_signed{src_b_signed}; + + const IR::U32 lhs{vmnmx.mx != 0 ? ir.IMax(op_a, op_b, op_1_signed) + : ir.IMin(op_a, op_b, op_1_signed)}; + X(vmnmx.dest_reg, ApplyVideoMinMaxOp(ir, lhs, src_c, vmnmx.op, vmnmx.dest_sign != 0)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/video_multiply_add.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/video_multiply_add.cpp new file mode 100755 index 000000000..cc2e6d6e6 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/video_multiply_add.cpp @@ -0,0 +1,64 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/video_helper.h" + +namespace Shader::Maxwell { +void TranslatorVisitor::VMAD(u64 insn) { + union { + u64 raw; + BitField<0, 8, IR::Reg> dest_reg; + BitField<20, 16, u64> src_b_imm; + BitField<28, 2, u64> src_b_selector; + BitField<29, 2, VideoWidth> src_b_width; + BitField<36, 2, u64> src_a_selector; + BitField<37, 2, VideoWidth> src_a_width; + BitField<47, 1, u64> cc; + BitField<48, 1, u64> src_a_sign; + BitField<49, 1, u64> src_b_sign; + BitField<50, 1, u64> is_src_b_reg; + BitField<51, 2, u64> scale; + BitField<53, 1, u64> src_c_neg; + BitField<54, 1, u64> src_a_neg; + BitField<55, 1, u64> sat; + } const vmad{insn}; + + if (vmad.cc != 0) { + throw NotImplementedException("VMAD CC"); + } + if (vmad.sat != 0) { + throw NotImplementedException("VMAD SAT"); + } + if (vmad.scale != 0) { + throw NotImplementedException("VMAD SCALE"); + } + if (vmad.src_a_neg != 0 && vmad.src_c_neg != 0) { + throw NotImplementedException("VMAD PO"); + } + if (vmad.src_a_neg != 0 || vmad.src_c_neg != 0) { + throw NotImplementedException("VMAD NEG"); + } + const bool is_b_imm{vmad.is_src_b_reg == 0}; + const IR::U32 src_a{GetReg8(insn)}; + const IR::U32 src_b{is_b_imm ? ir.Imm32(static_cast(vmad.src_b_imm)) : GetReg20(insn)}; + const IR::U32 src_c{GetReg39(insn)}; + + const u32 a_selector{static_cast(vmad.src_a_selector)}; + // Immediate values can't have a selector + const u32 b_selector{is_b_imm ? 
0U : static_cast(vmad.src_b_selector)}; + const VideoWidth a_width{vmad.src_a_width}; + const VideoWidth b_width{GetVideoSourceWidth(vmad.src_b_width, is_b_imm)}; + + const bool src_a_signed{vmad.src_a_sign != 0}; + const bool src_b_signed{vmad.src_b_sign != 0}; + const IR::U32 op_a{ExtractVideoOperandValue(ir, src_a, a_width, a_selector, src_a_signed)}; + const IR::U32 op_b{ExtractVideoOperandValue(ir, src_b, b_width, b_selector, src_b_signed)}; + + X(vmad.dest_reg, ir.IAdd(ir.IMul(op_a, op_b), src_c)); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/video_set_predicate.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/video_set_predicate.cpp new file mode 100755 index 000000000..1b66abc33 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/video_set_predicate.cpp @@ -0,0 +1,92 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/common_types.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/common_funcs.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/video_helper.h" + +namespace Shader::Maxwell { +namespace { +enum class VsetpCompareOp : u64 { + False = 0, + LessThan, + Equal, + LessThanEqual, + GreaterThan = 16, + NotEqual, + GreaterThanEqual, + True, +}; + +CompareOp VsetpToShaderCompareOp(VsetpCompareOp op) { + switch (op) { + case VsetpCompareOp::False: + return CompareOp::False; + case VsetpCompareOp::LessThan: + return CompareOp::LessThan; + case VsetpCompareOp::Equal: + return CompareOp::Equal; + case VsetpCompareOp::LessThanEqual: + return CompareOp::LessThanEqual; + case VsetpCompareOp::GreaterThan: + return CompareOp::GreaterThan; + case VsetpCompareOp::NotEqual: + return CompareOp::NotEqual; + case VsetpCompareOp::GreaterThanEqual: + return CompareOp::GreaterThanEqual; + case VsetpCompareOp::True: + return CompareOp::True; + default: + throw NotImplementedException("Invalid compare op {}", op); + } +} +} // Anonymous namespace + +void TranslatorVisitor::VSETP(u64 insn) { + union { + u64 raw; + BitField<0, 3, IR::Pred> dest_pred_b; + BitField<3, 3, IR::Pred> dest_pred_a; + BitField<20, 16, u64> src_b_imm; + BitField<28, 2, u64> src_b_selector; + BitField<29, 2, VideoWidth> src_b_width; + BitField<36, 2, u64> src_a_selector; + BitField<37, 2, VideoWidth> src_a_width; + BitField<39, 3, IR::Pred> bop_pred; + BitField<42, 1, u64> neg_bop_pred; + BitField<43, 5, VsetpCompareOp> compare_op; + BitField<45, 2, BooleanOp> bop; + BitField<48, 1, u64> src_a_sign; + BitField<49, 1, u64> src_b_sign; + BitField<50, 1, u64> is_src_b_reg; + } const vsetp{insn}; + + const bool is_b_imm{vsetp.is_src_b_reg == 0}; + const IR::U32 src_a{GetReg8(insn)}; + const IR::U32 src_b{is_b_imm ? 
ir.Imm32(static_cast(vsetp.src_b_imm)) : GetReg20(insn)}; + + const u32 a_selector{static_cast(vsetp.src_a_selector)}; + const u32 b_selector{static_cast(vsetp.src_b_selector)}; + const VideoWidth a_width{vsetp.src_a_width}; + const VideoWidth b_width{GetVideoSourceWidth(vsetp.src_b_width, is_b_imm)}; + + const bool src_a_signed{vsetp.src_a_sign != 0}; + const bool src_b_signed{vsetp.src_b_sign != 0}; + const IR::U32 op_a{ExtractVideoOperandValue(ir, src_a, a_width, a_selector, src_a_signed)}; + const IR::U32 op_b{ExtractVideoOperandValue(ir, src_b, b_width, b_selector, src_b_signed)}; + + // Compare operation's sign is only dependent on operand b's sign + const bool compare_signed{src_b_signed}; + const CompareOp compare_op{VsetpToShaderCompareOp(vsetp.compare_op)}; + const IR::U1 comparison{IntegerCompare(ir, op_a, op_b, compare_op, compare_signed)}; + const IR::U1 bop_pred{ir.GetPred(vsetp.bop_pred, vsetp.neg_bop_pred != 0)}; + const IR::U1 result_a{PredicateCombine(ir, comparison, bop_pred, vsetp.bop)}; + const IR::U1 result_b{PredicateCombine(ir, ir.LogicalNot(comparison), bop_pred, vsetp.bop)}; + ir.SetPred(vsetp.dest_pred_a, result_a); + ir.SetPred(vsetp.dest_pred_b, result_b); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/vote.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/vote.cpp new file mode 100755 index 000000000..7ce370f09 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/vote.cpp @@ -0,0 +1,54 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class VoteOp : u64 { + ALL, + ANY, + EQ, +}; + +[[nodiscard]] IR::U1 VoteOperation(IR::IREmitter& ir, const IR::U1& pred, VoteOp vote_op) { + switch (vote_op) { + case VoteOp::ALL: + return ir.VoteAll(pred); + case VoteOp::ANY: + return ir.VoteAny(pred); + case VoteOp::EQ: + return ir.VoteEqual(pred); + default: + throw NotImplementedException("Invalid VOTE op {}", vote_op); + } +} + +void Vote(TranslatorVisitor& v, u64 insn) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<39, 3, IR::Pred> pred_a; + BitField<42, 1, u64> neg_pred_a; + BitField<45, 3, IR::Pred> pred_b; + BitField<48, 2, VoteOp> vote_op; + } const vote{insn}; + + const IR::U1 vote_pred{v.ir.GetPred(vote.pred_a, vote.neg_pred_a != 0)}; + v.ir.SetPred(vote.pred_b, VoteOperation(v.ir, vote_pred, vote.vote_op)); + v.X(vote.dest_reg, v.ir.SubgroupBallot(vote_pred)); +} +} // Anonymous namespace + +void TranslatorVisitor::VOTE(u64 insn) { + Vote(*this, insn); +} + +void TranslatorVisitor::VOTE_vtg(u64) { + LOG_WARNING(Shader, "(STUBBED) called"); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/impl/warp_shuffle.cpp b/src/shader_recompiler/frontend/maxwell/translate/impl/warp_shuffle.cpp new file mode 100755 index 000000000..550fed55c --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/impl/warp_shuffle.cpp @@ -0,0 +1,69 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
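VSETP above writes two predicates from a single comparison: dest_pred_a combines the comparison with the auxiliary predicate, and dest_pred_b combines its negation with the same predicate. A scalar model of that pairing, assuming the boolean op is one of AND/OR/XOR (the semantics of PredicateCombine are taken on assumption from its use here; names are illustrative):

#include <stdexcept>
#include <utility>

enum class BooleanOp { AND, OR, XOR };

// Scalar stand-in for PredicateCombine as VSETP uses it above.
bool Combine(bool lhs, bool rhs, BooleanOp op) {
    switch (op) {
    case BooleanOp::AND:
        return lhs && rhs;
    case BooleanOp::OR:
        return lhs || rhs;
    case BooleanOp::XOR:
        return lhs != rhs;
    }
    throw std::invalid_argument{"invalid boolean op"};
}

// Returns {result_a, result_b} for one comparison and the bop predicate.
std::pair<bool, bool> VsetpResults(bool comparison, bool bop_pred, BooleanOp op) {
    return {Combine(comparison, bop_pred, op), Combine(!comparison, bop_pred, op)};
}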
+ +#include + +#include "common/bit_field.h" +#include "common/common_types.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" + +namespace Shader::Maxwell { +namespace { +enum class ShuffleMode : u64 { + IDX, + UP, + DOWN, + BFLY, +}; + +[[nodiscard]] IR::U32 ShuffleOperation(IR::IREmitter& ir, const IR::U32& value, + const IR::U32& index, const IR::U32& mask, + ShuffleMode shfl_op) { + const IR::U32 clamp{ir.BitFieldExtract(mask, ir.Imm32(0), ir.Imm32(5))}; + const IR::U32 seg_mask{ir.BitFieldExtract(mask, ir.Imm32(8), ir.Imm32(5))}; + switch (shfl_op) { + case ShuffleMode::IDX: + return ir.ShuffleIndex(value, index, clamp, seg_mask); + case ShuffleMode::UP: + return ir.ShuffleUp(value, index, clamp, seg_mask); + case ShuffleMode::DOWN: + return ir.ShuffleDown(value, index, clamp, seg_mask); + case ShuffleMode::BFLY: + return ir.ShuffleButterfly(value, index, clamp, seg_mask); + default: + throw NotImplementedException("Invalid SHFL op {}", shfl_op); + } +} + +void Shuffle(TranslatorVisitor& v, u64 insn, const IR::U32& index, const IR::U32& mask) { + union { + u64 insn; + BitField<0, 8, IR::Reg> dest_reg; + BitField<8, 8, IR::Reg> src_reg; + BitField<30, 2, ShuffleMode> mode; + BitField<48, 3, IR::Pred> pred; + } const shfl{insn}; + + const IR::U32 result{ShuffleOperation(v.ir, v.X(shfl.src_reg), index, mask, shfl.mode)}; + v.ir.SetPred(shfl.pred, v.ir.GetInBoundsFromOp(result)); + v.X(shfl.dest_reg, result); +} +} // Anonymous namespace + +void TranslatorVisitor::SHFL(u64 insn) { + union { + u64 insn; + BitField<20, 5, u64> src_a_imm; + BitField<28, 1, u64> src_a_flag; + BitField<29, 1, u64> src_b_flag; + BitField<34, 13, u64> src_b_imm; + } const flags{insn}; + const IR::U32 src_a{flags.src_a_flag != 0 ? ir.Imm32(static_cast(flags.src_a_imm)) + : GetReg20(insn)}; + const IR::U32 src_b{flags.src_b_flag != 0 ? ir.Imm32(static_cast(flags.src_b_imm)) + : GetReg39(insn)}; + Shuffle(*this, insn, src_a, src_b); +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/translate.cpp b/src/shader_recompiler/frontend/maxwell/translate/translate.cpp new file mode 100755 index 000000000..8e3c4c5d5 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/translate.cpp @@ -0,0 +1,52 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
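The SHFL translator above never hands the raw mask register to the backend; ShuffleOperation first splits it into a 5-bit clamp lane in bits [4:0] and a 5-bit segmentation mask in bits [12:8]. The same split in plain C++ (struct and function names are illustrative):

#include <cstdint>

struct ShuffleMaskFields {
    uint32_t clamp;        // maximum lane index, bits [4:0]
    uint32_t segmentation; // subgroup segmentation mask, bits [12:8]
};

// Mirrors the two BitFieldExtract calls in ShuffleOperation above.
ShuffleMaskFields DecodeShuffleMask(uint32_t mask) {
    return {
        .clamp = mask & 0x1fu,
        .segmentation = (mask >> 8) & 0x1fu,
    };
}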
+ +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/maxwell/decode.h" +#include "shader_recompiler/frontend/maxwell/location.h" +#include "shader_recompiler/frontend/maxwell/translate/impl/impl.h" +#include "shader_recompiler/frontend/maxwell/translate/translate.h" + +namespace Shader::Maxwell { + +template +static void Invoke(TranslatorVisitor& visitor, Location pc, u64 insn) { + using MethodType = decltype(method); + if constexpr (std::is_invocable_r_v) { + (visitor.*method)(pc, insn); + } else if constexpr (std::is_invocable_r_v) { + (visitor.*method)(insn); + } else { + (visitor.*method)(); + } +} + +void Translate(Environment& env, IR::Block* block, u32 location_begin, u32 location_end) { + if (location_begin == location_end) { + return; + } + TranslatorVisitor visitor{env, *block}; + for (Location pc = location_begin; pc != location_end; ++pc) { + const u64 insn{env.ReadInstruction(pc.Offset())}; + try { + const Opcode opcode{Decode(insn)}; + switch (opcode) { +#define INST(name, cute, mask) \ + case Opcode::name: \ + Invoke<&TranslatorVisitor::name>(visitor, pc, insn); \ + break; +#include "shader_recompiler/frontend/maxwell/maxwell.inc" +#undef OPCODE + default: + throw LogicError("Invalid opcode {}", opcode); + } + } catch (Exception& exception) { + exception.Prepend(fmt::format("Translate {}: ", Decode(insn))); + throw; + } + } +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate/translate.h b/src/shader_recompiler/frontend/maxwell/translate/translate.h new file mode 100755 index 000000000..a3edd2e46 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate/translate.h @@ -0,0 +1,14 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/basic_block.h" + +namespace Shader::Maxwell { + +void Translate(Environment& env, IR::Block* block, u32 location_begin, u32 location_end); + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate_program.cpp b/src/shader_recompiler/frontend/maxwell/translate_program.cpp new file mode 100755 index 000000000..2bb1d24a4 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate_program.cpp @@ -0,0 +1,216 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include +#include +#include +#include +#include + +#include "common/settings.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/post_order.h" +#include "shader_recompiler/frontend/maxwell/structured_control_flow.h" +#include "shader_recompiler/frontend/maxwell/translate/translate.h" +#include "shader_recompiler/frontend/maxwell/translate_program.h" +#include "shader_recompiler/host_translate_info.h" +#include "shader_recompiler/ir_opt/passes.h" + +namespace Shader::Maxwell { +namespace { +IR::BlockList GenerateBlocks(const IR::AbstractSyntaxList& syntax_list) { + auto syntax_blocks{syntax_list | std::views::filter([](const auto& node) { + return node.type == IR::AbstractSyntaxNode::Type::Block; + })}; + IR::BlockList blocks(std::ranges::distance(syntax_blocks)); + std::ranges::transform(syntax_blocks, blocks.begin(), + [](const IR::AbstractSyntaxNode& node) { return node.data.block; }); + return blocks; +} + +void RemoveUnreachableBlocks(IR::Program& program) { + // Some blocks might be unreachable if a function call exists unconditionally + // If this happens the number of blocks and post order blocks will mismatch + if (program.blocks.size() == program.post_order_blocks.size()) { + return; + } + const auto begin{program.blocks.begin() + 1}; + const auto end{program.blocks.end()}; + const auto pred{[](IR::Block* block) { return block->ImmPredecessors().empty(); }}; + program.blocks.erase(std::remove_if(begin, end, pred), end); +} + +void CollectInterpolationInfo(Environment& env, IR::Program& program) { + if (program.stage != Stage::Fragment) { + return; + } + const ProgramHeader& sph{env.SPH()}; + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + std::optional imap; + for (const PixelImap value : sph.ps.GenericInputMap(static_cast(index))) { + if (value == PixelImap::Unused) { + continue; + } + if (imap && imap != value) { + throw NotImplementedException("Per component interpolation"); + } + imap = value; + } + if (!imap) { + continue; + } + program.info.interpolation[index] = [&] { + switch (*imap) { + case PixelImap::Unused: + case PixelImap::Perspective: + return Interpolation::Smooth; + case PixelImap::Constant: + return Interpolation::Flat; + case PixelImap::ScreenLinear: + return Interpolation::NoPerspective; + } + throw NotImplementedException("Unknown interpolation {}", *imap); + }(); + } +} + +void AddNVNStorageBuffers(IR::Program& program) { + if (!program.info.uses_global_memory) { + return; + } + const u32 driver_cbuf{0}; + const u32 descriptor_size{0x10}; + const u32 num_buffers{16}; + const u32 base{[&] { + switch (program.stage) { + case Stage::VertexA: + case Stage::VertexB: + return 0x110u; + case Stage::TessellationControl: + return 0x210u; + case Stage::TessellationEval: + return 0x310u; + case Stage::Geometry: + return 0x410u; + case Stage::Fragment: + return 0x510u; + case Stage::Compute: + return 0x310u; + } + throw InvalidArgument("Invalid stage {}", program.stage); + }()}; + auto& descs{program.info.storage_buffers_descriptors}; + for (u32 index = 0; index < num_buffers; ++index) { + if (!program.info.nvn_buffer_used[index]) { + continue; + } + const u32 offset{base + index * descriptor_size}; + const auto it{std::ranges::find(descs, offset, &StorageBufferDescriptor::cbuf_offset)}; + if (it != descs.end()) { + it->is_written |= program.info.stores_global_memory; + continue; + } + descs.push_back({ + .cbuf_index = driver_cbuf, + .cbuf_offset = offset, + .count 
= 1, + .is_written = program.info.stores_global_memory, + }); + } +} +} // Anonymous namespace + +IR::Program TranslateProgram(ObjectPool& inst_pool, ObjectPool& block_pool, + Environment& env, Flow::CFG& cfg, const HostTranslateInfo& host_info) { + IR::Program program; + program.syntax_list = BuildASL(inst_pool, block_pool, env, cfg); + program.blocks = GenerateBlocks(program.syntax_list); + program.post_order_blocks = PostOrder(program.syntax_list.front()); + program.stage = env.ShaderStage(); + program.local_memory_size = env.LocalMemorySize(); + switch (program.stage) { + case Stage::TessellationControl: { + const ProgramHeader& sph{env.SPH()}; + program.invocations = sph.common2.threads_per_input_primitive; + break; + } + case Stage::Geometry: { + const ProgramHeader& sph{env.SPH()}; + program.output_topology = sph.common3.output_topology; + program.output_vertices = sph.common4.max_output_vertices; + program.invocations = sph.common2.threads_per_input_primitive; + program.is_geometry_passthrough = sph.common0.geometry_passthrough != 0; + if (program.is_geometry_passthrough) { + const auto& mask{env.GpPassthroughMask()}; + program.info.passthrough.mask |= ~std::bit_cast>(mask); + } + break; + } + case Stage::Compute: + program.workgroup_size = env.WorkgroupSize(); + program.shared_memory_size = env.SharedMemorySize(); + break; + default: + break; + } + RemoveUnreachableBlocks(program); + + // Replace instructions before the SSA rewrite + if (!host_info.support_float16) { + Optimization::LowerFp16ToFp32(program); + } + if (!host_info.support_int64) { + Optimization::LowerInt64ToInt32(program); + } + Optimization::SsaRewritePass(program); + + Optimization::GlobalMemoryToStorageBufferPass(program); + Optimization::TexturePass(env, program); + + Optimization::ConstantPropagationPass(program); + Optimization::DeadCodeEliminationPass(program); + if (Settings::values.renderer_debug) { + Optimization::VerificationPass(program); + } + Optimization::CollectShaderInfoPass(env, program); + CollectInterpolationInfo(env, program); + AddNVNStorageBuffers(program); + return program; +} + +IR::Program MergeDualVertexPrograms(IR::Program& vertex_a, IR::Program& vertex_b, + Environment& env_vertex_b) { + IR::Program result{}; + Optimization::VertexATransformPass(vertex_a); + Optimization::VertexBTransformPass(vertex_b); + for (const auto& term : vertex_a.syntax_list) { + if (term.type != IR::AbstractSyntaxNode::Type::Return) { + result.syntax_list.push_back(term); + } + } + result.syntax_list.insert(result.syntax_list.end(), vertex_b.syntax_list.begin(), + vertex_b.syntax_list.end()); + result.blocks = GenerateBlocks(result.syntax_list); + result.post_order_blocks = vertex_b.post_order_blocks; + for (const auto& block : vertex_a.post_order_blocks) { + result.post_order_blocks.push_back(block); + } + result.stage = Stage::VertexB; + result.info = vertex_a.info; + result.local_memory_size = std::max(vertex_a.local_memory_size, vertex_b.local_memory_size); + result.info.loads.mask |= vertex_b.info.loads.mask; + result.info.stores.mask |= vertex_b.info.stores.mask; + + Optimization::JoinTextureInfo(result.info, vertex_b.info); + Optimization::JoinStorageInfo(result.info, vertex_b.info); + Optimization::DeadCodeEliminationPass(result); + if (Settings::values.renderer_debug) { + Optimization::VerificationPass(result); + } + Optimization::CollectShaderInfoPass(env_vertex_b, result); + return result; +} + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/frontend/maxwell/translate_program.h 
b/src/shader_recompiler/frontend/maxwell/translate_program.h new file mode 100755 index 000000000..a84814811 --- /dev/null +++ b/src/shader_recompiler/frontend/maxwell/translate_program.h @@ -0,0 +1,23 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/frontend/maxwell/control_flow.h" +#include "shader_recompiler/host_translate_info.h" +#include "shader_recompiler/object_pool.h" + +namespace Shader::Maxwell { + +[[nodiscard]] IR::Program TranslateProgram(ObjectPool& inst_pool, + ObjectPool& block_pool, Environment& env, + Flow::CFG& cfg, const HostTranslateInfo& host_info); + +[[nodiscard]] IR::Program MergeDualVertexPrograms(IR::Program& vertex_a, IR::Program& vertex_b, + Environment& env_vertex_b); + +} // namespace Shader::Maxwell diff --git a/src/shader_recompiler/host_translate_info.h b/src/shader_recompiler/host_translate_info.h new file mode 100755 index 000000000..94a584219 --- /dev/null +++ b/src/shader_recompiler/host_translate_info.h @@ -0,0 +1,18 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +namespace Shader { + +// Try to keep entries here to a minimum +// They can accidentally change the cached information in a shader + +/// Misc information about the host +struct HostTranslateInfo { + bool support_float16{}; ///< True when the device supports 16-bit floats + bool support_int64{}; ///< True when the device supports 64-bit integers +}; + +} // namespace Shader diff --git a/src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp b/src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp new file mode 100755 index 000000000..5ead930f1 --- /dev/null +++ b/src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp @@ -0,0 +1,928 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
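AddNVNStorageBuffers above relies on a fixed driver layout: constant buffer 0 holds sixteen 0x10-byte storage buffer descriptors per stage, starting at a stage-specific base (0x110 for vertex, 0x210 for tessellation control, 0x310 for tessellation eval and compute, 0x410 for geometry, 0x510 for fragment). A sketch of the offset arithmetic it performs (function name is illustrative):

#include <cstdint>

constexpr std::uint32_t kDescriptorSize = 0x10; // bytes per NVN buffer descriptor
constexpr std::uint32_t kNumBuffers = 16;       // descriptors scanned per stage

// Offset of descriptor 'index' inside driver cbuf 0, as computed above.
// 'stage_base' is the per-stage value from the switch in AddNVNStorageBuffers.
std::uint32_t NvnDescriptorOffset(std::uint32_t stage_base, std::uint32_t index) {
    return stage_base + index * kDescriptorSize; // e.g. base 0x110, index 2 -> 0x130
}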
+ +#include "common/alignment.h" +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/modifiers.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/ir_opt/passes.h" +#include "shader_recompiler/shader_info.h" + +namespace Shader::Optimization { +namespace { +void AddConstantBufferDescriptor(Info& info, u32 index, u32 count) { + if (count != 1) { + throw NotImplementedException("Constant buffer descriptor indexing"); + } + if ((info.constant_buffer_mask & (1U << index)) != 0) { + return; + } + info.constant_buffer_mask |= 1U << index; + + auto& cbufs{info.constant_buffer_descriptors}; + cbufs.insert(std::ranges::lower_bound(cbufs, index, {}, &ConstantBufferDescriptor::index), + ConstantBufferDescriptor{ + .index = index, + .count = 1, + }); +} + +void GetPatch(Info& info, IR::Patch patch) { + if (!IR::IsGeneric(patch)) { + throw NotImplementedException("Reading non-generic patch {}", patch); + } + info.uses_patches.at(IR::GenericPatchIndex(patch)) = true; +} + +void SetPatch(Info& info, IR::Patch patch) { + if (IR::IsGeneric(patch)) { + info.uses_patches.at(IR::GenericPatchIndex(patch)) = true; + return; + } + switch (patch) { + case IR::Patch::TessellationLodLeft: + case IR::Patch::TessellationLodTop: + case IR::Patch::TessellationLodRight: + case IR::Patch::TessellationLodBottom: + info.stores_tess_level_outer = true; + break; + case IR::Patch::TessellationLodInteriorU: + case IR::Patch::TessellationLodInteriorV: + info.stores_tess_level_inner = true; + break; + default: + throw NotImplementedException("Set patch {}", patch); + } +} + +void CheckCBufNVN(Info& info, IR::Inst& inst) { + const IR::Value cbuf_index{inst.Arg(0)}; + if (!cbuf_index.IsImmediate()) { + info.nvn_buffer_used.set(); + return; + } + const u32 index{cbuf_index.U32()}; + if (index != 0) { + return; + } + const IR::Value cbuf_offset{inst.Arg(1)}; + if (!cbuf_offset.IsImmediate()) { + info.nvn_buffer_used.set(); + return; + } + const u32 offset{cbuf_offset.U32()}; + const u32 descriptor_size{0x10}; + const u32 upper_limit{info.nvn_buffer_base + descriptor_size * 16}; + if (offset >= info.nvn_buffer_base && offset < upper_limit) { + const std::size_t nvn_index{(offset - info.nvn_buffer_base) / descriptor_size}; + info.nvn_buffer_used.set(nvn_index, true); + } +} + +void VisitUsages(Info& info, IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::CompositeConstructF16x2: + case IR::Opcode::CompositeConstructF16x3: + case IR::Opcode::CompositeConstructF16x4: + case IR::Opcode::CompositeExtractF16x2: + case IR::Opcode::CompositeExtractF16x3: + case IR::Opcode::CompositeExtractF16x4: + case IR::Opcode::CompositeInsertF16x2: + case IR::Opcode::CompositeInsertF16x3: + case IR::Opcode::CompositeInsertF16x4: + case IR::Opcode::SelectF16: + case IR::Opcode::BitCastU16F16: + case IR::Opcode::BitCastF16U16: + case IR::Opcode::PackFloat2x16: + case IR::Opcode::UnpackFloat2x16: + case IR::Opcode::ConvertS16F16: + case IR::Opcode::ConvertS32F16: + case IR::Opcode::ConvertS64F16: + case IR::Opcode::ConvertU16F16: + case IR::Opcode::ConvertU32F16: + case IR::Opcode::ConvertU64F16: + case IR::Opcode::ConvertF16S8: + case IR::Opcode::ConvertF16S16: + case IR::Opcode::ConvertF16S32: + case IR::Opcode::ConvertF16S64: + case IR::Opcode::ConvertF16U8: + case IR::Opcode::ConvertF16U16: + case IR::Opcode::ConvertF16U32: + case IR::Opcode::ConvertF16U64: + case IR::Opcode::FPAbs16: + case IR::Opcode::FPAdd16: + case 
IR::Opcode::FPCeil16: + case IR::Opcode::FPFloor16: + case IR::Opcode::FPFma16: + case IR::Opcode::FPMul16: + case IR::Opcode::FPNeg16: + case IR::Opcode::FPRoundEven16: + case IR::Opcode::FPSaturate16: + case IR::Opcode::FPClamp16: + case IR::Opcode::FPTrunc16: + case IR::Opcode::FPOrdEqual16: + case IR::Opcode::FPUnordEqual16: + case IR::Opcode::FPOrdNotEqual16: + case IR::Opcode::FPUnordNotEqual16: + case IR::Opcode::FPOrdLessThan16: + case IR::Opcode::FPUnordLessThan16: + case IR::Opcode::FPOrdGreaterThan16: + case IR::Opcode::FPUnordGreaterThan16: + case IR::Opcode::FPOrdLessThanEqual16: + case IR::Opcode::FPUnordLessThanEqual16: + case IR::Opcode::FPOrdGreaterThanEqual16: + case IR::Opcode::FPUnordGreaterThanEqual16: + case IR::Opcode::FPIsNan16: + case IR::Opcode::GlobalAtomicAddF16x2: + case IR::Opcode::GlobalAtomicMinF16x2: + case IR::Opcode::GlobalAtomicMaxF16x2: + case IR::Opcode::StorageAtomicAddF16x2: + case IR::Opcode::StorageAtomicMinF16x2: + case IR::Opcode::StorageAtomicMaxF16x2: + info.uses_fp16 = true; + break; + case IR::Opcode::CompositeConstructF64x2: + case IR::Opcode::CompositeConstructF64x3: + case IR::Opcode::CompositeConstructF64x4: + case IR::Opcode::CompositeExtractF64x2: + case IR::Opcode::CompositeExtractF64x3: + case IR::Opcode::CompositeExtractF64x4: + case IR::Opcode::CompositeInsertF64x2: + case IR::Opcode::CompositeInsertF64x3: + case IR::Opcode::CompositeInsertF64x4: + case IR::Opcode::SelectF64: + case IR::Opcode::BitCastU64F64: + case IR::Opcode::BitCastF64U64: + case IR::Opcode::PackDouble2x32: + case IR::Opcode::UnpackDouble2x32: + case IR::Opcode::FPAbs64: + case IR::Opcode::FPAdd64: + case IR::Opcode::FPCeil64: + case IR::Opcode::FPFloor64: + case IR::Opcode::FPFma64: + case IR::Opcode::FPMax64: + case IR::Opcode::FPMin64: + case IR::Opcode::FPMul64: + case IR::Opcode::FPNeg64: + case IR::Opcode::FPRecip64: + case IR::Opcode::FPRecipSqrt64: + case IR::Opcode::FPRoundEven64: + case IR::Opcode::FPSaturate64: + case IR::Opcode::FPClamp64: + case IR::Opcode::FPTrunc64: + case IR::Opcode::FPOrdEqual64: + case IR::Opcode::FPUnordEqual64: + case IR::Opcode::FPOrdNotEqual64: + case IR::Opcode::FPUnordNotEqual64: + case IR::Opcode::FPOrdLessThan64: + case IR::Opcode::FPUnordLessThan64: + case IR::Opcode::FPOrdGreaterThan64: + case IR::Opcode::FPUnordGreaterThan64: + case IR::Opcode::FPOrdLessThanEqual64: + case IR::Opcode::FPUnordLessThanEqual64: + case IR::Opcode::FPOrdGreaterThanEqual64: + case IR::Opcode::FPUnordGreaterThanEqual64: + case IR::Opcode::FPIsNan64: + case IR::Opcode::ConvertS16F64: + case IR::Opcode::ConvertS32F64: + case IR::Opcode::ConvertS64F64: + case IR::Opcode::ConvertU16F64: + case IR::Opcode::ConvertU32F64: + case IR::Opcode::ConvertU64F64: + case IR::Opcode::ConvertF32F64: + case IR::Opcode::ConvertF64F32: + case IR::Opcode::ConvertF64S8: + case IR::Opcode::ConvertF64S16: + case IR::Opcode::ConvertF64S32: + case IR::Opcode::ConvertF64S64: + case IR::Opcode::ConvertF64U8: + case IR::Opcode::ConvertF64U16: + case IR::Opcode::ConvertF64U32: + case IR::Opcode::ConvertF64U64: + info.uses_fp64 = true; + break; + default: + break; + } + switch (inst.GetOpcode()) { + case IR::Opcode::GetCbufU8: + case IR::Opcode::GetCbufS8: + case IR::Opcode::UndefU8: + case IR::Opcode::LoadGlobalU8: + case IR::Opcode::LoadGlobalS8: + case IR::Opcode::WriteGlobalU8: + case IR::Opcode::WriteGlobalS8: + case IR::Opcode::LoadStorageU8: + case IR::Opcode::LoadStorageS8: + case IR::Opcode::WriteStorageU8: + case IR::Opcode::WriteStorageS8: + case 
IR::Opcode::LoadSharedU8: + case IR::Opcode::LoadSharedS8: + case IR::Opcode::WriteSharedU8: + case IR::Opcode::SelectU8: + case IR::Opcode::ConvertF16S8: + case IR::Opcode::ConvertF16U8: + case IR::Opcode::ConvertF32S8: + case IR::Opcode::ConvertF32U8: + case IR::Opcode::ConvertF64S8: + case IR::Opcode::ConvertF64U8: + info.uses_int8 = true; + break; + default: + break; + } + switch (inst.GetOpcode()) { + case IR::Opcode::GetCbufU16: + case IR::Opcode::GetCbufS16: + case IR::Opcode::UndefU16: + case IR::Opcode::LoadGlobalU16: + case IR::Opcode::LoadGlobalS16: + case IR::Opcode::WriteGlobalU16: + case IR::Opcode::WriteGlobalS16: + case IR::Opcode::LoadStorageU16: + case IR::Opcode::LoadStorageS16: + case IR::Opcode::WriteStorageU16: + case IR::Opcode::WriteStorageS16: + case IR::Opcode::LoadSharedU16: + case IR::Opcode::LoadSharedS16: + case IR::Opcode::WriteSharedU16: + case IR::Opcode::SelectU16: + case IR::Opcode::BitCastU16F16: + case IR::Opcode::BitCastF16U16: + case IR::Opcode::ConvertS16F16: + case IR::Opcode::ConvertS16F32: + case IR::Opcode::ConvertS16F64: + case IR::Opcode::ConvertU16F16: + case IR::Opcode::ConvertU16F32: + case IR::Opcode::ConvertU16F64: + case IR::Opcode::ConvertF16S16: + case IR::Opcode::ConvertF16U16: + case IR::Opcode::ConvertF32S16: + case IR::Opcode::ConvertF32U16: + case IR::Opcode::ConvertF64S16: + case IR::Opcode::ConvertF64U16: + info.uses_int16 = true; + break; + default: + break; + } + switch (inst.GetOpcode()) { + case IR::Opcode::UndefU64: + case IR::Opcode::LoadGlobalU8: + case IR::Opcode::LoadGlobalS8: + case IR::Opcode::LoadGlobalU16: + case IR::Opcode::LoadGlobalS16: + case IR::Opcode::LoadGlobal32: + case IR::Opcode::LoadGlobal64: + case IR::Opcode::LoadGlobal128: + case IR::Opcode::WriteGlobalU8: + case IR::Opcode::WriteGlobalS8: + case IR::Opcode::WriteGlobalU16: + case IR::Opcode::WriteGlobalS16: + case IR::Opcode::WriteGlobal32: + case IR::Opcode::WriteGlobal64: + case IR::Opcode::WriteGlobal128: + case IR::Opcode::SelectU64: + case IR::Opcode::BitCastU64F64: + case IR::Opcode::BitCastF64U64: + case IR::Opcode::PackUint2x32: + case IR::Opcode::UnpackUint2x32: + case IR::Opcode::IAdd64: + case IR::Opcode::ISub64: + case IR::Opcode::INeg64: + case IR::Opcode::ShiftLeftLogical64: + case IR::Opcode::ShiftRightLogical64: + case IR::Opcode::ShiftRightArithmetic64: + case IR::Opcode::ConvertS64F16: + case IR::Opcode::ConvertS64F32: + case IR::Opcode::ConvertS64F64: + case IR::Opcode::ConvertU64F16: + case IR::Opcode::ConvertU64F32: + case IR::Opcode::ConvertU64F64: + case IR::Opcode::ConvertU64U32: + case IR::Opcode::ConvertU32U64: + case IR::Opcode::ConvertF16U64: + case IR::Opcode::ConvertF32U64: + case IR::Opcode::ConvertF64U64: + case IR::Opcode::SharedAtomicExchange64: + case IR::Opcode::GlobalAtomicIAdd64: + case IR::Opcode::GlobalAtomicSMin64: + case IR::Opcode::GlobalAtomicUMin64: + case IR::Opcode::GlobalAtomicSMax64: + case IR::Opcode::GlobalAtomicUMax64: + case IR::Opcode::GlobalAtomicAnd64: + case IR::Opcode::GlobalAtomicOr64: + case IR::Opcode::GlobalAtomicXor64: + case IR::Opcode::GlobalAtomicExchange64: + case IR::Opcode::StorageAtomicIAdd64: + case IR::Opcode::StorageAtomicSMin64: + case IR::Opcode::StorageAtomicUMin64: + case IR::Opcode::StorageAtomicSMax64: + case IR::Opcode::StorageAtomicUMax64: + case IR::Opcode::StorageAtomicAnd64: + case IR::Opcode::StorageAtomicOr64: + case IR::Opcode::StorageAtomicXor64: + case IR::Opcode::StorageAtomicExchange64: + info.uses_int64 = true; + break; + default: + break; + } + switch 
(inst.GetOpcode()) { + case IR::Opcode::WriteGlobalU8: + case IR::Opcode::WriteGlobalS8: + case IR::Opcode::WriteGlobalU16: + case IR::Opcode::WriteGlobalS16: + case IR::Opcode::WriteGlobal32: + case IR::Opcode::WriteGlobal64: + case IR::Opcode::WriteGlobal128: + case IR::Opcode::GlobalAtomicIAdd32: + case IR::Opcode::GlobalAtomicSMin32: + case IR::Opcode::GlobalAtomicUMin32: + case IR::Opcode::GlobalAtomicSMax32: + case IR::Opcode::GlobalAtomicUMax32: + case IR::Opcode::GlobalAtomicInc32: + case IR::Opcode::GlobalAtomicDec32: + case IR::Opcode::GlobalAtomicAnd32: + case IR::Opcode::GlobalAtomicOr32: + case IR::Opcode::GlobalAtomicXor32: + case IR::Opcode::GlobalAtomicExchange32: + case IR::Opcode::GlobalAtomicIAdd64: + case IR::Opcode::GlobalAtomicSMin64: + case IR::Opcode::GlobalAtomicUMin64: + case IR::Opcode::GlobalAtomicSMax64: + case IR::Opcode::GlobalAtomicUMax64: + case IR::Opcode::GlobalAtomicAnd64: + case IR::Opcode::GlobalAtomicOr64: + case IR::Opcode::GlobalAtomicXor64: + case IR::Opcode::GlobalAtomicExchange64: + case IR::Opcode::GlobalAtomicAddF32: + case IR::Opcode::GlobalAtomicAddF16x2: + case IR::Opcode::GlobalAtomicAddF32x2: + case IR::Opcode::GlobalAtomicMinF16x2: + case IR::Opcode::GlobalAtomicMinF32x2: + case IR::Opcode::GlobalAtomicMaxF16x2: + case IR::Opcode::GlobalAtomicMaxF32x2: + info.stores_global_memory = true; + [[fallthrough]]; + case IR::Opcode::LoadGlobalU8: + case IR::Opcode::LoadGlobalS8: + case IR::Opcode::LoadGlobalU16: + case IR::Opcode::LoadGlobalS16: + case IR::Opcode::LoadGlobal32: + case IR::Opcode::LoadGlobal64: + case IR::Opcode::LoadGlobal128: + info.uses_int64 = true; + info.uses_global_memory = true; + info.used_constant_buffer_types |= IR::Type::U32 | IR::Type::U32x2; + info.used_storage_buffer_types |= IR::Type::U32 | IR::Type::U32x2 | IR::Type::U32x4; + break; + default: + break; + } + switch (inst.GetOpcode()) { + case IR::Opcode::DemoteToHelperInvocation: + info.uses_demote_to_helper_invocation = true; + break; + case IR::Opcode::GetAttribute: + info.loads.mask[static_cast<size_t>(inst.Arg(0).Attribute())] = true; + break; + case IR::Opcode::SetAttribute: + info.stores.mask[static_cast<size_t>(inst.Arg(0).Attribute())] = true; + break; + case IR::Opcode::GetPatch: + GetPatch(info, inst.Arg(0).Patch()); + break; + case IR::Opcode::SetPatch: + SetPatch(info, inst.Arg(0).Patch()); + break; + case IR::Opcode::GetAttributeIndexed: + info.loads_indexed_attributes = true; + break; + case IR::Opcode::SetAttributeIndexed: + info.stores_indexed_attributes = true; + break; + case IR::Opcode::SetFragColor: + info.stores_frag_color[inst.Arg(0).U32()] = true; + break; + case IR::Opcode::SetSampleMask: + info.stores_sample_mask = true; + break; + case IR::Opcode::SetFragDepth: + info.stores_frag_depth = true; + break; + case IR::Opcode::WorkgroupId: + info.uses_workgroup_id = true; + break; + case IR::Opcode::LocalInvocationId: + info.uses_local_invocation_id = true; + break; + case IR::Opcode::InvocationId: + info.uses_invocation_id = true; + break; + case IR::Opcode::SampleId: + info.uses_sample_id = true; + break; + case IR::Opcode::IsHelperInvocation: + info.uses_is_helper_invocation = true; + break; + case IR::Opcode::LaneId: + info.uses_subgroup_invocation_id = true; + break; + case IR::Opcode::ShuffleIndex: + case IR::Opcode::ShuffleUp: + case IR::Opcode::ShuffleDown: + case IR::Opcode::ShuffleButterfly: + info.uses_subgroup_shuffles = true; + break; + case IR::Opcode::GetCbufU8: + case IR::Opcode::GetCbufS8: + case IR::Opcode::GetCbufU16: + case 
IR::Opcode::GetCbufS16: + case IR::Opcode::GetCbufU32: + case IR::Opcode::GetCbufF32: + case IR::Opcode::GetCbufU32x2: { + const IR::Value index{inst.Arg(0)}; + const IR::Value offset{inst.Arg(1)}; + if (!index.IsImmediate()) { + throw NotImplementedException("Constant buffer with non-immediate index"); + } + AddConstantBufferDescriptor(info, index.U32(), 1); + u32 element_size{}; + switch (inst.GetOpcode()) { + case IR::Opcode::GetCbufU8: + case IR::Opcode::GetCbufS8: + info.used_constant_buffer_types |= IR::Type::U8; + element_size = 1; + break; + case IR::Opcode::GetCbufU16: + case IR::Opcode::GetCbufS16: + info.used_constant_buffer_types |= IR::Type::U16; + element_size = 2; + break; + case IR::Opcode::GetCbufU32: + info.used_constant_buffer_types |= IR::Type::U32; + element_size = 4; + break; + case IR::Opcode::GetCbufF32: + info.used_constant_buffer_types |= IR::Type::F32; + element_size = 4; + break; + case IR::Opcode::GetCbufU32x2: + info.used_constant_buffer_types |= IR::Type::U32x2; + element_size = 8; + break; + default: + break; + } + u32& size{info.constant_buffer_used_sizes[index.U32()]}; + if (offset.IsImmediate()) { + size = Common::AlignUp(std::max(size, offset.U32() + element_size), 16u); + } else { + size = 0x10'000; + } + break; + } + case IR::Opcode::BindlessImageSampleImplicitLod: + case IR::Opcode::BindlessImageSampleExplicitLod: + case IR::Opcode::BindlessImageSampleDrefImplicitLod: + case IR::Opcode::BindlessImageSampleDrefExplicitLod: + case IR::Opcode::BindlessImageGather: + case IR::Opcode::BindlessImageGatherDref: + case IR::Opcode::BindlessImageFetch: + case IR::Opcode::BindlessImageQueryDimensions: + case IR::Opcode::BindlessImageQueryLod: + case IR::Opcode::BindlessImageGradient: + case IR::Opcode::BoundImageSampleImplicitLod: + case IR::Opcode::BoundImageSampleExplicitLod: + case IR::Opcode::BoundImageSampleDrefImplicitLod: + case IR::Opcode::BoundImageSampleDrefExplicitLod: + case IR::Opcode::BoundImageGather: + case IR::Opcode::BoundImageGatherDref: + case IR::Opcode::BoundImageFetch: + case IR::Opcode::BoundImageQueryDimensions: + case IR::Opcode::BoundImageQueryLod: + case IR::Opcode::BoundImageGradient: + case IR::Opcode::ImageGather: + case IR::Opcode::ImageGatherDref: + case IR::Opcode::ImageFetch: + case IR::Opcode::ImageQueryDimensions: + case IR::Opcode::ImageGradient: { + const TextureType type{inst.Flags<IR::TextureInstInfo>().type}; + info.uses_sampled_1d |= type == TextureType::Color1D || type == TextureType::ColorArray1D; + info.uses_sparse_residency |= + inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp) != nullptr; + break; + } + case IR::Opcode::ImageSampleImplicitLod: + case IR::Opcode::ImageSampleExplicitLod: + case IR::Opcode::ImageSampleDrefImplicitLod: + case IR::Opcode::ImageSampleDrefExplicitLod: + case IR::Opcode::ImageQueryLod: { + const auto flags{inst.Flags<IR::TextureInstInfo>()}; + const TextureType type{flags.type}; + info.uses_sampled_1d |= type == TextureType::Color1D || type == TextureType::ColorArray1D; + info.uses_shadow_lod |= flags.is_depth != 0; + info.uses_sparse_residency |= + inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp) != nullptr; + break; + } + case IR::Opcode::ImageRead: { + const auto flags{inst.Flags<IR::TextureInstInfo>()}; + info.uses_typeless_image_reads |= flags.image_format == ImageFormat::Typeless; + info.uses_sparse_residency |= + inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp) != nullptr; + break; + } + case IR::Opcode::ImageWrite: { + const auto flags{inst.Flags<IR::TextureInstInfo>()}; + info.uses_typeless_image_writes |= 
flags.image_format == ImageFormat::Typeless; + info.uses_image_buffers |= flags.type == TextureType::Buffer; + break; + } + case IR::Opcode::SubgroupEqMask: + case IR::Opcode::SubgroupLtMask: + case IR::Opcode::SubgroupLeMask: + case IR::Opcode::SubgroupGtMask: + case IR::Opcode::SubgroupGeMask: + info.uses_subgroup_mask = true; + break; + case IR::Opcode::VoteAll: + case IR::Opcode::VoteAny: + case IR::Opcode::VoteEqual: + case IR::Opcode::SubgroupBallot: + info.uses_subgroup_vote = true; + break; + case IR::Opcode::FSwizzleAdd: + info.uses_fswzadd = true; + break; + case IR::Opcode::DPdxFine: + case IR::Opcode::DPdyFine: + case IR::Opcode::DPdxCoarse: + case IR::Opcode::DPdyCoarse: + info.uses_derivatives = true; + break; + case IR::Opcode::LoadStorageU8: + case IR::Opcode::LoadStorageS8: + case IR::Opcode::WriteStorageU8: + case IR::Opcode::WriteStorageS8: + info.used_storage_buffer_types |= IR::Type::U8; + break; + case IR::Opcode::LoadStorageU16: + case IR::Opcode::LoadStorageS16: + case IR::Opcode::WriteStorageU16: + case IR::Opcode::WriteStorageS16: + info.used_storage_buffer_types |= IR::Type::U16; + break; + case IR::Opcode::LoadStorage32: + case IR::Opcode::WriteStorage32: + case IR::Opcode::StorageAtomicIAdd32: + case IR::Opcode::StorageAtomicUMin32: + case IR::Opcode::StorageAtomicUMax32: + case IR::Opcode::StorageAtomicAnd32: + case IR::Opcode::StorageAtomicOr32: + case IR::Opcode::StorageAtomicXor32: + case IR::Opcode::StorageAtomicExchange32: + info.used_storage_buffer_types |= IR::Type::U32; + break; + case IR::Opcode::LoadStorage64: + case IR::Opcode::WriteStorage64: + info.used_storage_buffer_types |= IR::Type::U32x2; + break; + case IR::Opcode::LoadStorage128: + case IR::Opcode::WriteStorage128: + info.used_storage_buffer_types |= IR::Type::U32x4; + break; + case IR::Opcode::SharedAtomicSMin32: + info.uses_atomic_s32_min = true; + break; + case IR::Opcode::SharedAtomicSMax32: + info.uses_atomic_s32_max = true; + break; + case IR::Opcode::SharedAtomicInc32: + info.uses_shared_increment = true; + break; + case IR::Opcode::SharedAtomicDec32: + info.uses_shared_decrement = true; + break; + case IR::Opcode::SharedAtomicExchange64: + info.uses_int64_bit_atomics = true; + break; + case IR::Opcode::GlobalAtomicInc32: + case IR::Opcode::StorageAtomicInc32: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_global_increment = true; + break; + case IR::Opcode::GlobalAtomicDec32: + case IR::Opcode::StorageAtomicDec32: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_global_decrement = true; + break; + case IR::Opcode::GlobalAtomicAddF32: + case IR::Opcode::StorageAtomicAddF32: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_f32_add = true; + break; + case IR::Opcode::GlobalAtomicAddF16x2: + case IR::Opcode::StorageAtomicAddF16x2: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_f16x2_add = true; + break; + case IR::Opcode::GlobalAtomicAddF32x2: + case IR::Opcode::StorageAtomicAddF32x2: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_f32x2_add = true; + break; + case IR::Opcode::GlobalAtomicMinF16x2: + case IR::Opcode::StorageAtomicMinF16x2: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_f16x2_min = true; + break; + case IR::Opcode::GlobalAtomicMinF32x2: + case IR::Opcode::StorageAtomicMinF32x2: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_f32x2_min = true; + break; + case IR::Opcode::GlobalAtomicMaxF16x2: + case IR::Opcode::StorageAtomicMaxF16x2: 
+ info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_f16x2_max = true; + break; + case IR::Opcode::GlobalAtomicMaxF32x2: + case IR::Opcode::StorageAtomicMaxF32x2: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_f32x2_max = true; + break; + case IR::Opcode::StorageAtomicSMin32: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_s32_min = true; + break; + case IR::Opcode::StorageAtomicSMax32: + info.used_storage_buffer_types |= IR::Type::U32; + info.uses_atomic_s32_max = true; + break; + case IR::Opcode::GlobalAtomicIAdd64: + case IR::Opcode::GlobalAtomicSMin64: + case IR::Opcode::GlobalAtomicUMin64: + case IR::Opcode::GlobalAtomicSMax64: + case IR::Opcode::GlobalAtomicUMax64: + case IR::Opcode::GlobalAtomicAnd64: + case IR::Opcode::GlobalAtomicOr64: + case IR::Opcode::GlobalAtomicXor64: + case IR::Opcode::GlobalAtomicExchange64: + case IR::Opcode::StorageAtomicIAdd64: + case IR::Opcode::StorageAtomicSMin64: + case IR::Opcode::StorageAtomicUMin64: + case IR::Opcode::StorageAtomicSMax64: + case IR::Opcode::StorageAtomicUMax64: + case IR::Opcode::StorageAtomicAnd64: + case IR::Opcode::StorageAtomicOr64: + case IR::Opcode::StorageAtomicXor64: + info.used_storage_buffer_types |= IR::Type::U64; + info.uses_int64_bit_atomics = true; + break; + case IR::Opcode::BindlessImageAtomicIAdd32: + case IR::Opcode::BindlessImageAtomicSMin32: + case IR::Opcode::BindlessImageAtomicUMin32: + case IR::Opcode::BindlessImageAtomicSMax32: + case IR::Opcode::BindlessImageAtomicUMax32: + case IR::Opcode::BindlessImageAtomicInc32: + case IR::Opcode::BindlessImageAtomicDec32: + case IR::Opcode::BindlessImageAtomicAnd32: + case IR::Opcode::BindlessImageAtomicOr32: + case IR::Opcode::BindlessImageAtomicXor32: + case IR::Opcode::BindlessImageAtomicExchange32: + case IR::Opcode::BoundImageAtomicIAdd32: + case IR::Opcode::BoundImageAtomicSMin32: + case IR::Opcode::BoundImageAtomicUMin32: + case IR::Opcode::BoundImageAtomicSMax32: + case IR::Opcode::BoundImageAtomicUMax32: + case IR::Opcode::BoundImageAtomicInc32: + case IR::Opcode::BoundImageAtomicDec32: + case IR::Opcode::BoundImageAtomicAnd32: + case IR::Opcode::BoundImageAtomicOr32: + case IR::Opcode::BoundImageAtomicXor32: + case IR::Opcode::BoundImageAtomicExchange32: + case IR::Opcode::ImageAtomicIAdd32: + case IR::Opcode::ImageAtomicSMin32: + case IR::Opcode::ImageAtomicUMin32: + case IR::Opcode::ImageAtomicSMax32: + case IR::Opcode::ImageAtomicUMax32: + case IR::Opcode::ImageAtomicInc32: + case IR::Opcode::ImageAtomicDec32: + case IR::Opcode::ImageAtomicAnd32: + case IR::Opcode::ImageAtomicOr32: + case IR::Opcode::ImageAtomicXor32: + case IR::Opcode::ImageAtomicExchange32: + info.uses_atomic_image_u32 = true; + break; + default: + break; + } +} + +void VisitFpModifiers(Info& info, IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::FPAdd16: + case IR::Opcode::FPFma16: + case IR::Opcode::FPMul16: + case IR::Opcode::FPRoundEven16: + case IR::Opcode::FPFloor16: + case IR::Opcode::FPCeil16: + case IR::Opcode::FPTrunc16: { + const auto control{inst.Flags<IR::FpControl>()}; + switch (control.fmz_mode) { + case IR::FmzMode::DontCare: + break; + case IR::FmzMode::FTZ: + case IR::FmzMode::FMZ: + info.uses_fp16_denorms_flush = true; + break; + case IR::FmzMode::None: + info.uses_fp16_denorms_preserve = true; + break; + } + break; + } + case IR::Opcode::FPAdd32: + case IR::Opcode::FPFma32: + case IR::Opcode::FPMul32: + case IR::Opcode::FPRoundEven32: + case IR::Opcode::FPFloor32: + case IR::Opcode::FPCeil32: + case 
IR::Opcode::FPTrunc32: + case IR::Opcode::FPOrdEqual32: + case IR::Opcode::FPUnordEqual32: + case IR::Opcode::FPOrdNotEqual32: + case IR::Opcode::FPUnordNotEqual32: + case IR::Opcode::FPOrdLessThan32: + case IR::Opcode::FPUnordLessThan32: + case IR::Opcode::FPOrdGreaterThan32: + case IR::Opcode::FPUnordGreaterThan32: + case IR::Opcode::FPOrdLessThanEqual32: + case IR::Opcode::FPUnordLessThanEqual32: + case IR::Opcode::FPOrdGreaterThanEqual32: + case IR::Opcode::FPUnordGreaterThanEqual32: + case IR::Opcode::ConvertF16F32: + case IR::Opcode::ConvertF64F32: { + const auto control{inst.Flags<IR::FpControl>()}; + switch (control.fmz_mode) { + case IR::FmzMode::DontCare: + break; + case IR::FmzMode::FTZ: + case IR::FmzMode::FMZ: + info.uses_fp32_denorms_flush = true; + break; + case IR::FmzMode::None: + info.uses_fp32_denorms_preserve = true; + break; + } + break; + } + default: + break; + } +} + +void VisitCbufs(Info& info, IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::GetCbufU8: + case IR::Opcode::GetCbufS8: + case IR::Opcode::GetCbufU16: + case IR::Opcode::GetCbufS16: + case IR::Opcode::GetCbufU32: + case IR::Opcode::GetCbufF32: + case IR::Opcode::GetCbufU32x2: { + CheckCBufNVN(info, inst); + break; + } + default: + break; + } +} + +void Visit(Info& info, IR::Inst& inst) { + VisitUsages(info, inst); + VisitFpModifiers(info, inst); + VisitCbufs(info, inst); +} + +void GatherInfoFromHeader(Environment& env, Info& info) { + Stage stage{env.ShaderStage()}; + if (stage == Stage::Compute) { + return; + } + const auto& header{env.SPH()}; + if (stage == Stage::Fragment) { + if (!info.loads_indexed_attributes) { + return; + } + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + const size_t offset{static_cast<size_t>(IR::Attribute::Generic0X) + index * 4}; + const auto vector{header.ps.imap_generic_vector[index]}; + info.loads.mask[offset + 0] = vector.x != PixelImap::Unused; + info.loads.mask[offset + 1] = vector.y != PixelImap::Unused; + info.loads.mask[offset + 2] = vector.z != PixelImap::Unused; + info.loads.mask[offset + 3] = vector.w != PixelImap::Unused; + } + return; + } + if (info.loads_indexed_attributes) { + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + const IR::Attribute attribute{IR::Attribute::Generic0X + index * 4}; + const auto mask = header.vtg.InputGeneric(index); + for (size_t i = 0; i < 4; ++i) { + info.loads.Set(attribute + i, mask[i]); + } + } + for (size_t index = 0; index < 8; ++index) { + const u16 mask{header.vtg.clip_distances}; + info.loads.Set(IR::Attribute::ClipDistance0 + index, ((mask >> index) & 1) != 0); + } + info.loads.Set(IR::Attribute::PrimitiveId, header.vtg.imap_systemb.primitive_array_id != 0); + info.loads.Set(IR::Attribute::Layer, header.vtg.imap_systemb.rt_array_index != 0); + info.loads.Set(IR::Attribute::ViewportIndex, header.vtg.imap_systemb.viewport_index != 0); + info.loads.Set(IR::Attribute::PointSize, header.vtg.imap_systemb.point_size != 0); + info.loads.Set(IR::Attribute::PositionX, header.vtg.imap_systemb.position_x != 0); + info.loads.Set(IR::Attribute::PositionY, header.vtg.imap_systemb.position_y != 0); + info.loads.Set(IR::Attribute::PositionZ, header.vtg.imap_systemb.position_z != 0); + info.loads.Set(IR::Attribute::PositionW, header.vtg.imap_systemb.position_w != 0); + info.loads.Set(IR::Attribute::PointSpriteS, header.vtg.point_sprite_s != 0); + info.loads.Set(IR::Attribute::PointSpriteT, header.vtg.point_sprite_t != 0); + info.loads.Set(IR::Attribute::FogCoordinate, header.vtg.fog_coordinate != 0); + 
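The header-driven bookkeeping here works in groups of four: each generic attribute owns four consecutive slots in the load/store mask, one per component. A minimal standalone sketch of that indexing; the base slot and mask size below are invented for illustration and are not values from the patch:

```cpp
#include <array>
#include <cstddef>

// Hypothetical stand-ins: the real pass indexes Info::loads/stores by IR::Attribute.
constexpr std::size_t GENERIC0_BASE = 0x80; // assumed slot of Generic0X
constexpr std::size_t NUM_SLOTS = 0x100;    // assumed mask size

struct VaryingMask {
    std::array<bool, NUM_SLOTS> mask{};
    // Each generic attribute owns four consecutive slots: X, Y, Z, W.
    void SetGenericComponent(std::size_t generic, std::size_t component, bool used) {
        mask[GENERIC0_BASE + generic * 4 + component] = used;
    }
};

int main() {
    VaryingMask loads;
    loads.SetGenericComponent(2, 1, true); // Generic2.Y
    return loads.mask[GENERIC0_BASE + 2 * 4 + 1] ? 0 : 1;
}
```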
info.loads.Set(IR::Attribute::TessellationEvaluationPointU, + header.vtg.tessellation_eval_point_u != 0); + info.loads.Set(IR::Attribute::TessellationEvaluationPointV, + header.vtg.tessellation_eval_point_v != 0); + info.loads.Set(IR::Attribute::InstanceId, header.vtg.instance_id != 0); + info.loads.Set(IR::Attribute::VertexId, header.vtg.vertex_id != 0); + // TODO: Legacy varyings + } + if (info.stores_indexed_attributes) { + for (size_t index = 0; index < IR::NUM_GENERICS; ++index) { + const IR::Attribute attribute{IR::Attribute::Generic0X + index * 4}; + const auto mask{header.vtg.OutputGeneric(index)}; + for (size_t i = 0; i < 4; ++i) { + info.stores.Set(attribute + i, mask[i]); + } + } + for (size_t index = 0; index < 8; ++index) { + const u16 mask{header.vtg.omap_systemc.clip_distances}; + info.stores.Set(IR::Attribute::ClipDistance0 + index, ((mask >> index) & 1) != 0); + } + info.stores.Set(IR::Attribute::PrimitiveId, + header.vtg.omap_systemb.primitive_array_id != 0); + info.stores.Set(IR::Attribute::Layer, header.vtg.omap_systemb.rt_array_index != 0); + info.stores.Set(IR::Attribute::ViewportIndex, header.vtg.omap_systemb.viewport_index != 0); + info.stores.Set(IR::Attribute::PointSize, header.vtg.omap_systemb.point_size != 0); + info.stores.Set(IR::Attribute::PositionX, header.vtg.omap_systemb.position_x != 0); + info.stores.Set(IR::Attribute::PositionY, header.vtg.omap_systemb.position_y != 0); + info.stores.Set(IR::Attribute::PositionZ, header.vtg.omap_systemb.position_z != 0); + info.stores.Set(IR::Attribute::PositionW, header.vtg.omap_systemb.position_w != 0); + info.stores.Set(IR::Attribute::PointSpriteS, header.vtg.omap_systemc.point_sprite_s != 0); + info.stores.Set(IR::Attribute::PointSpriteT, header.vtg.omap_systemc.point_sprite_t != 0); + info.stores.Set(IR::Attribute::FogCoordinate, header.vtg.omap_systemc.fog_coordinate != 0); + info.stores.Set(IR::Attribute::TessellationEvaluationPointU, + header.vtg.omap_systemc.tessellation_eval_point_u != 0); + info.stores.Set(IR::Attribute::TessellationEvaluationPointV, + header.vtg.omap_systemc.tessellation_eval_point_v != 0); + info.stores.Set(IR::Attribute::InstanceId, header.vtg.omap_systemc.instance_id != 0); + info.stores.Set(IR::Attribute::VertexId, header.vtg.omap_systemc.vertex_id != 0); + // TODO: Legacy varyings + } +} +} // Anonymous namespace + +void CollectShaderInfoPass(Environment& env, IR::Program& program) { + Info& info{program.info}; + const u32 base{[&] { + switch (program.stage) { + case Stage::VertexA: + case Stage::VertexB: + return 0x110u; + case Stage::TessellationControl: + return 0x210u; + case Stage::TessellationEval: + return 0x310u; + case Stage::Geometry: + return 0x410u; + case Stage::Fragment: + return 0x510u; + case Stage::Compute: + return 0x310u; + } + throw InvalidArgument("Invalid stage {}", program.stage); + }()}; + info.nvn_buffer_base = base; + + for (IR::Block* const block : program.post_order_blocks) { + for (IR::Inst& inst : block->Instructions()) { + Visit(info, inst); + } + } + GatherInfoFromHeader(env, info); +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp new file mode 100755 index 000000000..3c72203ad --- /dev/null +++ b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp @@ -0,0 +1,609 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
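The pass that begins here folds instructions whose operands are all immediates and rewrites recognizable patterns in place. A minimal sketch of the core idea, with simplified stand-in types rather than the real IR classes:

```cpp
#include <cstdint>
#include <optional>

// Simplified stand-in for IR::Value: the real pass calls
// inst.ReplaceUsesWith(IR::Value{result}) when every operand is an immediate.
struct Value {
    bool is_imm;
    std::uint32_t imm;
};

// Fold IAdd32 when possible; std::nullopt means "leave the instruction alone".
std::optional<std::uint32_t> FoldIAdd32(const Value& a, const Value& b) {
    if (a.is_imm && b.is_imm) {
        return a.imm + b.imm; // wraps modulo 2^32, matching IAdd32 semantics
    }
    return std::nullopt;
}

int main() {
    return FoldIAdd32({true, 2}, {true, 3}).value() == 5 ? 0 : 1;
}
```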
+ +#include <algorithm> +#include <ranges> +#include <tuple> +#include <type_traits> + +#include "common/bit_cast.h" +#include "common/bit_util.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/ir_opt/passes.h" + +namespace Shader::Optimization { +namespace { +// Metaprogramming stuff to get arguments information out of a lambda +template <typename Func> +struct LambdaTraits : LambdaTraits<decltype(&std::remove_reference_t<Func>::operator())> {}; + +template <typename ReturnType, typename LambdaType, typename... Args> +struct LambdaTraits<ReturnType (LambdaType::*)(Args...) const> { + template <size_t I> + using ArgType = std::tuple_element_t<I, std::tuple<Args...>>; + + static constexpr size_t NUM_ARGS{sizeof...(Args)}; +}; + +template <typename T> +[[nodiscard]] T Arg(const IR::Value& value) { + if constexpr (std::is_same_v<T, bool>) { + return value.U1(); + } else if constexpr (std::is_same_v<T, u32>) { + return value.U32(); + } else if constexpr (std::is_same_v<T, s32>) { + return static_cast<s32>(value.U32()); + } else if constexpr (std::is_same_v<T, f32>) { + return value.F32(); + } else if constexpr (std::is_same_v<T, u64>) { + return value.U64(); + } +} + +template <typename T, typename ImmFn> +bool FoldCommutative(IR::Inst& inst, ImmFn&& imm_fn) { + const IR::Value lhs{inst.Arg(0)}; + const IR::Value rhs{inst.Arg(1)}; + + const bool is_lhs_immediate{lhs.IsImmediate()}; + const bool is_rhs_immediate{rhs.IsImmediate()}; + + if (is_lhs_immediate && is_rhs_immediate) { + const auto result{imm_fn(Arg<T>(lhs), Arg<T>(rhs))}; + inst.ReplaceUsesWith(IR::Value{result}); + return false; + } + if (is_lhs_immediate && !is_rhs_immediate) { + IR::Inst* const rhs_inst{rhs.InstRecursive()}; + if (rhs_inst->GetOpcode() == inst.GetOpcode() && rhs_inst->Arg(1).IsImmediate()) { + const auto combined{imm_fn(Arg<T>(lhs), Arg<T>(rhs_inst->Arg(1)))}; + inst.SetArg(0, rhs_inst->Arg(0)); + inst.SetArg(1, IR::Value{combined}); + } else { + // Normalize + inst.SetArg(0, rhs); + inst.SetArg(1, lhs); + } + } + if (!is_lhs_immediate && is_rhs_immediate) { + const IR::Inst* const lhs_inst{lhs.InstRecursive()}; + if (lhs_inst->GetOpcode() == inst.GetOpcode() && lhs_inst->Arg(1).IsImmediate()) { + const auto combined{imm_fn(Arg<T>(rhs), Arg<T>(lhs_inst->Arg(1)))}; + inst.SetArg(0, lhs_inst->Arg(0)); + inst.SetArg(1, IR::Value{combined}); + } + } + return true; +} + +template <typename Func> +bool FoldWhenAllImmediates(IR::Inst& inst, Func&& func) { + if (!inst.AreAllArgsImmediates() || inst.HasAssociatedPseudoOperation()) { + return false; + } + using Indices = std::make_index_sequence<LambdaTraits<decltype(func)>::NUM_ARGS>; + inst.ReplaceUsesWith(EvalImmediates(inst, func, Indices{})); + return true; +}
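FoldWhenAllImmediates leans on LambdaTraits so each fold can be written as a plain lambda: the traits recover the arity and parameter types from the lambda's call operator, and Arg converts each IR::Value operand accordingly. A reduced, compilable illustration of the same trick:

```cpp
#include <cstddef>
#include <cstdint>
#include <tuple>
#include <type_traits>

// Same shape as LambdaTraits above: peel the parameter list off the lambda's
// operator() so a folder knows how many arguments to read and their types.
template <typename Func>
struct Traits : Traits<decltype(&std::remove_reference_t<Func>::operator())> {};

template <typename R, typename L, typename... Args>
struct Traits<R (L::*)(Args...) const> {
    template <std::size_t I>
    using ArgType = std::tuple_element_t<I, std::tuple<Args...>>;
    static constexpr std::size_t NUM_ARGS = sizeof...(Args);
};

int main() {
    auto fold = [](std::uint32_t, std::int32_t) { return 0u; };
    using T = Traits<decltype(fold)>;
    static_assert(T::NUM_ARGS == 2);
    static_assert(std::is_same_v<T::ArgType<1>, std::int32_t>);
    return 0;
}
```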
+ +void FoldGetRegister(IR::Inst& inst) { + if (inst.Arg(0).Reg() == IR::Reg::RZ) { + inst.ReplaceUsesWith(IR::Value{u32{0}}); + } +} + +void FoldGetPred(IR::Inst& inst) { + if (inst.Arg(0).Pred() == IR::Pred::PT) { + inst.ReplaceUsesWith(IR::Value{true}); + } +} + +/// Replaces the pattern generated by two XMAD multiplications +bool FoldXmadMultiply(IR::Block& block, IR::Inst& inst) { + /* + * We are looking for this pattern: + * %rhs_bfe = BitFieldUExtract %factor_a, #0, #16 + * %rhs_mul = IMul32 %rhs_bfe, %factor_b + * %lhs_bfe = BitFieldUExtract %factor_a, #16, #16 + * %lhs_mul = IMul32 %lhs_bfe, %factor_b + * %lhs_shl = ShiftLeftLogical32 %lhs_mul, #16 + * %result = IAdd32 %lhs_shl, %rhs_mul + * + * And replacing it with + * %result = IMul32 %factor_a, %factor_b + * + * This optimization has been proven safe by LLVM and MSVC. + */ + const IR::Value lhs_arg{inst.Arg(0)}; + const IR::Value rhs_arg{inst.Arg(1)}; + if (lhs_arg.IsImmediate() || rhs_arg.IsImmediate()) { + return false; + } + IR::Inst* const lhs_shl{lhs_arg.InstRecursive()}; + if (lhs_shl->GetOpcode() != IR::Opcode::ShiftLeftLogical32 || + lhs_shl->Arg(1) != IR::Value{16U}) { + return false; + } + if (lhs_shl->Arg(0).IsImmediate()) { + return false; + } + IR::Inst* const lhs_mul{lhs_shl->Arg(0).InstRecursive()}; + IR::Inst* const rhs_mul{rhs_arg.InstRecursive()}; + if (lhs_mul->GetOpcode() != IR::Opcode::IMul32 || rhs_mul->GetOpcode() != IR::Opcode::IMul32) { + return false; + } + if (lhs_mul->Arg(1).Resolve() != rhs_mul->Arg(1).Resolve()) { + return false; + } + const IR::U32 factor_b{lhs_mul->Arg(1)}; + if (lhs_mul->Arg(0).IsImmediate() || rhs_mul->Arg(0).IsImmediate()) { + return false; + } + IR::Inst* const lhs_bfe{lhs_mul->Arg(0).InstRecursive()}; + IR::Inst* const rhs_bfe{rhs_mul->Arg(0).InstRecursive()}; + if (lhs_bfe->GetOpcode() != IR::Opcode::BitFieldUExtract) { + return false; + } + if (rhs_bfe->GetOpcode() != IR::Opcode::BitFieldUExtract) { + return false; + } + if (lhs_bfe->Arg(1) != IR::Value{16U} || lhs_bfe->Arg(2) != IR::Value{16U}) { + return false; + } + if (rhs_bfe->Arg(1) != IR::Value{0U} || rhs_bfe->Arg(2) != IR::Value{16U}) { + return false; + } + if (lhs_bfe->Arg(0).Resolve() != rhs_bfe->Arg(0).Resolve()) { + return false; + } + const IR::U32 factor_a{lhs_bfe->Arg(0)}; + IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)}; + inst.ReplaceUsesWith(ir.IMul(factor_a, factor_b)); + return true; +} + +template <typename T> +void FoldAdd(IR::Block& block, IR::Inst& inst) { + if (inst.HasAssociatedPseudoOperation()) { + return; + } + if (!FoldCommutative<T>(inst, [](T a, T b) { return a + b; })) { + return; + } + const IR::Value rhs{inst.Arg(1)}; + if (rhs.IsImmediate() && Arg<T>(rhs) == 0) { + inst.ReplaceUsesWith(inst.Arg(0)); + return; + } + if constexpr (std::is_same_v<T, u32>) { + if (FoldXmadMultiply(block, inst)) { + return; + } + } +}
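The XMAD rewrite is sound because unsigned 32-bit arithmetic wraps modulo 2^32: splitting one factor into 16-bit halves, multiplying each half, and recombining gives exactly the full product. A quick check of that identity:

```cpp
#include <cassert>
#include <cstdint>

// Identity behind FoldXmadMultiply: (a_lo * b) + ((a_hi * b) << 16) == a * b,
// where a_lo/a_hi are the 16-bit halves of a. Overflow in the partial
// products is harmless because every operation wraps the same way the IR's
// 32-bit ops do.
int main() {
    const std::uint32_t a = 0xDEADBEEF;
    const std::uint32_t b = 0x12345678;
    const std::uint32_t a_lo = a & 0xFFFF;         // BitFieldUExtract a, 0, 16
    const std::uint32_t a_hi = (a >> 16) & 0xFFFF; // BitFieldUExtract a, 16, 16
    const std::uint32_t pattern = ((a_hi * b) << 16) + a_lo * b;
    assert(pattern == a * b);
    return 0;
}
```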
+ +void FoldISub32(IR::Inst& inst) { + if (FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a - b; })) { + return; + } + if (inst.Arg(0).IsImmediate() || inst.Arg(1).IsImmediate()) { + return; + } + // ISub32 is generally used to subtract two constant buffer reads; compare them and replace + // the result with zero when they are equal. + const auto equal_cbuf{[](IR::Inst* a, IR::Inst* b) { + return a->GetOpcode() == IR::Opcode::GetCbufU32 && + b->GetOpcode() == IR::Opcode::GetCbufU32 && a->Arg(0) == b->Arg(0) && + a->Arg(1) == b->Arg(1); + }}; + IR::Inst* op_a{inst.Arg(0).InstRecursive()}; + IR::Inst* op_b{inst.Arg(1).InstRecursive()}; + if (equal_cbuf(op_a, op_b)) { + inst.ReplaceUsesWith(IR::Value{u32{0}}); + return; + } + // It's also possible a value is being added to a cbuf and then subtracted + if (op_b->GetOpcode() == IR::Opcode::IAdd32) { + // Canonicalize local variables to simplify the following logic + std::swap(op_a, op_b); + } + if (op_b->GetOpcode() != IR::Opcode::GetCbufU32) { + return; + } + IR::Inst* const inst_cbuf{op_b}; + if (op_a->GetOpcode() != IR::Opcode::IAdd32) { + return; + } + IR::Value add_op_a{op_a->Arg(0)}; + IR::Value add_op_b{op_a->Arg(1)}; + if (add_op_b.IsImmediate()) { + // Canonicalize + std::swap(add_op_a, add_op_b); + } + if (add_op_b.IsImmediate()) { + return; + } + IR::Inst* const add_cbuf{add_op_b.InstRecursive()}; + if (equal_cbuf(add_cbuf, inst_cbuf)) { + inst.ReplaceUsesWith(add_op_a); + } +} + +void FoldSelect(IR::Inst& inst) { + const IR::Value cond{inst.Arg(0)}; + if (cond.IsImmediate()) { + inst.ReplaceUsesWith(cond.U1() ? inst.Arg(1) : inst.Arg(2)); + } +} + +void FoldFPMul32(IR::Inst& inst) { + const auto control{inst.Flags<IR::FpControl>()}; + if (control.no_contraction) { + return; + } + // Fold interpolation operations + const IR::Value lhs_value{inst.Arg(0)}; + const IR::Value rhs_value{inst.Arg(1)}; + if (lhs_value.IsImmediate() || rhs_value.IsImmediate()) { + return; + } + IR::Inst* const lhs_op{lhs_value.InstRecursive()}; + IR::Inst* const rhs_op{rhs_value.InstRecursive()}; + if (lhs_op->GetOpcode() != IR::Opcode::FPMul32 || + rhs_op->GetOpcode() != IR::Opcode::FPRecip32) { + return; + } + const IR::Value recip_source{rhs_op->Arg(0)}; + const IR::Value lhs_mul_source{lhs_op->Arg(1).Resolve()}; + if (recip_source.IsImmediate() || lhs_mul_source.IsImmediate()) { + return; + } + IR::Inst* const attr_a{recip_source.InstRecursive()}; + IR::Inst* const attr_b{lhs_mul_source.InstRecursive()}; + if (attr_a->GetOpcode() != IR::Opcode::GetAttribute || + attr_b->GetOpcode() != IR::Opcode::GetAttribute) { + return; + } + if (attr_a->Arg(0).Attribute() == attr_b->Arg(0).Attribute()) { + inst.ReplaceUsesWith(lhs_op->Arg(0)); + } +} + +void FoldLogicalAnd(IR::Inst& inst) { + if (!FoldCommutative<bool>(inst, [](bool a, bool b) { return a && b; })) { + return; + } + const IR::Value rhs{inst.Arg(1)}; + if (rhs.IsImmediate()) { + if (rhs.U1()) { + inst.ReplaceUsesWith(inst.Arg(0)); + } else { + inst.ReplaceUsesWith(IR::Value{false}); + } + } +} + +void FoldLogicalOr(IR::Inst& inst) { + if (!FoldCommutative<bool>(inst, [](bool a, bool b) { return a || b; })) { + return; + } + const IR::Value rhs{inst.Arg(1)}; + if (rhs.IsImmediate()) { + if (rhs.U1()) { + inst.ReplaceUsesWith(IR::Value{true}); + } else { + inst.ReplaceUsesWith(inst.Arg(0)); + } + } +} + +void FoldLogicalNot(IR::Inst& inst) { + const IR::U1 value{inst.Arg(0)}; + if (value.IsImmediate()) { + inst.ReplaceUsesWith(IR::Value{!value.U1()}); + return; + } + IR::Inst* const arg{value.InstRecursive()}; + if (arg->GetOpcode() == IR::Opcode::LogicalNot) { + inst.ReplaceUsesWith(arg->Arg(0)); + } +}
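FoldFPMul32 cancels the attribute-times-reciprocal shape that perspective interpolation produces, rewriting (x * w) * (1 / w) to plain x. That is a value-changing rewrite in IEEE-754, which is why it is skipped when the instruction carries the no_contraction flag. A small demonstration that the round trip is not bit-exact:

```cpp
#include <cstdio>

// The fold assumes the cancellation is acceptable; numerically it is only
// approximate, as this prints on most platforms.
int main() {
    const float x = 0.1f;
    const float w = 3.0f;
    const float round_trip = (x * w) * (1.0f / w);
    std::printf("x=%.9g round_trip=%.9g bit_exact=%d\n", x, round_trip, x == round_trip);
    return 0;
}
```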
+ +template <IR::Opcode op, typename Dest, typename Source> +void FoldBitCast(IR::Inst& inst, IR::Opcode reverse) { + const IR::Value value{inst.Arg(0)}; + if (value.IsImmediate()) { + inst.ReplaceUsesWith(IR::Value{Common::BitCast<Dest>(Arg<Source>(value))}); + return; + } + IR::Inst* const arg_inst{value.InstRecursive()}; + if (arg_inst->GetOpcode() == reverse) { + inst.ReplaceUsesWith(arg_inst->Arg(0)); + return; + } + if constexpr (op == IR::Opcode::BitCastF32U32) { + if (arg_inst->GetOpcode() == IR::Opcode::GetCbufU32) { + // Replace the bitcast with a typed constant buffer read + inst.ReplaceOpcode(IR::Opcode::GetCbufF32); + inst.SetArg(0, arg_inst->Arg(0)); + inst.SetArg(1, arg_inst->Arg(1)); + return; + } + } +} + +void FoldInverseFunc(IR::Inst& inst, IR::Opcode reverse) { + const IR::Value value{inst.Arg(0)}; + if (value.IsImmediate()) { + return; + } + IR::Inst* const arg_inst{value.InstRecursive()}; + if (arg_inst->GetOpcode() == reverse) { + inst.ReplaceUsesWith(arg_inst->Arg(0)); + return; + } +} + +template <typename Func, size_t... I> +IR::Value EvalImmediates(const IR::Inst& inst, Func&& func, std::index_sequence<I...>) { + using Traits = LambdaTraits<decltype(func)>; + return IR::Value{func(Arg<typename Traits::template ArgType<I>>(inst.Arg(I))...)}; +} + +std::optional<IR::Value> FoldCompositeExtractImpl(IR::Value inst_value, IR::Opcode insert, + IR::Opcode construct, u32 first_index) { + IR::Inst* const inst{inst_value.InstRecursive()}; + if (inst->GetOpcode() == construct) { + return inst->Arg(first_index); + } + if (inst->GetOpcode() != insert) { + return std::nullopt; + } + IR::Value value_index{inst->Arg(2)}; + if (!value_index.IsImmediate()) { + return std::nullopt; + } + const u32 second_index{value_index.U32()}; + if (first_index != second_index) { + IR::Value value_composite{inst->Arg(0)}; + if (value_composite.IsImmediate()) { + return std::nullopt; + } + return FoldCompositeExtractImpl(value_composite, insert, construct, first_index); + } + return inst->Arg(1); +} + +void FoldCompositeExtract(IR::Inst& inst, IR::Opcode construct, IR::Opcode insert) { + const IR::Value value_1{inst.Arg(0)}; + const IR::Value value_2{inst.Arg(1)}; + if (value_1.IsImmediate()) { + return; + } + if (!value_2.IsImmediate()) { + return; + } + const u32 first_index{value_2.U32()}; + const std::optional<IR::Value> result{FoldCompositeExtractImpl(value_1, insert, construct, first_index)}; + if (!result) { + return; + } + inst.ReplaceUsesWith(*result); +} + +IR::Value GetThroughCast(IR::Value value, IR::Opcode expected_cast) { + if (value.IsImmediate()) { + return value; + } + IR::Inst* const inst{value.InstRecursive()}; + if (inst->GetOpcode() == expected_cast) { + return inst->Arg(0).Resolve(); + } + return value; +}
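FoldCompositeExtract walks backwards through CompositeInsert instructions: an insert at the queried index supplies the value directly, inserts at other indices are skipped, and reaching the original construct yields its element. A toy model of that walk, with simple containers standing in for the IR graph:

```cpp
#include <cassert>
#include <cstdint>
#include <optional>
#include <vector>

// `chain` lists inserts newest-first (the order InstRecursive would visit);
// `construct` holds the elements of the original CompositeConstruct.
struct Insert {
    std::uint32_t index;
    std::uint32_t value;
};

std::optional<std::uint32_t> Extract(const std::vector<Insert>& chain,
                                     const std::vector<std::uint32_t>& construct,
                                     std::uint32_t index) {
    for (const Insert& ins : chain) {
        if (ins.index == index) {
            return ins.value; // the newest insert at this index wins
        }
        // otherwise skip past it, like the recursive call in the pass
    }
    if (index < construct.size()) {
        return construct[index]; // fell through to the construct's element
    }
    return std::nullopt;
}

int main() {
    const std::vector<std::uint32_t> construct{10, 20};
    const std::vector<Insert> chain{{1, 99}}; // one insert at index 1
    assert(Extract(chain, construct, 1) == 99u);
    assert(Extract(chain, construct, 0) == 10u);
    return 0;
}
```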
+ +void FoldFSwizzleAdd(IR::Block& block, IR::Inst& inst) { + const IR::Value swizzle{inst.Arg(2)}; + if (!swizzle.IsImmediate()) { + return; + } + const IR::Value value_1{GetThroughCast(inst.Arg(0).Resolve(), IR::Opcode::BitCastF32U32)}; + const IR::Value value_2{GetThroughCast(inst.Arg(1).Resolve(), IR::Opcode::BitCastF32U32)}; + if (value_1.IsImmediate()) { + return; + } + const u32 swizzle_value{swizzle.U32()}; + if (swizzle_value != 0x99 && swizzle_value != 0xA5) { + return; + } + IR::Inst* const inst2{value_1.InstRecursive()}; + if (inst2->GetOpcode() != IR::Opcode::ShuffleButterfly) { + return; + } + const IR::Value value_3{GetThroughCast(inst2->Arg(0).Resolve(), IR::Opcode::BitCastU32F32)}; + if (value_2 != value_3) { + return; + } + const IR::Value index{inst2->Arg(1)}; + const IR::Value clamp{inst2->Arg(2)}; + const IR::Value segmentation_mask{inst2->Arg(3)}; + if (!index.IsImmediate() || !clamp.IsImmediate() || !segmentation_mask.IsImmediate()) { + return; + } + if (clamp.U32() != 3 || segmentation_mask.U32() != 28) { + return; + } + if (swizzle_value == 0x99) { + // DPdxFine + if (index.U32() == 1) { + IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)}; + inst.ReplaceUsesWith(ir.DPdxFine(IR::F32{inst.Arg(1)})); + } + } else if (swizzle_value == 0xA5) { + // DPdyFine + if (index.U32() == 2) { + IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)}; + inst.ReplaceUsesWith(ir.DPdyFine(IR::F32{inst.Arg(1)})); + } + } +}
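FoldFSwizzleAdd recognizes a butterfly shuffle plus a per-lane add/subtract as a fine derivative. Within a 2x2 quad, XOR-ing the lane index with 1 fetches the horizontal neighbor (index 2 the vertical one), and alternating signs yield right-minus-left in every lane. The exact sign encoding of the 0x99/0xA5 swizzles is an assumption in this sketch; the pass itself only matches the constants:

```cpp
#include <array>
#include <cassert>

// Lanes 0..3 form a 2x2 quad (0,1 top row; 2,3 bottom row). Per-lane signs
// below are assumed for illustration.
int main() {
    const std::array<float, 4> v{1.0f, 4.0f, 9.0f, 16.0f}; // per-lane values
    std::array<float, 4> dpdx_fine{};
    for (int lane = 0; lane < 4; ++lane) {
        const float neighbor = v[lane ^ 1];           // ShuffleButterfly, index 1
        const float sign = (lane & 1) ? -1.0f : 1.0f; // alternate add/subtract
        dpdx_fine[lane] = sign * (neighbor - v[lane]);
    }
    // Both lanes of each row agree on that row's derivative.
    assert(dpdx_fine[0] == dpdx_fine[1] && dpdx_fine[2] == dpdx_fine[3]);
    return 0;
}
```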
+ +void ConstantPropagation(IR::Block& block, IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::GetRegister: + return FoldGetRegister(inst); + case IR::Opcode::GetPred: + return FoldGetPred(inst); + case IR::Opcode::IAdd32: + return FoldAdd<u32>(block, inst); + case IR::Opcode::ISub32: + return FoldISub32(inst); + case IR::Opcode::IMul32: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a * b; }); + return; + case IR::Opcode::ShiftRightArithmetic32: + FoldWhenAllImmediates(inst, [](s32 a, s32 b) { return static_cast<u32>(a >> b); }); + return; + case IR::Opcode::BitCastF32U32: + return FoldBitCast<IR::Opcode::BitCastF32U32, f32, u32>(inst, IR::Opcode::BitCastU32F32); + case IR::Opcode::BitCastU32F32: + return FoldBitCast<IR::Opcode::BitCastU32F32, u32, f32>(inst, IR::Opcode::BitCastF32U32); + case IR::Opcode::IAdd64: + return FoldAdd<u64>(block, inst); + case IR::Opcode::PackHalf2x16: + return FoldInverseFunc(inst, IR::Opcode::UnpackHalf2x16); + case IR::Opcode::UnpackHalf2x16: + return FoldInverseFunc(inst, IR::Opcode::PackHalf2x16); + case IR::Opcode::SelectU1: + case IR::Opcode::SelectU8: + case IR::Opcode::SelectU16: + case IR::Opcode::SelectU32: + case IR::Opcode::SelectU64: + case IR::Opcode::SelectF16: + case IR::Opcode::SelectF32: + case IR::Opcode::SelectF64: + return FoldSelect(inst); + case IR::Opcode::FPMul32: + return FoldFPMul32(inst); + case IR::Opcode::LogicalAnd: + return FoldLogicalAnd(inst); + case IR::Opcode::LogicalOr: + return FoldLogicalOr(inst); + case IR::Opcode::LogicalNot: + return FoldLogicalNot(inst); + case IR::Opcode::SLessThan: + FoldWhenAllImmediates(inst, [](s32 a, s32 b) { return a < b; }); + return; + case IR::Opcode::ULessThan: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a < b; }); + return; + case IR::Opcode::SLessThanEqual: + FoldWhenAllImmediates(inst, [](s32 a, s32 b) { return a <= b; }); + return; + case IR::Opcode::ULessThanEqual: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a <= b; }); + return; + case IR::Opcode::SGreaterThan: + FoldWhenAllImmediates(inst, [](s32 a, s32 b) { return a > b; }); + return; + case IR::Opcode::UGreaterThan: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a > b; }); + return; + case IR::Opcode::SGreaterThanEqual: + FoldWhenAllImmediates(inst, [](s32 a, s32 b) { return a >= b; }); + return; + case IR::Opcode::UGreaterThanEqual: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a >= b; }); + return; + case IR::Opcode::IEqual: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a == b; }); + return; + case IR::Opcode::INotEqual: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a != b; }); + return; + case IR::Opcode::BitwiseAnd32: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a & b; }); + return; + case IR::Opcode::BitwiseOr32: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a | b; }); + return; + case IR::Opcode::BitwiseXor32: + FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a ^ b; }); + return; + case IR::Opcode::BitFieldUExtract: + FoldWhenAllImmediates(inst, [](u32 base, u32 shift, u32 count) { + if (static_cast<size_t>(shift) + static_cast<size_t>(count) > 32) { + throw LogicError("Undefined result in {}({}, {}, {})", IR::Opcode::BitFieldUExtract, + base, shift, count); + } + return (base >> shift) & ((1U << count) - 1); + }); + return; + case IR::Opcode::BitFieldSExtract: + FoldWhenAllImmediates(inst, [](s32 base, u32 shift, u32 count) { + const size_t back_shift{static_cast<size_t>(shift) + static_cast<size_t>(count)}; + const size_t left_shift{32 - back_shift}; + const size_t right_shift{static_cast<size_t>(32 - count)}; + if (back_shift > 32 || left_shift >= 32 || right_shift >= 32) { + throw LogicError("Undefined result in {}({}, {}, {})", IR::Opcode::BitFieldSExtract, + base, shift, count); + } + return static_cast<u32>((base << left_shift) >> right_shift); + }); + return; + case IR::Opcode::BitFieldInsert: + FoldWhenAllImmediates(inst, [](u32 base, u32 insert, u32 offset, u32 bits) { + if (bits >= 32 || offset >= 32) { + throw LogicError("Undefined result in {}({}, {}, {}, {})", + IR::Opcode::BitFieldInsert, base, insert, offset, bits); + } + return (base & ~(~(~0u << bits) << offset)) | (insert << offset); + }); + return; + case IR::Opcode::CompositeExtractU32x2: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructU32x2, + IR::Opcode::CompositeInsertU32x2); + case IR::Opcode::CompositeExtractU32x3: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructU32x3, + IR::Opcode::CompositeInsertU32x3); + case IR::Opcode::CompositeExtractU32x4: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructU32x4, + IR::Opcode::CompositeInsertU32x4); + case IR::Opcode::CompositeExtractF32x2: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructF32x2, + IR::Opcode::CompositeInsertF32x2); + case IR::Opcode::CompositeExtractF32x3: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructF32x3, + IR::Opcode::CompositeInsertF32x3); + case IR::Opcode::CompositeExtractF32x4: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructF32x4, + IR::Opcode::CompositeInsertF32x4); + case IR::Opcode::CompositeExtractF16x2: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructF16x2, + IR::Opcode::CompositeInsertF16x2); + case IR::Opcode::CompositeExtractF16x3: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructF16x3, + IR::Opcode::CompositeInsertF16x3); + case IR::Opcode::CompositeExtractF16x4: + return FoldCompositeExtract(inst, IR::Opcode::CompositeConstructF16x4, + IR::Opcode::CompositeInsertF16x4); + case IR::Opcode::FSwizzleAdd: + return FoldFSwizzleAdd(block, inst); + default: + break; + } +} +} // Anonymous namespace + +void ConstantPropagationPass(IR::Program& program) { + for (IR::Block* const block : program.post_order_blocks | std::views::reverse) { + for (IR::Inst& inst : block->Instructions()) { + ConstantPropagation(*block, inst); + } + } +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/dead_code_elimination_pass.cpp b/src/shader_recompiler/ir_opt/dead_code_elimination_pass.cpp new file mode 100755 index 000000000..1e4a3fdae --- /dev/null +++ b/src/shader_recompiler/ir_opt/dead_code_elimination_pass.cpp @@ -0,0 +1,28 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include + +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/ir_opt/passes.h" + +namespace Shader::Optimization { + +void DeadCodeEliminationPass(IR::Program& program) { + // We iterate over the instructions in reverse order. + // This is because removing an instruction reduces the number of uses for earlier instructions. 
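A compact model of why one reverse sweep suffices: removing a dead instruction releases its operands, which can make earlier instructions dead within the same pass. Types and fields below are illustrative stand-ins for IR::Inst:

```cpp
#include <cassert>
#include <vector>

// Each instruction lists the (earlier) instructions it uses; removing one
// decrements its operands' use counts, mirroring Inst::Invalidate.
struct Inst {
    std::vector<int> args; // indices of earlier instructions
    int uses = 0;
    bool side_effects = false;
    bool alive = true;
};

int main() {
    // %0 = IAdd32 ... (used only by %1), %1 = IMul32 %0, %0 (never used)
    std::vector<Inst> prog(2);
    prog[1].args = {0, 0};
    prog[0].uses = 2;
    for (int i = static_cast<int>(prog.size()) - 1; i >= 0; --i) {
        if (prog[i].uses == 0 && !prog[i].side_effects) {
            prog[i].alive = false;
            for (const int arg : prog[i].args) {
                --prog[arg].uses; // releasing %1 frees %0 in the same sweep
            }
        }
    }
    assert(!prog[0].alive && !prog[1].alive);
    return 0;
}
```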
+ for (IR::Block* const block : program.post_order_blocks) { + auto it{block->end()}; + while (it != block->begin()) { + --it; + if (!it->HasUses() && !it->MayHaveSideEffects()) { + it->Invalidate(); + it = block->Instructions().erase(it); + } + } + } +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/dual_vertex_pass.cpp b/src/shader_recompiler/ir_opt/dual_vertex_pass.cpp new file mode 100755 index 000000000..3d2c205c2 --- /dev/null +++ b/src/shader_recompiler/ir_opt/dual_vertex_pass.cpp @@ -0,0 +1,36 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include + +#include "common/bit_cast.h" +#include "common/bit_util.h" +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/ir_opt/passes.h" + +namespace Shader::Optimization { + +void VertexATransformPass(IR::Program& program) { + for (IR::Block* const block : program.blocks) { + for (IR::Inst& inst : block->Instructions()) { + if (inst.GetOpcode() == IR::Opcode::Epilogue) { + return inst.Invalidate(); + } + } + } +} + +void VertexBTransformPass(IR::Program& program) { + for (IR::Block* const block : program.blocks) { + for (IR::Inst& inst : block->Instructions()) { + if (inst.GetOpcode() == IR::Opcode::Prologue) { + return inst.Invalidate(); + } + } + } +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp b/src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp new file mode 100755 index 000000000..70449eeca --- /dev/null +++ b/src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp @@ -0,0 +1,527 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
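The pass that begins here proves that a global memory access really targets a driver-managed storage buffer before rewriting it. The window it searches is the constant-buffer range where NVN keeps SSBO descriptors; a standalone version of that bias test, using the same constants as the code below:

```cpp
#include <cassert>
#include <cstdint>

// Candidate cbuf slots are only accepted inside the window NVN reserves for
// storage buffer descriptors (cbuf 0, offsets [0x110, 0x610)).
struct Bias {
    std::uint32_t index, offset_begin, offset_end;
};

bool MeetsBias(std::uint32_t cbuf_index, std::uint32_t cbuf_offset, const Bias& bias) {
    return cbuf_index == bias.index && cbuf_offset >= bias.offset_begin &&
           cbuf_offset < bias.offset_end;
}

int main() {
    constexpr Bias nvn_bias{.index = 0, .offset_begin = 0x110, .offset_end = 0x610};
    assert(MeetsBias(0, 0x110, nvn_bias));  // first NVN SSBO slot
    assert(!MeetsBias(0, 0x100, nvn_bias)); // below the window: not an SSBO
    assert(!MeetsBias(1, 0x110, nvn_bias)); // wrong constant buffer
    return 0;
}
```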
+ +#include <algorithm> +#include <compare> +#include <functional> +#include <iterator> +#include <optional> + +#include <boost/container/flat_set.hpp> +#include <boost/container/small_vector.hpp> + +#include "common/alignment.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/breadth_first_search.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/ir_opt/passes.h" + +namespace Shader::Optimization { +namespace { +/// Address in constant buffers to the storage buffer descriptor +struct StorageBufferAddr { + auto operator<=>(const StorageBufferAddr&) const noexcept = default; + + u32 index; + u32 offset; +}; + +/// Block iterator to a global memory instruction and the storage buffer it uses +struct StorageInst { + StorageBufferAddr storage_buffer; + IR::Inst* inst; + IR::Block* block; +}; + +/// Bias towards a certain range of constant buffers when looking for storage buffers +struct Bias { + u32 index; + u32 offset_begin; + u32 offset_end; +}; + +using boost::container::flat_set; +using boost::container::small_vector; +using StorageBufferSet = + flat_set<StorageBufferAddr, std::less<StorageBufferAddr>, small_vector<StorageBufferAddr, 16>>; +using StorageInstVector = small_vector<StorageInst, 24>; +using StorageWritesSet = + flat_set<StorageBufferAddr, std::less<StorageBufferAddr>, small_vector<StorageBufferAddr, 16>>; + +struct StorageInfo { + StorageBufferSet set; + StorageInstVector to_replace; + StorageWritesSet writes; +}; + +/// Returns true when the instruction is a global memory instruction +bool IsGlobalMemory(const IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::LoadGlobalS8: + case IR::Opcode::LoadGlobalU8: + case IR::Opcode::LoadGlobalS16: + case IR::Opcode::LoadGlobalU16: + case IR::Opcode::LoadGlobal32: + case IR::Opcode::LoadGlobal64: + case IR::Opcode::LoadGlobal128: + case IR::Opcode::WriteGlobalS8: + case IR::Opcode::WriteGlobalU8: + case IR::Opcode::WriteGlobalS16: + case IR::Opcode::WriteGlobalU16: + case IR::Opcode::WriteGlobal32: + case IR::Opcode::WriteGlobal64: + case IR::Opcode::WriteGlobal128: + case IR::Opcode::GlobalAtomicIAdd32: + case IR::Opcode::GlobalAtomicSMin32: + case IR::Opcode::GlobalAtomicUMin32: + case IR::Opcode::GlobalAtomicSMax32: + case IR::Opcode::GlobalAtomicUMax32: + case IR::Opcode::GlobalAtomicInc32: + case IR::Opcode::GlobalAtomicDec32: + case IR::Opcode::GlobalAtomicAnd32: + case IR::Opcode::GlobalAtomicOr32: + case IR::Opcode::GlobalAtomicXor32: + case IR::Opcode::GlobalAtomicExchange32: + case IR::Opcode::GlobalAtomicIAdd64: + case IR::Opcode::GlobalAtomicSMin64: + case IR::Opcode::GlobalAtomicUMin64: + case IR::Opcode::GlobalAtomicSMax64: + case IR::Opcode::GlobalAtomicUMax64: + case IR::Opcode::GlobalAtomicAnd64: + case IR::Opcode::GlobalAtomicOr64: + case IR::Opcode::GlobalAtomicXor64: + case IR::Opcode::GlobalAtomicExchange64: + case IR::Opcode::GlobalAtomicAddF32: + case IR::Opcode::GlobalAtomicAddF16x2: + case IR::Opcode::GlobalAtomicAddF32x2: + case IR::Opcode::GlobalAtomicMinF16x2: + case IR::Opcode::GlobalAtomicMinF32x2: + case IR::Opcode::GlobalAtomicMaxF16x2: + case IR::Opcode::GlobalAtomicMaxF32x2: + return true; + default: + return false; + } +} + +/// Returns true when the instruction is a global memory write +bool IsGlobalMemoryWrite(const IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::WriteGlobalS8: + case IR::Opcode::WriteGlobalU8: + case IR::Opcode::WriteGlobalS16: + case IR::Opcode::WriteGlobalU16: + case IR::Opcode::WriteGlobal32: + case IR::Opcode::WriteGlobal64: + case IR::Opcode::WriteGlobal128: + case IR::Opcode::GlobalAtomicIAdd32: + case IR::Opcode::GlobalAtomicSMin32: + case IR::Opcode::GlobalAtomicUMin32: + case 
IR::Opcode::GlobalAtomicSMax32: + case IR::Opcode::GlobalAtomicUMax32: + case IR::Opcode::GlobalAtomicInc32: + case IR::Opcode::GlobalAtomicDec32: + case IR::Opcode::GlobalAtomicAnd32: + case IR::Opcode::GlobalAtomicOr32: + case IR::Opcode::GlobalAtomicXor32: + case IR::Opcode::GlobalAtomicExchange32: + case IR::Opcode::GlobalAtomicIAdd64: + case IR::Opcode::GlobalAtomicSMin64: + case IR::Opcode::GlobalAtomicUMin64: + case IR::Opcode::GlobalAtomicSMax64: + case IR::Opcode::GlobalAtomicUMax64: + case IR::Opcode::GlobalAtomicAnd64: + case IR::Opcode::GlobalAtomicOr64: + case IR::Opcode::GlobalAtomicXor64: + case IR::Opcode::GlobalAtomicExchange64: + case IR::Opcode::GlobalAtomicAddF32: + case IR::Opcode::GlobalAtomicAddF16x2: + case IR::Opcode::GlobalAtomicAddF32x2: + case IR::Opcode::GlobalAtomicMinF16x2: + case IR::Opcode::GlobalAtomicMinF32x2: + case IR::Opcode::GlobalAtomicMaxF16x2: + case IR::Opcode::GlobalAtomicMaxF32x2: + return true; + default: + return false; + } +} + +/// Converts a global memory opcode to its storage buffer equivalent +IR::Opcode GlobalToStorage(IR::Opcode opcode) { + switch (opcode) { + case IR::Opcode::LoadGlobalS8: + return IR::Opcode::LoadStorageS8; + case IR::Opcode::LoadGlobalU8: + return IR::Opcode::LoadStorageU8; + case IR::Opcode::LoadGlobalS16: + return IR::Opcode::LoadStorageS16; + case IR::Opcode::LoadGlobalU16: + return IR::Opcode::LoadStorageU16; + case IR::Opcode::LoadGlobal32: + return IR::Opcode::LoadStorage32; + case IR::Opcode::LoadGlobal64: + return IR::Opcode::LoadStorage64; + case IR::Opcode::LoadGlobal128: + return IR::Opcode::LoadStorage128; + case IR::Opcode::WriteGlobalS8: + return IR::Opcode::WriteStorageS8; + case IR::Opcode::WriteGlobalU8: + return IR::Opcode::WriteStorageU8; + case IR::Opcode::WriteGlobalS16: + return IR::Opcode::WriteStorageS16; + case IR::Opcode::WriteGlobalU16: + return IR::Opcode::WriteStorageU16; + case IR::Opcode::WriteGlobal32: + return IR::Opcode::WriteStorage32; + case IR::Opcode::WriteGlobal64: + return IR::Opcode::WriteStorage64; + case IR::Opcode::WriteGlobal128: + return IR::Opcode::WriteStorage128; + case IR::Opcode::GlobalAtomicIAdd32: + return IR::Opcode::StorageAtomicIAdd32; + case IR::Opcode::GlobalAtomicSMin32: + return IR::Opcode::StorageAtomicSMin32; + case IR::Opcode::GlobalAtomicUMin32: + return IR::Opcode::StorageAtomicUMin32; + case IR::Opcode::GlobalAtomicSMax32: + return IR::Opcode::StorageAtomicSMax32; + case IR::Opcode::GlobalAtomicUMax32: + return IR::Opcode::StorageAtomicUMax32; + case IR::Opcode::GlobalAtomicInc32: + return IR::Opcode::StorageAtomicInc32; + case IR::Opcode::GlobalAtomicDec32: + return IR::Opcode::StorageAtomicDec32; + case IR::Opcode::GlobalAtomicAnd32: + return IR::Opcode::StorageAtomicAnd32; + case IR::Opcode::GlobalAtomicOr32: + return IR::Opcode::StorageAtomicOr32; + case IR::Opcode::GlobalAtomicXor32: + return IR::Opcode::StorageAtomicXor32; + case IR::Opcode::GlobalAtomicIAdd64: + return IR::Opcode::StorageAtomicIAdd64; + case IR::Opcode::GlobalAtomicSMin64: + return IR::Opcode::StorageAtomicSMin64; + case IR::Opcode::GlobalAtomicUMin64: + return IR::Opcode::StorageAtomicUMin64; + case IR::Opcode::GlobalAtomicSMax64: + return IR::Opcode::StorageAtomicSMax64; + case IR::Opcode::GlobalAtomicUMax64: + return IR::Opcode::StorageAtomicUMax64; + case IR::Opcode::GlobalAtomicAnd64: + return IR::Opcode::StorageAtomicAnd64; + case IR::Opcode::GlobalAtomicOr64: + return IR::Opcode::StorageAtomicOr64; + case IR::Opcode::GlobalAtomicXor64: + return 
IR::Opcode::StorageAtomicXor64; + case IR::Opcode::GlobalAtomicExchange32: + return IR::Opcode::StorageAtomicExchange32; + case IR::Opcode::GlobalAtomicExchange64: + return IR::Opcode::StorageAtomicExchange64; + case IR::Opcode::GlobalAtomicAddF32: + return IR::Opcode::StorageAtomicAddF32; + case IR::Opcode::GlobalAtomicAddF16x2: + return IR::Opcode::StorageAtomicAddF16x2; + case IR::Opcode::GlobalAtomicMinF16x2: + return IR::Opcode::StorageAtomicMinF16x2; + case IR::Opcode::GlobalAtomicMaxF16x2: + return IR::Opcode::StorageAtomicMaxF16x2; + case IR::Opcode::GlobalAtomicAddF32x2: + return IR::Opcode::StorageAtomicAddF32x2; + case IR::Opcode::GlobalAtomicMinF32x2: + return IR::Opcode::StorageAtomicMinF32x2; + case IR::Opcode::GlobalAtomicMaxF32x2: + return IR::Opcode::StorageAtomicMaxF32x2; + default: + throw InvalidArgument("Invalid global memory opcode {}", opcode); + } +} + +/// Returns true when a storage buffer address satisfies a bias +bool MeetsBias(const StorageBufferAddr& storage_buffer, const Bias& bias) noexcept { + return storage_buffer.index == bias.index && storage_buffer.offset >= bias.offset_begin && + storage_buffer.offset < bias.offset_end; +} + +struct LowAddrInfo { + IR::U32 value; + s32 imm_offset; +}; + +/// Tries to track the first 32-bits of a global memory instruction +std::optional<LowAddrInfo> TrackLowAddress(IR::Inst* inst) { + // The first argument is the low level GPU pointer to the global memory instruction + const IR::Value addr{inst->Arg(0)}; + if (addr.IsImmediate()) { + // Not much we can do if it's an immediate + return std::nullopt; + } + // This address is expected to either be a PackUint2x32, an IAdd64, or a CompositeConstructU32x2 + IR::Inst* addr_inst{addr.InstRecursive()}; + s32 imm_offset{0}; + if (addr_inst->GetOpcode() == IR::Opcode::IAdd64) { + // If it's an IAdd64, get the immediate offset it is applying and grab the address + // instruction. This expects the instruction to be canonicalized, with the address on + // the first argument and the immediate offset on the second one. + const IR::U64 imm_offset_value{addr_inst->Arg(1)}; + if (!imm_offset_value.IsImmediate()) { + return std::nullopt; + } + imm_offset = static_cast<s32>(static_cast<s64>(imm_offset_value.U64())); + const IR::U64 iadd_addr{addr_inst->Arg(0)}; + if (iadd_addr.IsImmediate()) { + return std::nullopt; + } + addr_inst = iadd_addr.InstRecursive(); + } + // With IAdd64 handled, now PackUint2x32 is expected + if (addr_inst->GetOpcode() == IR::Opcode::PackUint2x32) { + // PackUint2x32 is expected to be generated from a vector + const IR::Value vector{addr_inst->Arg(0)}; + if (vector.IsImmediate()) { + return std::nullopt; + } + addr_inst = vector.InstRecursive(); + } + // The vector is expected to be a CompositeConstructU32x2 + if (addr_inst->GetOpcode() != IR::Opcode::CompositeConstructU32x2) { + return std::nullopt; + } + // Grab the first argument from the CompositeConstructU32x2, this is the low address. + return LowAddrInfo{ + .value{IR::U32{addr_inst->Arg(0)}}, + .imm_offset = imm_offset, + }; +}
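TrackLowAddress peels an optional IAdd64 displacement off the pointer, then looks through PackUint2x32 to the CompositeConstructU32x2 that built it, returning the low 32-bit half plus the displacement. One subtlety worth pinning down is the signed truncation of the immediate, which keeps small negative displacements intact:

```cpp
#include <cassert>
#include <cstdint>

// The displacement arrives as a 64-bit immediate. Reinterpreting it as signed
// before truncating preserves negative offsets, e.g. -4 encoded as
// 0xFFFF'FFFF'FFFF'FFFC.
int main() {
    const std::uint64_t raw = 0xFFFF'FFFF'FFFF'FFFCULL;
    const std::int32_t imm_offset =
        static_cast<std::int32_t>(static_cast<std::int64_t>(raw));
    assert(imm_offset == -4);
    return 0;
}
```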
+ +/// Tries to track the storage buffer address used by a global memory instruction +std::optional<StorageBufferAddr> Track(const IR::Value& value, const Bias* bias) { + const auto pred{[bias](const IR::Inst* inst) -> std::optional<StorageBufferAddr> { + if (inst->GetOpcode() != IR::Opcode::GetCbufU32) { + return std::nullopt; + } + const IR::Value index{inst->Arg(0)}; + const IR::Value offset{inst->Arg(1)}; + if (!index.IsImmediate()) { + // Definitely not a storage buffer if it's read from a + // non-immediate index + return std::nullopt; + } + if (!offset.IsImmediate()) { + // TODO: Support SSBO arrays + return std::nullopt; + } + const StorageBufferAddr storage_buffer{ + .index{index.U32()}, + .offset{offset.U32()}, + }; + if (!Common::IsAligned(storage_buffer.offset, 16)) { + // The SSBO pointer has to be aligned + return std::nullopt; + } + if (bias && !MeetsBias(storage_buffer, *bias)) { + // We have to blacklist some addresses in case we wrongly + // point to them + return std::nullopt; + } + return storage_buffer; + }}; + return BreadthFirstSearch(value, pred); +} + +/// Collects the storage buffer used by a global memory instruction and the instruction itself +void CollectStorageBuffers(IR::Block& block, IR::Inst& inst, StorageInfo& info) { + // NVN puts storage buffers in a specific range, we have to bias towards these addresses to + // avoid getting false positives + static constexpr Bias nvn_bias{ + .index = 0, + .offset_begin = 0x110, + .offset_end = 0x610, + }; + // Track the low address of the instruction + const std::optional<LowAddrInfo> low_addr_info{TrackLowAddress(&inst)}; + if (!low_addr_info) { + // Failed to track the low address, use NVN fallbacks + return; + } + // First try to find storage buffers in the NVN address + const IR::U32 low_addr{low_addr_info->value}; + std::optional<StorageBufferAddr> storage_buffer{Track(low_addr, &nvn_bias)}; + if (!storage_buffer) { + // If it fails, track without a bias + storage_buffer = Track(low_addr, nullptr); + if (!storage_buffer) { + // If that also fails, use NVN fallbacks + return; + } + } + // Collect storage buffer and the instruction + if (IsGlobalMemoryWrite(inst)) { + info.writes.insert(*storage_buffer); + } + info.set.insert(*storage_buffer); + info.to_replace.push_back(StorageInst{ + .storage_buffer{*storage_buffer}, + .inst = &inst, + .block = &block, + }); +} + +/// Returns the offset in bytes for an equivalent storage instruction +IR::U32 StorageOffset(IR::Block& block, IR::Inst& inst, StorageBufferAddr buffer) { + IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)}; + IR::U32 offset; + if (const std::optional<LowAddrInfo> low_addr{TrackLowAddress(&inst)}) { + offset = low_addr->value; + if (low_addr->imm_offset != 0) { + offset = ir.IAdd(offset, ir.Imm32(low_addr->imm_offset)); + } + } else { + offset = ir.UConvert(32, IR::U64{inst.Arg(0)}); + } + // Subtract the least significant 32 bits from the guest offset. The result is the storage + // buffer offset in bytes. 
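A worked instance of the subtraction described above, including the wrap-around case; the concrete values are illustrative:

```cpp
#include <cassert>
#include <cstdint>

// If the traced pointer is base + 0x30 and the constant buffer slot holds the
// low 32 bits of base, then (addr_lo - cbuf_lo) recovers the byte offset into
// the storage buffer. Wrap-around on the low half is harmless because the
// subtraction is performed modulo 2^32.
int main() {
    const std::uint32_t cbuf_lo = 0xFFFF'FFF0u;    // low half of the SSBO base
    const std::uint32_t addr_lo = cbuf_lo + 0x30u; // wraps past 2^32
    assert(addr_lo - cbuf_lo == 0x30u);
    return 0;
}
```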
+ const IR::U32 low_cbuf{ir.GetCbuf(ir.Imm32(buffer.index), ir.Imm32(buffer.offset))}; + return ir.ISub(offset, low_cbuf); +} + +/// Replace a global memory load instruction with its storage buffer equivalent +void ReplaceLoad(IR::Block& block, IR::Inst& inst, const IR::U32& storage_index, + const IR::U32& offset) { + const IR::Opcode new_opcode{GlobalToStorage(inst.GetOpcode())}; + const auto it{IR::Block::InstructionList::s_iterator_to(inst)}; + const IR::Value value{&*block.PrependNewInst(it, new_opcode, {storage_index, offset})}; + inst.ReplaceUsesWith(value); +} + +/// Replace a global memory write instruction with its storage buffer equivalent +void ReplaceWrite(IR::Block& block, IR::Inst& inst, const IR::U32& storage_index, + const IR::U32& offset) { + const IR::Opcode new_opcode{GlobalToStorage(inst.GetOpcode())}; + const auto it{IR::Block::InstructionList::s_iterator_to(inst)}; + block.PrependNewInst(it, new_opcode, {storage_index, offset, inst.Arg(1)}); + inst.Invalidate(); +} + +/// Replace an atomic operation on global memory instruction with its storage buffer equivalent +void ReplaceAtomic(IR::Block& block, IR::Inst& inst, const IR::U32& storage_index, + const IR::U32& offset) { + const IR::Opcode new_opcode{GlobalToStorage(inst.GetOpcode())}; + const auto it{IR::Block::InstructionList::s_iterator_to(inst)}; + const IR::Value value{ + &*block.PrependNewInst(it, new_opcode, {storage_index, offset, inst.Arg(1)})}; + inst.ReplaceUsesWith(value); +} + +/// Replace a global memory instruction with its storage buffer equivalent +void Replace(IR::Block& block, IR::Inst& inst, const IR::U32& storage_index, + const IR::U32& offset) { + switch (inst.GetOpcode()) { + case IR::Opcode::LoadGlobalS8: + case IR::Opcode::LoadGlobalU8: + case IR::Opcode::LoadGlobalS16: + case IR::Opcode::LoadGlobalU16: + case IR::Opcode::LoadGlobal32: + case IR::Opcode::LoadGlobal64: + case IR::Opcode::LoadGlobal128: + return ReplaceLoad(block, inst, storage_index, offset); + case IR::Opcode::WriteGlobalS8: + case IR::Opcode::WriteGlobalU8: + case IR::Opcode::WriteGlobalS16: + case IR::Opcode::WriteGlobalU16: + case IR::Opcode::WriteGlobal32: + case IR::Opcode::WriteGlobal64: + case IR::Opcode::WriteGlobal128: + return ReplaceWrite(block, inst, storage_index, offset); + case IR::Opcode::GlobalAtomicIAdd32: + case IR::Opcode::GlobalAtomicSMin32: + case IR::Opcode::GlobalAtomicUMin32: + case IR::Opcode::GlobalAtomicSMax32: + case IR::Opcode::GlobalAtomicUMax32: + case IR::Opcode::GlobalAtomicInc32: + case IR::Opcode::GlobalAtomicDec32: + case IR::Opcode::GlobalAtomicAnd32: + case IR::Opcode::GlobalAtomicOr32: + case IR::Opcode::GlobalAtomicXor32: + case IR::Opcode::GlobalAtomicExchange32: + case IR::Opcode::GlobalAtomicIAdd64: + case IR::Opcode::GlobalAtomicSMin64: + case IR::Opcode::GlobalAtomicUMin64: + case IR::Opcode::GlobalAtomicSMax64: + case IR::Opcode::GlobalAtomicUMax64: + case IR::Opcode::GlobalAtomicAnd64: + case IR::Opcode::GlobalAtomicOr64: + case IR::Opcode::GlobalAtomicXor64: + case IR::Opcode::GlobalAtomicExchange64: + case IR::Opcode::GlobalAtomicAddF32: + case IR::Opcode::GlobalAtomicAddF16x2: + case IR::Opcode::GlobalAtomicAddF32x2: + case IR::Opcode::GlobalAtomicMinF16x2: + case IR::Opcode::GlobalAtomicMinF32x2: + case IR::Opcode::GlobalAtomicMaxF16x2: + case IR::Opcode::GlobalAtomicMaxF32x2: + return ReplaceAtomic(block, inst, storage_index, offset); + default: + throw InvalidArgument("Invalid global memory opcode {}", inst.GetOpcode()); + } +} +} // Anonymous namespace + +void 
GlobalMemoryToStorageBufferPass(IR::Program& program) {
+    StorageInfo info;
+    for (IR::Block* const block : program.post_order_blocks) {
+        for (IR::Inst& inst : block->Instructions()) {
+            if (!IsGlobalMemory(inst)) {
+                continue;
+            }
+            CollectStorageBuffers(*block, inst, info);
+        }
+    }
+    for (const StorageBufferAddr& storage_buffer : info.set) {
+        program.info.storage_buffers_descriptors.push_back({
+            .cbuf_index = storage_buffer.index,
+            .cbuf_offset = storage_buffer.offset,
+            .count = 1,
+            .is_written{info.writes.contains(storage_buffer)},
+        });
+    }
+    for (const StorageInst& storage_inst : info.to_replace) {
+        const StorageBufferAddr storage_buffer{storage_inst.storage_buffer};
+        const auto it{info.set.find(storage_inst.storage_buffer)};
+        const IR::U32 index{IR::Value{static_cast<u32>(info.set.index_of(it))}};
+        IR::Block* const block{storage_inst.block};
+        IR::Inst* const inst{storage_inst.inst};
+        const IR::U32 offset{StorageOffset(*block, *inst, storage_buffer)};
+        Replace(*block, *inst, index, offset);
+    }
+}
+
+template <typename Descriptors, typename Descriptor, typename Func>
+static u32 Add(Descriptors& descriptors, const Descriptor& desc, Func&& pred) {
+    // TODO: Handle arrays
+    const auto it{std::ranges::find_if(descriptors, pred)};
+    if (it != descriptors.end()) {
+        return static_cast<u32>(std::distance(descriptors.begin(), it));
+    }
+    descriptors.push_back(desc);
+    return static_cast<u32>(descriptors.size()) - 1;
+}
+
+void JoinStorageInfo(Info& base, Info& source) {
+    auto& descriptors = base.storage_buffers_descriptors;
+    for (auto& desc : source.storage_buffers_descriptors) {
+        auto it{std::ranges::find_if(descriptors, [&desc](const auto& existing) {
+            return desc.cbuf_index == existing.cbuf_index &&
+                   desc.cbuf_offset == existing.cbuf_offset && desc.count == existing.count;
+        })};
+        if (it != descriptors.end()) {
+            it->is_written |= desc.is_written;
+            continue;
+        }
+        descriptors.push_back(desc);
+    }
+}
+
+} // namespace Shader::Optimization
diff --git a/src/shader_recompiler/ir_opt/identity_removal_pass.cpp b/src/shader_recompiler/ir_opt/identity_removal_pass.cpp
new file mode 100755
index 000000000..e9b55f835
--- /dev/null
+++ b/src/shader_recompiler/ir_opt/identity_removal_pass.cpp
@@ -0,0 +1,38 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
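For reference, the address arithmetic performed by StorageOffset in the pass above can be modeled on the host. This is a minimal sketch, not emulator code; StorageByteOffset and the sample addresses are invented for illustration.

#include <cassert>
#include <cstdint>

// Host-side model of StorageOffset: the storage buffer byte offset is the
// 64-bit guest address truncated to 32 bits, minus the low 32 bits of the
// SSBO base pointer read back from the constant buffer.
static std::uint32_t StorageByteOffset(std::uint64_t guest_addr, std::uint32_t cbuf_low) {
    return static_cast<std::uint32_t>(guest_addr) - cbuf_low;
}

int main() {
    const std::uint64_t ssbo_base = 0x0000004123456000ULL; // invented SSBO base address
    const std::uint32_t cbuf_low = static_cast<std::uint32_t>(ssbo_base);
    assert(StorageByteOffset(ssbo_base + 0x30, cbuf_low) == 0x30);
}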
+
+#include <vector>
+
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/ir_opt/passes.h"
+
+namespace Shader::Optimization {
+
+void IdentityRemovalPass(IR::Program& program) {
+    std::vector<IR::Inst*> to_invalidate;
+    for (IR::Block* const block : program.blocks) {
+        for (auto inst = block->begin(); inst != block->end();) {
+            const size_t num_args{inst->NumArgs()};
+            for (size_t i = 0; i < num_args; ++i) {
+                IR::Value arg;
+                while ((arg = inst->Arg(i)).IsIdentity()) {
+                    inst->SetArg(i, arg.Inst()->Arg(0));
+                }
+            }
+            if (inst->GetOpcode() == IR::Opcode::Identity ||
+                inst->GetOpcode() == IR::Opcode::Void) {
+                to_invalidate.push_back(&*inst);
+                inst = block->Instructions().erase(inst);
+            } else {
+                ++inst;
+            }
+        }
+    }
+    for (IR::Inst* const inst : to_invalidate) {
+        inst->Invalidate();
+    }
+}
+
+} // namespace Shader::Optimization
diff --git a/src/shader_recompiler/ir_opt/lower_fp16_to_fp32.cpp b/src/shader_recompiler/ir_opt/lower_fp16_to_fp32.cpp
new file mode 100755
index 000000000..773e1f961
--- /dev/null
+++ b/src/shader_recompiler/ir_opt/lower_fp16_to_fp32.cpp
@@ -0,0 +1,143 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include
+
+#include "shader_recompiler/frontend/ir/ir_emitter.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/ir_opt/passes.h"
+
+namespace Shader::Optimization {
+namespace {
+IR::Opcode Replace(IR::Opcode op) {
+    switch (op) {
+    case IR::Opcode::FPAbs16:
+        return IR::Opcode::FPAbs32;
+    case IR::Opcode::FPAdd16:
+        return IR::Opcode::FPAdd32;
+    case IR::Opcode::FPCeil16:
+        return IR::Opcode::FPCeil32;
+    case IR::Opcode::FPFloor16:
+        return IR::Opcode::FPFloor32;
+    case IR::Opcode::FPFma16:
+        return IR::Opcode::FPFma32;
+    case IR::Opcode::FPMul16:
+        return IR::Opcode::FPMul32;
+    case IR::Opcode::FPNeg16:
+        return IR::Opcode::FPNeg32;
+    case IR::Opcode::FPRoundEven16:
+        return IR::Opcode::FPRoundEven32;
+    case IR::Opcode::FPSaturate16:
+        return IR::Opcode::FPSaturate32;
+    case IR::Opcode::FPClamp16:
+        return IR::Opcode::FPClamp32;
+    case IR::Opcode::FPTrunc16:
+        return IR::Opcode::FPTrunc32;
+    case IR::Opcode::CompositeConstructF16x2:
+        return IR::Opcode::CompositeConstructF32x2;
+    case IR::Opcode::CompositeConstructF16x3:
+        return IR::Opcode::CompositeConstructF32x3;
+    case IR::Opcode::CompositeConstructF16x4:
+        return IR::Opcode::CompositeConstructF32x4;
+    case IR::Opcode::CompositeExtractF16x2:
+        return IR::Opcode::CompositeExtractF32x2;
+    case IR::Opcode::CompositeExtractF16x3:
+        return IR::Opcode::CompositeExtractF32x3;
+    case IR::Opcode::CompositeExtractF16x4:
+        return IR::Opcode::CompositeExtractF32x4;
+    case IR::Opcode::CompositeInsertF16x2:
+        return IR::Opcode::CompositeInsertF32x2;
+    case IR::Opcode::CompositeInsertF16x3:
+        return IR::Opcode::CompositeInsertF32x3;
+    case IR::Opcode::CompositeInsertF16x4:
+        return IR::Opcode::CompositeInsertF32x4;
+    case IR::Opcode::FPOrdEqual16:
+        return IR::Opcode::FPOrdEqual32;
+    case IR::Opcode::FPUnordEqual16:
+        return IR::Opcode::FPUnordEqual32;
+    case IR::Opcode::FPOrdNotEqual16:
+        return IR::Opcode::FPOrdNotEqual32;
+    case IR::Opcode::FPUnordNotEqual16:
+        return IR::Opcode::FPUnordNotEqual32;
+    case IR::Opcode::FPOrdLessThan16:
+        return IR::Opcode::FPOrdLessThan32;
+    case IR::Opcode::FPUnordLessThan16:
+        return IR::Opcode::FPUnordLessThan32;
+    case IR::Opcode::FPOrdGreaterThan16:
+        return IR::Opcode::FPOrdGreaterThan32;
+    case
IR::Opcode::FPUnordGreaterThan16: + return IR::Opcode::FPUnordGreaterThan32; + case IR::Opcode::FPOrdLessThanEqual16: + return IR::Opcode::FPOrdLessThanEqual32; + case IR::Opcode::FPUnordLessThanEqual16: + return IR::Opcode::FPUnordLessThanEqual32; + case IR::Opcode::FPOrdGreaterThanEqual16: + return IR::Opcode::FPOrdGreaterThanEqual32; + case IR::Opcode::FPUnordGreaterThanEqual16: + return IR::Opcode::FPUnordGreaterThanEqual32; + case IR::Opcode::FPIsNan16: + return IR::Opcode::FPIsNan32; + case IR::Opcode::ConvertS16F16: + return IR::Opcode::ConvertS16F32; + case IR::Opcode::ConvertS32F16: + return IR::Opcode::ConvertS32F32; + case IR::Opcode::ConvertS64F16: + return IR::Opcode::ConvertS64F32; + case IR::Opcode::ConvertU16F16: + return IR::Opcode::ConvertU16F32; + case IR::Opcode::ConvertU32F16: + return IR::Opcode::ConvertU32F32; + case IR::Opcode::ConvertU64F16: + return IR::Opcode::ConvertU64F32; + case IR::Opcode::PackFloat2x16: + return IR::Opcode::PackHalf2x16; + case IR::Opcode::UnpackFloat2x16: + return IR::Opcode::UnpackHalf2x16; + case IR::Opcode::ConvertF32F16: + return IR::Opcode::Identity; + case IR::Opcode::ConvertF16F32: + return IR::Opcode::Identity; + case IR::Opcode::ConvertF16S8: + return IR::Opcode::ConvertF32S8; + case IR::Opcode::ConvertF16S16: + return IR::Opcode::ConvertF32S16; + case IR::Opcode::ConvertF16S32: + return IR::Opcode::ConvertF32S32; + case IR::Opcode::ConvertF16S64: + return IR::Opcode::ConvertF32S64; + case IR::Opcode::ConvertF16U8: + return IR::Opcode::ConvertF32U8; + case IR::Opcode::ConvertF16U16: + return IR::Opcode::ConvertF32U16; + case IR::Opcode::ConvertF16U32: + return IR::Opcode::ConvertF32U32; + case IR::Opcode::ConvertF16U64: + return IR::Opcode::ConvertF32U64; + case IR::Opcode::GlobalAtomicAddF16x2: + return IR::Opcode::GlobalAtomicAddF32x2; + case IR::Opcode::StorageAtomicAddF16x2: + return IR::Opcode::StorageAtomicAddF32x2; + case IR::Opcode::GlobalAtomicMinF16x2: + return IR::Opcode::GlobalAtomicMinF32x2; + case IR::Opcode::StorageAtomicMinF16x2: + return IR::Opcode::StorageAtomicMinF32x2; + case IR::Opcode::GlobalAtomicMaxF16x2: + return IR::Opcode::GlobalAtomicMaxF32x2; + case IR::Opcode::StorageAtomicMaxF16x2: + return IR::Opcode::StorageAtomicMaxF32x2; + default: + return op; + } +} +} // Anonymous namespace + +void LowerFp16ToFp32(IR::Program& program) { + for (IR::Block* const block : program.blocks) { + for (IR::Inst& inst : block->Instructions()) { + inst.ReplaceOpcode(Replace(inst.GetOpcode())); + } + } +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/lower_int64_to_int32.cpp b/src/shader_recompiler/ir_opt/lower_int64_to_int32.cpp new file mode 100755 index 000000000..abf7c87c7 --- /dev/null +++ b/src/shader_recompiler/ir_opt/lower_int64_to_int32.cpp @@ -0,0 +1,217 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
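The 64-bit integer lowering in the file below splits each u64 into a (lo, hi) pair of u32s and reconstructs carries explicitly. A standalone sketch of the IAdd64To32 helper follows; Add64As32 is an illustrative name, and it assumes (as the IR helper does via GetCarryFromOp) that the carry is the unsigned wrap-around of the low-word addition.

#include <cassert>
#include <cstdint>
#include <utility>

// Host-side model of IAdd64To32: add the low words, derive the carry from
// unsigned wrap, then fold the carry into the high-word addition.
static std::pair<std::uint32_t, std::uint32_t> Add64As32(std::uint32_t a_lo, std::uint32_t a_hi,
                                                         std::uint32_t b_lo, std::uint32_t b_hi) {
    const std::uint32_t ret_lo = a_lo + b_lo;
    const std::uint32_t carry = ret_lo < a_lo ? 1u : 0u; // carry out of the low word
    const std::uint32_t ret_hi = a_hi + b_hi + carry;
    return {ret_lo, ret_hi};
}

int main() {
    const auto [lo, hi] = Add64As32(0xFFFFFFFFu, 0u, 1u, 0u);
    assert(lo == 0u && hi == 1u); // 0x00000000FFFFFFFF + 1 == 0x0000000100000000
}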
+
+#include <ranges>
+#include <utility>
+
+#include "shader_recompiler/exception.h"
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/ir_emitter.h"
+#include "shader_recompiler/frontend/ir/program.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/ir_opt/passes.h"
+
+namespace Shader::Optimization {
+namespace {
+std::pair<IR::U32, IR::U32> Unpack(IR::IREmitter& ir, const IR::Value& packed) {
+    if (packed.IsImmediate()) {
+        const u64 value{packed.U64()};
+        return {
+            ir.Imm32(static_cast<u32>(value)),
+            ir.Imm32(static_cast<u32>(value >> 32)),
+        };
+    } else {
+        return std::pair<IR::U32, IR::U32>{
+            ir.CompositeExtract(packed, 0u),
+            ir.CompositeExtract(packed, 1u),
+        };
+    }
+}
+
+void IAdd64To32(IR::Block& block, IR::Inst& inst) {
+    if (inst.HasAssociatedPseudoOperation()) {
+        throw NotImplementedException("IAdd64 emulation with pseudo instructions");
+    }
+    IR::IREmitter ir(block, IR::Block::InstructionList::s_iterator_to(inst));
+    const auto [a_lo, a_hi]{Unpack(ir, inst.Arg(0))};
+    const auto [b_lo, b_hi]{Unpack(ir, inst.Arg(1))};
+
+    const IR::U32 ret_lo{ir.IAdd(a_lo, b_lo)};
+    const IR::U32 carry{ir.Select(ir.GetCarryFromOp(ret_lo), ir.Imm32(1u), ir.Imm32(0u))};
+
+    const IR::U32 ret_hi{ir.IAdd(ir.IAdd(a_hi, b_hi), carry)};
+    inst.ReplaceUsesWith(ir.CompositeConstruct(ret_lo, ret_hi));
+}
+
+void ISub64To32(IR::Block& block, IR::Inst& inst) {
+    if (inst.HasAssociatedPseudoOperation()) {
+        throw NotImplementedException("ISub64 emulation with pseudo instructions");
+    }
+    IR::IREmitter ir(block, IR::Block::InstructionList::s_iterator_to(inst));
+    const auto [a_lo, a_hi]{Unpack(ir, inst.Arg(0))};
+    const auto [b_lo, b_hi]{Unpack(ir, inst.Arg(1))};
+
+    const IR::U32 ret_lo{ir.ISub(a_lo, b_lo)};
+    const IR::U1 underflow{ir.IGreaterThan(ret_lo, a_lo, false)};
+    const IR::U32 underflow_bit{ir.Select(underflow, ir.Imm32(1u), ir.Imm32(0u))};
+
+    const IR::U32 ret_hi{ir.ISub(ir.ISub(a_hi, b_hi), underflow_bit)};
+    inst.ReplaceUsesWith(ir.CompositeConstruct(ret_lo, ret_hi));
+}
+
+void INeg64To32(IR::Block& block, IR::Inst& inst) {
+    if (inst.HasAssociatedPseudoOperation()) {
+        throw NotImplementedException("INeg64 emulation with pseudo instructions");
+    }
+    IR::IREmitter ir(block, IR::Block::InstructionList::s_iterator_to(inst));
+    auto [lo, hi]{Unpack(ir, inst.Arg(0))};
+    lo = ir.BitwiseNot(lo);
+    hi = ir.BitwiseNot(hi);
+
+    lo = ir.IAdd(lo, ir.Imm32(1));
+
+    const IR::U32 carry{ir.Select(ir.GetCarryFromOp(lo), ir.Imm32(1u), ir.Imm32(0u))};
+    hi = ir.IAdd(hi, carry);
+
+    inst.ReplaceUsesWith(ir.CompositeConstruct(lo, hi));
+}
+
+void ShiftLeftLogical64To32(IR::Block& block, IR::Inst& inst) {
+    if (inst.HasAssociatedPseudoOperation()) {
+        throw NotImplementedException("ShiftLeftLogical64 emulation with pseudo instructions");
+    }
+    IR::IREmitter ir(block, IR::Block::InstructionList::s_iterator_to(inst));
+    const auto [lo, hi]{Unpack(ir, inst.Arg(0))};
+    const IR::U32 shift{inst.Arg(1)};
+
+    const IR::U32 shifted_lo{ir.ShiftLeftLogical(lo, shift)};
+    const IR::U32 shifted_hi{ir.ShiftLeftLogical(hi, shift)};
+
+    const IR::U32 inv_shift{ir.ISub(shift, ir.Imm32(32))};
+    const IR::U1 is_long{ir.IGreaterThanEqual(inv_shift, ir.Imm32(0), true)};
+    const IR::U1 is_zero{ir.IEqual(shift, ir.Imm32(0))};
+
+    const IR::U32 long_ret_lo{ir.Imm32(0)};
+    const IR::U32 long_ret_hi{ir.ShiftLeftLogical(lo, inv_shift)};
+
+    const IR::U32 shift_complement{ir.ISub(ir.Imm32(32), shift)};
+    const IR::U32 lo_extract{ir.BitFieldExtract(lo, shift_complement, shift, false)};
+    const IR::U32
short_ret_lo{shifted_lo}; + const IR::U32 short_ret_hi{ir.BitwiseOr(shifted_hi, lo_extract)}; + + const IR::U32 zero_ret_lo{lo}; + const IR::U32 zero_ret_hi{hi}; + + const IR::U32 non_zero_lo{ir.Select(is_long, long_ret_lo, short_ret_lo)}; + const IR::U32 non_zero_hi{ir.Select(is_long, long_ret_hi, short_ret_hi)}; + + const IR::U32 ret_lo{ir.Select(is_zero, zero_ret_lo, non_zero_lo)}; + const IR::U32 ret_hi{ir.Select(is_zero, zero_ret_hi, non_zero_hi)}; + inst.ReplaceUsesWith(ir.CompositeConstruct(ret_lo, ret_hi)); +} + +void ShiftRightLogical64To32(IR::Block& block, IR::Inst& inst) { + if (inst.HasAssociatedPseudoOperation()) { + throw NotImplementedException("ShiftRightLogical64 emulation with pseudo instructions"); + } + IR::IREmitter ir(block, IR::Block::InstructionList::s_iterator_to(inst)); + const auto [lo, hi]{Unpack(ir, inst.Arg(0))}; + const IR::U32 shift{inst.Arg(1)}; + + const IR::U32 shifted_lo{ir.ShiftRightLogical(lo, shift)}; + const IR::U32 shifted_hi{ir.ShiftRightLogical(hi, shift)}; + + const IR::U32 inv_shift{ir.ISub(shift, ir.Imm32(32))}; + const IR::U1 is_long{ir.IGreaterThanEqual(inv_shift, ir.Imm32(0), true)}; + const IR::U1 is_zero{ir.IEqual(shift, ir.Imm32(0))}; + + const IR::U32 long_ret_hi{ir.Imm32(0)}; + const IR::U32 long_ret_lo{ir.ShiftRightLogical(hi, inv_shift)}; + + const IR::U32 shift_complement{ir.ISub(ir.Imm32(32), shift)}; + const IR::U32 short_hi_extract{ir.BitFieldExtract(hi, ir.Imm32(0), shift)}; + const IR::U32 short_ret_hi{shifted_hi}; + const IR::U32 short_ret_lo{ + ir.BitFieldInsert(shifted_lo, short_hi_extract, shift_complement, shift)}; + + const IR::U32 zero_ret_lo{lo}; + const IR::U32 zero_ret_hi{hi}; + + const IR::U32 non_zero_lo{ir.Select(is_long, long_ret_lo, short_ret_lo)}; + const IR::U32 non_zero_hi{ir.Select(is_long, long_ret_hi, short_ret_hi)}; + + const IR::U32 ret_lo{ir.Select(is_zero, zero_ret_lo, non_zero_lo)}; + const IR::U32 ret_hi{ir.Select(is_zero, zero_ret_hi, non_zero_hi)}; + inst.ReplaceUsesWith(ir.CompositeConstruct(ret_lo, ret_hi)); +} + +void ShiftRightArithmetic64To32(IR::Block& block, IR::Inst& inst) { + if (inst.HasAssociatedPseudoOperation()) { + throw NotImplementedException("ShiftRightArithmetic64 emulation with pseudo instructions"); + } + IR::IREmitter ir(block, IR::Block::InstructionList::s_iterator_to(inst)); + const auto [lo, hi]{Unpack(ir, inst.Arg(0))}; + const IR::U32 shift{inst.Arg(1)}; + + const IR::U32 shifted_lo{ir.ShiftRightLogical(lo, shift)}; + const IR::U32 shifted_hi{ir.ShiftRightArithmetic(hi, shift)}; + + const IR::U32 sign_extension{ir.ShiftRightArithmetic(hi, ir.Imm32(31))}; + + const IR::U32 inv_shift{ir.ISub(shift, ir.Imm32(32))}; + const IR::U1 is_long{ir.IGreaterThanEqual(inv_shift, ir.Imm32(0), true)}; + const IR::U1 is_zero{ir.IEqual(shift, ir.Imm32(0))}; + + const IR::U32 long_ret_hi{sign_extension}; + const IR::U32 long_ret_lo{ir.ShiftRightArithmetic(hi, inv_shift)}; + + const IR::U32 shift_complement{ir.ISub(ir.Imm32(32), shift)}; + const IR::U32 short_hi_extract(ir.BitFieldExtract(hi, ir.Imm32(0), shift)); + const IR::U32 short_ret_hi{shifted_hi}; + const IR::U32 short_ret_lo{ + ir.BitFieldInsert(shifted_lo, short_hi_extract, shift_complement, shift)}; + + const IR::U32 zero_ret_lo{lo}; + const IR::U32 zero_ret_hi{hi}; + + const IR::U32 non_zero_lo{ir.Select(is_long, long_ret_lo, short_ret_lo)}; + const IR::U32 non_zero_hi{ir.Select(is_long, long_ret_hi, short_ret_hi)}; + + const IR::U32 ret_lo{ir.Select(is_zero, zero_ret_lo, non_zero_lo)}; + const IR::U32 ret_hi{ir.Select(is_zero, 
zero_ret_hi, non_zero_hi)}; + inst.ReplaceUsesWith(ir.CompositeConstruct(ret_lo, ret_hi)); +} + +void Lower(IR::Block& block, IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::PackUint2x32: + case IR::Opcode::UnpackUint2x32: + return inst.ReplaceOpcode(IR::Opcode::Identity); + case IR::Opcode::IAdd64: + return IAdd64To32(block, inst); + case IR::Opcode::ISub64: + return ISub64To32(block, inst); + case IR::Opcode::INeg64: + return INeg64To32(block, inst); + case IR::Opcode::ShiftLeftLogical64: + return ShiftLeftLogical64To32(block, inst); + case IR::Opcode::ShiftRightLogical64: + return ShiftRightLogical64To32(block, inst); + case IR::Opcode::ShiftRightArithmetic64: + return ShiftRightArithmetic64To32(block, inst); + default: + break; + } +} +} // Anonymous namespace + +void LowerInt64ToInt32(IR::Program& program) { + for (IR::Block* const block : program.post_order_blocks | std::views::reverse) { + for (IR::Inst& inst : block->Instructions()) { + Lower(*block, inst); + } + } +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/passes.h b/src/shader_recompiler/ir_opt/passes.h new file mode 100755 index 000000000..2f89b1ea0 --- /dev/null +++ b/src/shader_recompiler/ir_opt/passes.h @@ -0,0 +1,32 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include + +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/program.h" + +namespace Shader::Optimization { + +void CollectShaderInfoPass(Environment& env, IR::Program& program); +void ConstantPropagationPass(IR::Program& program); +void DeadCodeEliminationPass(IR::Program& program); +void GlobalMemoryToStorageBufferPass(IR::Program& program); +void IdentityRemovalPass(IR::Program& program); +void LowerFp16ToFp32(IR::Program& program); +void LowerInt64ToInt32(IR::Program& program); +void SsaRewritePass(IR::Program& program); +void TexturePass(Environment& env, IR::Program& program); +void VerificationPass(const IR::Program& program); + +// Dual Vertex +void VertexATransformPass(IR::Program& program); +void VertexBTransformPass(IR::Program& program); +void JoinTextureInfo(Info& base, Info& source); +void JoinStorageInfo(Info& base, Info& source); + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/ssa_rewrite_pass.cpp b/src/shader_recompiler/ir_opt/ssa_rewrite_pass.cpp new file mode 100755 index 000000000..dcaced83f --- /dev/null +++ b/src/shader_recompiler/ir_opt/ssa_rewrite_pass.cpp @@ -0,0 +1,381 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +// This file implements the SSA rewriting algorithm proposed in +// +// Simple and Efficient Construction of Static Single Assignment Form. +// Braun M., Buchwald S., Hack S., Leiba R., Mallon C., Zwinkau A. (2013) +// In: Jhala R., De Bosschere K. (eds) +// Compiler Construction. CC 2013. +// Lecture Notes in Computer Science, vol 7791. 
+// Springer, Berlin, Heidelberg
+//
+// https://link.springer.com/chapter/10.1007/978-3-642-37051-9_6
+//
+
+#include <ranges>
+#include <span>
+#include <variant>
+#include <vector>
+
+#include <boost/container/flat_map.hpp>
+#include <boost/container/small_vector.hpp>
+
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/ir/opcodes.h"
+#include "shader_recompiler/frontend/ir/pred.h"
+#include "shader_recompiler/frontend/ir/reg.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/ir_opt/passes.h"
+
+namespace Shader::Optimization {
+namespace {
+struct FlagTag {
+    auto operator<=>(const FlagTag&) const noexcept = default;
+};
+struct ZeroFlagTag : FlagTag {};
+struct SignFlagTag : FlagTag {};
+struct CarryFlagTag : FlagTag {};
+struct OverflowFlagTag : FlagTag {};
+
+struct GotoVariable : FlagTag {
+    GotoVariable() = default;
+    explicit GotoVariable(u32 index_) : index{index_} {}
+
+    auto operator<=>(const GotoVariable&) const noexcept = default;
+
+    u32 index;
+};
+
+struct IndirectBranchVariable {
+    auto operator<=>(const IndirectBranchVariable&) const noexcept = default;
+};
+
+using Variant = std::variant<IR::Reg, IR::Pred, ZeroFlagTag, SignFlagTag, CarryFlagTag,
+                             OverflowFlagTag, GotoVariable, IndirectBranchVariable>;
+using ValueMap = boost::container::flat_map<IR::Block*, IR::Value>;
+
+struct DefTable {
+    const IR::Value& Def(IR::Block* block, IR::Reg variable) {
+        return block->SsaRegValue(variable);
+    }
+    void SetDef(IR::Block* block, IR::Reg variable, const IR::Value& value) {
+        block->SetSsaRegValue(variable, value);
+    }
+
+    const IR::Value& Def(IR::Block* block, IR::Pred variable) {
+        return preds[IR::PredIndex(variable)][block];
+    }
+    void SetDef(IR::Block* block, IR::Pred variable, const IR::Value& value) {
+        preds[IR::PredIndex(variable)].insert_or_assign(block, value);
+    }
+
+    const IR::Value& Def(IR::Block* block, GotoVariable variable) {
+        return goto_vars[variable.index][block];
+    }
+    void SetDef(IR::Block* block, GotoVariable variable, const IR::Value& value) {
+        goto_vars[variable.index].insert_or_assign(block, value);
+    }
+
+    const IR::Value& Def(IR::Block* block, IndirectBranchVariable) {
+        return indirect_branch_var[block];
+    }
+    void SetDef(IR::Block* block, IndirectBranchVariable, const IR::Value& value) {
+        indirect_branch_var.insert_or_assign(block, value);
+    }
+
+    const IR::Value& Def(IR::Block* block, ZeroFlagTag) {
+        return zero_flag[block];
+    }
+    void SetDef(IR::Block* block, ZeroFlagTag, const IR::Value& value) {
+        zero_flag.insert_or_assign(block, value);
+    }
+
+    const IR::Value& Def(IR::Block* block, SignFlagTag) {
+        return sign_flag[block];
+    }
+    void SetDef(IR::Block* block, SignFlagTag, const IR::Value& value) {
+        sign_flag.insert_or_assign(block, value);
+    }
+
+    const IR::Value& Def(IR::Block* block, CarryFlagTag) {
+        return carry_flag[block];
+    }
+    void SetDef(IR::Block* block, CarryFlagTag, const IR::Value& value) {
+        carry_flag.insert_or_assign(block, value);
+    }
+
+    const IR::Value& Def(IR::Block* block, OverflowFlagTag) {
+        return overflow_flag[block];
+    }
+    void SetDef(IR::Block* block, OverflowFlagTag, const IR::Value& value) {
+        overflow_flag.insert_or_assign(block, value);
+    }
+
+    std::array<ValueMap, IR::NUM_USER_PREDS> preds;
+    boost::container::flat_map<u32, ValueMap> goto_vars;
+    ValueMap indirect_branch_var;
+    ValueMap zero_flag;
+    ValueMap sign_flag;
+    ValueMap carry_flag;
+    ValueMap overflow_flag;
+};
+
+IR::Opcode UndefOpcode(IR::Reg) noexcept {
+    return IR::Opcode::UndefU32;
+}
+
+IR::Opcode UndefOpcode(IR::Pred) noexcept {
+    return IR::Opcode::UndefU1;
+}
+
+IR::Opcode UndefOpcode(const FlagTag&) noexcept {
+    return IR::Opcode::UndefU1;
+}
+
+IR::Opcode UndefOpcode(IndirectBranchVariable) noexcept {
+    return IR::Opcode::UndefU32;
+}
+
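The ReadVariable state machine below implements the on-demand definition lookup of Braun et al. (2013). A deliberately reduced toy model of the core idea follows; it handles only single-predecessor chains, whereas the real pass also places phis at merge points, breaks cycles with operandless phis, and removes trivial ones afterwards. ToyBlock and the value ids are illustrative only.

#include <cassert>
#include <map>
#include <optional>
#include <string>

// Toy model of ReadVariable: a read walks immediate predecessors until it
// finds a local definition; a total miss corresponds to an Undef value.
struct ToyBlock {
    ToyBlock* pred = nullptr;              // single immediate predecessor
    std::map<std::string, int> local_defs; // variable name -> value id
};

static std::optional<int> ReadVariable(ToyBlock* block, const std::string& var) {
    for (ToyBlock* b = block; b != nullptr; b = b->pred) {
        if (const auto it = b->local_defs.find(var); it != b->local_defs.end()) {
            return it->second;
        }
    }
    return std::nullopt; // undefined: the pass materializes an Undef here
}

int main() {
    ToyBlock entry;
    ToyBlock body{&entry};
    entry.local_defs["R0"] = 42;
    assert(ReadVariable(&body, "R0") == 42);
    assert(!ReadVariable(&body, "R1").has_value());
}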
+enum class Status {
+    Start,
+    SetValue,
+    PreparePhiArgument,
+    PushPhiArgument,
+};
+
+template <typename Type>
+struct ReadState {
+    ReadState(IR::Block* block_) : block{block_} {}
+    ReadState() = default;
+
+    IR::Block* block{};
+    IR::Value result{};
+    IR::Inst* phi{};
+    IR::Block* const* pred_it{};
+    IR::Block* const* pred_end{};
+    Status pc{Status::Start};
+};
+
+class Pass {
+public:
+    template <typename Type>
+    void WriteVariable(Type variable, IR::Block* block, const IR::Value& value) {
+        current_def.SetDef(block, variable, value);
+    }
+
+    template <typename Type>
+    IR::Value ReadVariable(Type variable, IR::Block* root_block) {
+        boost::container::small_vector<ReadState<Type>, 64> stack{
+            ReadState<Type>(nullptr),
+            ReadState<Type>(root_block),
+        };
+        const auto prepare_phi_operand{[&] {
+            if (stack.back().pred_it == stack.back().pred_end) {
+                IR::Inst* const phi{stack.back().phi};
+                IR::Block* const block{stack.back().block};
+                const IR::Value result{TryRemoveTrivialPhi(*phi, block, UndefOpcode(variable))};
+                stack.pop_back();
+                stack.back().result = result;
+                WriteVariable(variable, block, result);
+            } else {
+                IR::Block* const imm_pred{*stack.back().pred_it};
+                stack.back().pc = Status::PushPhiArgument;
+                stack.emplace_back(imm_pred);
+            }
+        }};
+        do {
+            IR::Block* const block{stack.back().block};
+            switch (stack.back().pc) {
+            case Status::Start: {
+                if (const IR::Value& def = current_def.Def(block, variable); !def.IsEmpty()) {
+                    stack.back().result = def;
+                } else if (!block->IsSsaSealed()) {
+                    // Incomplete CFG
+                    IR::Inst* phi{&*block->PrependNewInst(block->begin(), IR::Opcode::Phi)};
+                    phi->SetFlags(IR::TypeOf(UndefOpcode(variable)));
+
+                    incomplete_phis[block].insert_or_assign(variable, phi);
+                    stack.back().result = IR::Value{&*phi};
+                } else if (const std::span<IR::Block* const> imm_preds = block->ImmPredecessors();
+                           imm_preds.size() == 1) {
+                    // Optimize the common case of one predecessor: no phi needed
+                    stack.back().pc = Status::SetValue;
+                    stack.emplace_back(imm_preds.front());
+                    break;
+                } else {
+                    // Break potential cycles with operandless phi
+                    IR::Inst* const phi{&*block->PrependNewInst(block->begin(), IR::Opcode::Phi)};
+                    phi->SetFlags(IR::TypeOf(UndefOpcode(variable)));
+
+                    WriteVariable(variable, block, IR::Value{phi});
+
+                    stack.back().phi = phi;
+                    stack.back().pred_it = imm_preds.data();
+                    stack.back().pred_end = imm_preds.data() + imm_preds.size();
+                    prepare_phi_operand();
+                    break;
+                }
+            }
+                [[fallthrough]];
+            case Status::SetValue: {
+                const IR::Value result{stack.back().result};
+                WriteVariable(variable, block, result);
+                stack.pop_back();
+                stack.back().result = result;
+                break;
+            }
+            case Status::PushPhiArgument: {
+                IR::Inst* const phi{stack.back().phi};
+                phi->AddPhiOperand(*stack.back().pred_it, stack.back().result);
+                ++stack.back().pred_it;
+            }
+                [[fallthrough]];
+            case Status::PreparePhiArgument:
+                prepare_phi_operand();
+                break;
+            }
+        } while (stack.size() > 1);
+        return stack.back().result;
+    }
+
+    void SealBlock(IR::Block* block) {
+        const auto it{incomplete_phis.find(block)};
+        if (it != incomplete_phis.end()) {
+            for (auto& [variant, phi] : it->second) {
+                std::visit([&](auto& variable) { AddPhiOperands(variable, *phi, block); }, variant);
+            }
+        }
+        block->SsaSeal();
+    }
+
+private:
+    template <typename Type>
+    IR::Value AddPhiOperands(Type variable, IR::Inst& phi, IR::Block* block) {
+        for (IR::Block* const imm_pred : block->ImmPredecessors()) {
+            phi.AddPhiOperand(imm_pred, ReadVariable(variable, imm_pred));
+        }
+        return TryRemoveTrivialPhi(phi, block, UndefOpcode(variable));
+    }
+
+    IR::Value TryRemoveTrivialPhi(IR::Inst& phi, IR::Block* block,
+                                  IR::Opcode undef_opcode) {
+        IR::Value same;
+        const size_t num_args{phi.NumArgs()};
+        for (size_t arg_index = 0; arg_index < num_args; ++arg_index) {
+            const IR::Value& op{phi.Arg(arg_index)};
+            if (op.Resolve() == same.Resolve() || op == IR::Value{&phi}) {
+                // Unique value or self-reference
+                continue;
+            }
+            if (!same.IsEmpty()) {
+                // The phi merges at least two values: not trivial
+                return IR::Value{&phi};
+            }
+            same = op;
+        }
+        // Remove the phi node from the block, it will be reinserted
+        IR::Block::InstructionList& list{block->Instructions()};
+        list.erase(IR::Block::InstructionList::s_iterator_to(phi));
+
+        // Find the first non-phi instruction and use it as an insertion point
+        IR::Block::iterator reinsert_point{std::ranges::find_if_not(list, IR::IsPhi)};
+        if (same.IsEmpty()) {
+            // The phi is unreachable or in the start block
+            // Insert an undefined instruction and make it the phi node replacement
+            // The "phi" node reinsertion point is specified after this instruction
+            reinsert_point = block->PrependNewInst(reinsert_point, undef_opcode);
+            same = IR::Value{&*reinsert_point};
+            ++reinsert_point;
+        }
+        // Reinsert the phi node and reroute all its uses to the "same" value
+        list.insert(reinsert_point, phi);
+        phi.ReplaceUsesWith(same);
+        // TODO: Try to recursively remove all phi users, which might have become trivial
+        return same;
+    }
+
+    boost::container::flat_map<IR::Block*, boost::container::flat_map<Variant, IR::Inst*>>
+        incomplete_phis;
+    DefTable current_def;
+};
+
+void VisitInst(Pass& pass, IR::Block* block, IR::Inst& inst) {
+    switch (inst.GetOpcode()) {
+    case IR::Opcode::SetRegister:
+        if (const IR::Reg reg{inst.Arg(0).Reg()}; reg != IR::Reg::RZ) {
+            pass.WriteVariable(reg, block, inst.Arg(1));
+        }
+        break;
+    case IR::Opcode::SetPred:
+        if (const IR::Pred pred{inst.Arg(0).Pred()}; pred != IR::Pred::PT) {
+            pass.WriteVariable(pred, block, inst.Arg(1));
+        }
+        break;
+    case IR::Opcode::SetGotoVariable:
+        pass.WriteVariable(GotoVariable{inst.Arg(0).U32()}, block, inst.Arg(1));
+        break;
+    case IR::Opcode::SetIndirectBranchVariable:
+        pass.WriteVariable(IndirectBranchVariable{}, block, inst.Arg(0));
+        break;
+    case IR::Opcode::SetZFlag:
+        pass.WriteVariable(ZeroFlagTag{}, block, inst.Arg(0));
+        break;
+    case IR::Opcode::SetSFlag:
+        pass.WriteVariable(SignFlagTag{}, block, inst.Arg(0));
+        break;
+    case IR::Opcode::SetCFlag:
+        pass.WriteVariable(CarryFlagTag{}, block, inst.Arg(0));
+        break;
+    case IR::Opcode::SetOFlag:
+        pass.WriteVariable(OverflowFlagTag{}, block, inst.Arg(0));
+        break;
+    case IR::Opcode::GetRegister:
+        if (const IR::Reg reg{inst.Arg(0).Reg()}; reg != IR::Reg::RZ) {
+            inst.ReplaceUsesWith(pass.ReadVariable(reg, block));
+        }
+        break;
+    case IR::Opcode::GetPred:
+        if (const IR::Pred pred{inst.Arg(0).Pred()}; pred != IR::Pred::PT) {
+            inst.ReplaceUsesWith(pass.ReadVariable(pred, block));
+        }
+        break;
+    case IR::Opcode::GetGotoVariable:
+        inst.ReplaceUsesWith(pass.ReadVariable(GotoVariable{inst.Arg(0).U32()}, block));
+        break;
+    case IR::Opcode::GetIndirectBranchVariable:
+        inst.ReplaceUsesWith(pass.ReadVariable(IndirectBranchVariable{}, block));
+        break;
+    case IR::Opcode::GetZFlag:
+        inst.ReplaceUsesWith(pass.ReadVariable(ZeroFlagTag{}, block));
+        break;
+    case IR::Opcode::GetSFlag:
+        inst.ReplaceUsesWith(pass.ReadVariable(SignFlagTag{}, block));
+        break;
+    case IR::Opcode::GetCFlag:
+        inst.ReplaceUsesWith(pass.ReadVariable(CarryFlagTag{}, block));
+        break;
+    case IR::Opcode::GetOFlag:
+        inst.ReplaceUsesWith(pass.ReadVariable(OverflowFlagTag{}, block));
+        break;
+    default:
+        break;
+    }
+}
+
+void VisitBlock(Pass&
pass, IR::Block* block) { + for (IR::Inst& inst : block->Instructions()) { + VisitInst(pass, block, inst); + } + pass.SealBlock(block); +} +} // Anonymous namespace + +void SsaRewritePass(IR::Program& program) { + Pass pass; + for (IR::Block* const block : program.post_order_blocks | std::views::reverse) { + VisitBlock(pass, block); + } +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/texture_pass.cpp b/src/shader_recompiler/ir_opt/texture_pass.cpp new file mode 100755 index 000000000..44ad10d43 --- /dev/null +++ b/src/shader_recompiler/ir_opt/texture_pass.cpp @@ -0,0 +1,523 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include +#include + +#include + +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/breadth_first_search.h" +#include "shader_recompiler/frontend/ir/ir_emitter.h" +#include "shader_recompiler/ir_opt/passes.h" +#include "shader_recompiler/shader_info.h" + +namespace Shader::Optimization { +namespace { +struct ConstBufferAddr { + u32 index; + u32 offset; + u32 secondary_index; + u32 secondary_offset; + IR::U32 dynamic_offset; + u32 count; + bool has_secondary; +}; + +struct TextureInst { + ConstBufferAddr cbuf; + IR::Inst* inst; + IR::Block* block; +}; + +using TextureInstVector = boost::container::small_vector; + +constexpr u32 DESCRIPTOR_SIZE = 8; +constexpr u32 DESCRIPTOR_SIZE_SHIFT = static_cast(std::countr_zero(DESCRIPTOR_SIZE)); + +IR::Opcode IndexedInstruction(const IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::BindlessImageSampleImplicitLod: + case IR::Opcode::BoundImageSampleImplicitLod: + return IR::Opcode::ImageSampleImplicitLod; + case IR::Opcode::BoundImageSampleExplicitLod: + case IR::Opcode::BindlessImageSampleExplicitLod: + return IR::Opcode::ImageSampleExplicitLod; + case IR::Opcode::BoundImageSampleDrefImplicitLod: + case IR::Opcode::BindlessImageSampleDrefImplicitLod: + return IR::Opcode::ImageSampleDrefImplicitLod; + case IR::Opcode::BoundImageSampleDrefExplicitLod: + case IR::Opcode::BindlessImageSampleDrefExplicitLod: + return IR::Opcode::ImageSampleDrefExplicitLod; + case IR::Opcode::BindlessImageGather: + case IR::Opcode::BoundImageGather: + return IR::Opcode::ImageGather; + case IR::Opcode::BindlessImageGatherDref: + case IR::Opcode::BoundImageGatherDref: + return IR::Opcode::ImageGatherDref; + case IR::Opcode::BindlessImageFetch: + case IR::Opcode::BoundImageFetch: + return IR::Opcode::ImageFetch; + case IR::Opcode::BoundImageQueryDimensions: + case IR::Opcode::BindlessImageQueryDimensions: + return IR::Opcode::ImageQueryDimensions; + case IR::Opcode::BoundImageQueryLod: + case IR::Opcode::BindlessImageQueryLod: + return IR::Opcode::ImageQueryLod; + case IR::Opcode::BoundImageGradient: + case IR::Opcode::BindlessImageGradient: + return IR::Opcode::ImageGradient; + case IR::Opcode::BoundImageRead: + case IR::Opcode::BindlessImageRead: + return IR::Opcode::ImageRead; + case IR::Opcode::BoundImageWrite: + case IR::Opcode::BindlessImageWrite: + return IR::Opcode::ImageWrite; + case IR::Opcode::BoundImageAtomicIAdd32: + case IR::Opcode::BindlessImageAtomicIAdd32: + return IR::Opcode::ImageAtomicIAdd32; + case IR::Opcode::BoundImageAtomicSMin32: + case IR::Opcode::BindlessImageAtomicSMin32: + return IR::Opcode::ImageAtomicSMin32; + case IR::Opcode::BoundImageAtomicUMin32: + case 
IR::Opcode::BindlessImageAtomicUMin32: + return IR::Opcode::ImageAtomicUMin32; + case IR::Opcode::BoundImageAtomicSMax32: + case IR::Opcode::BindlessImageAtomicSMax32: + return IR::Opcode::ImageAtomicSMax32; + case IR::Opcode::BoundImageAtomicUMax32: + case IR::Opcode::BindlessImageAtomicUMax32: + return IR::Opcode::ImageAtomicUMax32; + case IR::Opcode::BoundImageAtomicInc32: + case IR::Opcode::BindlessImageAtomicInc32: + return IR::Opcode::ImageAtomicInc32; + case IR::Opcode::BoundImageAtomicDec32: + case IR::Opcode::BindlessImageAtomicDec32: + return IR::Opcode::ImageAtomicDec32; + case IR::Opcode::BoundImageAtomicAnd32: + case IR::Opcode::BindlessImageAtomicAnd32: + return IR::Opcode::ImageAtomicAnd32; + case IR::Opcode::BoundImageAtomicOr32: + case IR::Opcode::BindlessImageAtomicOr32: + return IR::Opcode::ImageAtomicOr32; + case IR::Opcode::BoundImageAtomicXor32: + case IR::Opcode::BindlessImageAtomicXor32: + return IR::Opcode::ImageAtomicXor32; + case IR::Opcode::BoundImageAtomicExchange32: + case IR::Opcode::BindlessImageAtomicExchange32: + return IR::Opcode::ImageAtomicExchange32; + default: + return IR::Opcode::Void; + } +} + +bool IsBindless(const IR::Inst& inst) { + switch (inst.GetOpcode()) { + case IR::Opcode::BindlessImageSampleImplicitLod: + case IR::Opcode::BindlessImageSampleExplicitLod: + case IR::Opcode::BindlessImageSampleDrefImplicitLod: + case IR::Opcode::BindlessImageSampleDrefExplicitLod: + case IR::Opcode::BindlessImageGather: + case IR::Opcode::BindlessImageGatherDref: + case IR::Opcode::BindlessImageFetch: + case IR::Opcode::BindlessImageQueryDimensions: + case IR::Opcode::BindlessImageQueryLod: + case IR::Opcode::BindlessImageGradient: + case IR::Opcode::BindlessImageRead: + case IR::Opcode::BindlessImageWrite: + case IR::Opcode::BindlessImageAtomicIAdd32: + case IR::Opcode::BindlessImageAtomicSMin32: + case IR::Opcode::BindlessImageAtomicUMin32: + case IR::Opcode::BindlessImageAtomicSMax32: + case IR::Opcode::BindlessImageAtomicUMax32: + case IR::Opcode::BindlessImageAtomicInc32: + case IR::Opcode::BindlessImageAtomicDec32: + case IR::Opcode::BindlessImageAtomicAnd32: + case IR::Opcode::BindlessImageAtomicOr32: + case IR::Opcode::BindlessImageAtomicXor32: + case IR::Opcode::BindlessImageAtomicExchange32: + return true; + case IR::Opcode::BoundImageSampleImplicitLod: + case IR::Opcode::BoundImageSampleExplicitLod: + case IR::Opcode::BoundImageSampleDrefImplicitLod: + case IR::Opcode::BoundImageSampleDrefExplicitLod: + case IR::Opcode::BoundImageGather: + case IR::Opcode::BoundImageGatherDref: + case IR::Opcode::BoundImageFetch: + case IR::Opcode::BoundImageQueryDimensions: + case IR::Opcode::BoundImageQueryLod: + case IR::Opcode::BoundImageGradient: + case IR::Opcode::BoundImageRead: + case IR::Opcode::BoundImageWrite: + case IR::Opcode::BoundImageAtomicIAdd32: + case IR::Opcode::BoundImageAtomicSMin32: + case IR::Opcode::BoundImageAtomicUMin32: + case IR::Opcode::BoundImageAtomicSMax32: + case IR::Opcode::BoundImageAtomicUMax32: + case IR::Opcode::BoundImageAtomicInc32: + case IR::Opcode::BoundImageAtomicDec32: + case IR::Opcode::BoundImageAtomicAnd32: + case IR::Opcode::BoundImageAtomicOr32: + case IR::Opcode::BoundImageAtomicXor32: + case IR::Opcode::BoundImageAtomicExchange32: + return false; + default: + throw InvalidArgument("Invalid opcode {}", inst.GetOpcode()); + } +} + +bool IsTextureInstruction(const IR::Inst& inst) { + return IndexedInstruction(inst) != IR::Opcode::Void; +} + +std::optional TryGetConstBuffer(const IR::Inst* inst); + +std::optional 
Track(const IR::Value& value) { + return IR::BreadthFirstSearch(value, TryGetConstBuffer); +} + +std::optional TryGetConstBuffer(const IR::Inst* inst) { + switch (inst->GetOpcode()) { + default: + return std::nullopt; + case IR::Opcode::BitwiseOr32: { + std::optional lhs{Track(inst->Arg(0))}; + std::optional rhs{Track(inst->Arg(1))}; + if (!lhs || !rhs) { + return std::nullopt; + } + if (lhs->has_secondary || rhs->has_secondary) { + return std::nullopt; + } + if (lhs->count > 1 || rhs->count > 1) { + return std::nullopt; + } + if (lhs->index > rhs->index || lhs->offset > rhs->offset) { + std::swap(lhs, rhs); + } + return ConstBufferAddr{ + .index = lhs->index, + .offset = lhs->offset, + .secondary_index = rhs->index, + .secondary_offset = rhs->offset, + .dynamic_offset = {}, + .count = 1, + .has_secondary = true, + }; + } + case IR::Opcode::GetCbufU32x2: + case IR::Opcode::GetCbufU32: + break; + } + const IR::Value index{inst->Arg(0)}; + const IR::Value offset{inst->Arg(1)}; + if (!index.IsImmediate()) { + // Reading a bindless texture from variable indices is valid + // but not supported here at the moment + return std::nullopt; + } + if (offset.IsImmediate()) { + return ConstBufferAddr{ + .index = index.U32(), + .offset = offset.U32(), + .secondary_index = 0, + .secondary_offset = 0, + .dynamic_offset = {}, + .count = 1, + .has_secondary = false, + }; + } + IR::Inst* const offset_inst{offset.InstRecursive()}; + if (offset_inst->GetOpcode() != IR::Opcode::IAdd32) { + return std::nullopt; + } + u32 base_offset{}; + IR::U32 dynamic_offset; + if (offset_inst->Arg(0).IsImmediate()) { + base_offset = offset_inst->Arg(0).U32(); + dynamic_offset = IR::U32{offset_inst->Arg(1)}; + } else if (offset_inst->Arg(1).IsImmediate()) { + base_offset = offset_inst->Arg(1).U32(); + dynamic_offset = IR::U32{offset_inst->Arg(0)}; + } else { + return std::nullopt; + } + return ConstBufferAddr{ + .index = index.U32(), + .offset = base_offset, + .secondary_index = 0, + .secondary_offset = 0, + .dynamic_offset = dynamic_offset, + .count = 8, + .has_secondary = false, + }; +} + +TextureInst MakeInst(Environment& env, IR::Block* block, IR::Inst& inst) { + ConstBufferAddr addr; + if (IsBindless(inst)) { + const std::optional track_addr{Track(inst.Arg(0))}; + if (!track_addr) { + throw NotImplementedException("Failed to track bindless texture constant buffer"); + } + addr = *track_addr; + } else { + addr = ConstBufferAddr{ + .index = env.TextureBoundBuffer(), + .offset = inst.Arg(0).U32(), + .secondary_index = 0, + .secondary_offset = 0, + .dynamic_offset = {}, + .count = 1, + .has_secondary = false, + }; + } + return TextureInst{ + .cbuf = addr, + .inst = &inst, + .block = block, + }; +} + +TextureType ReadTextureType(Environment& env, const ConstBufferAddr& cbuf) { + const u32 secondary_index{cbuf.has_secondary ? cbuf.secondary_index : cbuf.index}; + const u32 secondary_offset{cbuf.has_secondary ? 
cbuf.secondary_offset : cbuf.offset}; + const u32 lhs_raw{env.ReadCbufValue(cbuf.index, cbuf.offset)}; + const u32 rhs_raw{env.ReadCbufValue(secondary_index, secondary_offset)}; + return env.ReadTextureType(lhs_raw | rhs_raw); +} + +class Descriptors { +public: + explicit Descriptors(TextureBufferDescriptors& texture_buffer_descriptors_, + ImageBufferDescriptors& image_buffer_descriptors_, + TextureDescriptors& texture_descriptors_, + ImageDescriptors& image_descriptors_) + : texture_buffer_descriptors{texture_buffer_descriptors_}, + image_buffer_descriptors{image_buffer_descriptors_}, + texture_descriptors{texture_descriptors_}, image_descriptors{image_descriptors_} {} + + u32 Add(const TextureBufferDescriptor& desc) { + return Add(texture_buffer_descriptors, desc, [&desc](const auto& existing) { + return desc.cbuf_index == existing.cbuf_index && + desc.cbuf_offset == existing.cbuf_offset && + desc.secondary_cbuf_index == existing.secondary_cbuf_index && + desc.secondary_cbuf_offset == existing.secondary_cbuf_offset && + desc.count == existing.count && desc.size_shift == existing.size_shift && + desc.has_secondary == existing.has_secondary; + }); + } + + u32 Add(const ImageBufferDescriptor& desc) { + const u32 index{Add(image_buffer_descriptors, desc, [&desc](const auto& existing) { + return desc.format == existing.format && desc.cbuf_index == existing.cbuf_index && + desc.cbuf_offset == existing.cbuf_offset && desc.count == existing.count && + desc.size_shift == existing.size_shift; + })}; + image_buffer_descriptors[index].is_written |= desc.is_written; + image_buffer_descriptors[index].is_read |= desc.is_read; + return index; + } + + u32 Add(const TextureDescriptor& desc) { + return Add(texture_descriptors, desc, [&desc](const auto& existing) { + return desc.type == existing.type && desc.is_depth == existing.is_depth && + desc.has_secondary == existing.has_secondary && + desc.cbuf_index == existing.cbuf_index && + desc.cbuf_offset == existing.cbuf_offset && + desc.secondary_cbuf_index == existing.secondary_cbuf_index && + desc.secondary_cbuf_offset == existing.secondary_cbuf_offset && + desc.count == existing.count && desc.size_shift == existing.size_shift; + }); + } + + u32 Add(const ImageDescriptor& desc) { + const u32 index{Add(image_descriptors, desc, [&desc](const auto& existing) { + return desc.type == existing.type && desc.format == existing.format && + desc.cbuf_index == existing.cbuf_index && + desc.cbuf_offset == existing.cbuf_offset && desc.count == existing.count && + desc.size_shift == existing.size_shift; + })}; + image_descriptors[index].is_written |= desc.is_written; + image_descriptors[index].is_read |= desc.is_read; + return index; + } + +private: + template + static u32 Add(Descriptors& descriptors, const Descriptor& desc, Func&& pred) { + // TODO: Handle arrays + const auto it{std::ranges::find_if(descriptors, pred)}; + if (it != descriptors.end()) { + return static_cast(std::distance(descriptors.begin(), it)); + } + descriptors.push_back(desc); + return static_cast(descriptors.size()) - 1; + } + + TextureBufferDescriptors& texture_buffer_descriptors; + ImageBufferDescriptors& image_buffer_descriptors; + TextureDescriptors& texture_descriptors; + ImageDescriptors& image_descriptors; +}; +} // Anonymous namespace + +void TexturePass(Environment& env, IR::Program& program) { + TextureInstVector to_replace; + for (IR::Block* const block : program.post_order_blocks) { + for (IR::Inst& inst : block->Instructions()) { + if (!IsTextureInstruction(inst)) { + continue; + } + 
to_replace.push_back(MakeInst(env, block, inst)); + } + } + // Sort instructions to visit textures by constant buffer index, then by offset + std::ranges::sort(to_replace, [](const auto& lhs, const auto& rhs) { + return lhs.cbuf.offset < rhs.cbuf.offset; + }); + std::stable_sort(to_replace.begin(), to_replace.end(), [](const auto& lhs, const auto& rhs) { + return lhs.cbuf.index < rhs.cbuf.index; + }); + Descriptors descriptors{ + program.info.texture_buffer_descriptors, + program.info.image_buffer_descriptors, + program.info.texture_descriptors, + program.info.image_descriptors, + }; + for (TextureInst& texture_inst : to_replace) { + // TODO: Handle arrays + IR::Inst* const inst{texture_inst.inst}; + inst->ReplaceOpcode(IndexedInstruction(*inst)); + + const auto& cbuf{texture_inst.cbuf}; + auto flags{inst->Flags()}; + switch (inst->GetOpcode()) { + case IR::Opcode::ImageQueryDimensions: + flags.type.Assign(ReadTextureType(env, cbuf)); + inst->SetFlags(flags); + break; + case IR::Opcode::ImageFetch: + if (flags.type != TextureType::Color1D) { + break; + } + if (ReadTextureType(env, cbuf) == TextureType::Buffer) { + // Replace with the bound texture type only when it's a texture buffer + // If the instruction is 1D and the bound type is 2D, don't change the code and let + // the rasterizer robustness handle it + // This happens on Fire Emblem: Three Houses + flags.type.Assign(TextureType::Buffer); + } + break; + default: + break; + } + u32 index; + switch (inst->GetOpcode()) { + case IR::Opcode::ImageRead: + case IR::Opcode::ImageAtomicIAdd32: + case IR::Opcode::ImageAtomicSMin32: + case IR::Opcode::ImageAtomicUMin32: + case IR::Opcode::ImageAtomicSMax32: + case IR::Opcode::ImageAtomicUMax32: + case IR::Opcode::ImageAtomicInc32: + case IR::Opcode::ImageAtomicDec32: + case IR::Opcode::ImageAtomicAnd32: + case IR::Opcode::ImageAtomicOr32: + case IR::Opcode::ImageAtomicXor32: + case IR::Opcode::ImageAtomicExchange32: + case IR::Opcode::ImageWrite: { + if (cbuf.has_secondary) { + throw NotImplementedException("Unexpected separate sampler"); + } + const bool is_written{inst->GetOpcode() != IR::Opcode::ImageRead}; + const bool is_read{inst->GetOpcode() != IR::Opcode::ImageWrite}; + if (flags.type == TextureType::Buffer) { + index = descriptors.Add(ImageBufferDescriptor{ + .format = flags.image_format, + .is_written = is_written, + .is_read = is_read, + .cbuf_index = cbuf.index, + .cbuf_offset = cbuf.offset, + .count = cbuf.count, + .size_shift = DESCRIPTOR_SIZE_SHIFT, + }); + } else { + index = descriptors.Add(ImageDescriptor{ + .type = flags.type, + .format = flags.image_format, + .is_written = is_written, + .is_read = is_read, + .cbuf_index = cbuf.index, + .cbuf_offset = cbuf.offset, + .count = cbuf.count, + .size_shift = DESCRIPTOR_SIZE_SHIFT, + }); + } + break; + } + default: + if (flags.type == TextureType::Buffer) { + index = descriptors.Add(TextureBufferDescriptor{ + .has_secondary = cbuf.has_secondary, + .cbuf_index = cbuf.index, + .cbuf_offset = cbuf.offset, + .secondary_cbuf_index = cbuf.secondary_index, + .secondary_cbuf_offset = cbuf.secondary_offset, + .count = cbuf.count, + .size_shift = DESCRIPTOR_SIZE_SHIFT, + }); + } else { + index = descriptors.Add(TextureDescriptor{ + .type = flags.type, + .is_depth = flags.is_depth != 0, + .has_secondary = cbuf.has_secondary, + .cbuf_index = cbuf.index, + .cbuf_offset = cbuf.offset, + .secondary_cbuf_index = cbuf.secondary_index, + .secondary_cbuf_offset = cbuf.secondary_offset, + .count = cbuf.count, + .size_shift = DESCRIPTOR_SIZE_SHIFT, + 
}); + } + break; + } + flags.descriptor_index.Assign(index); + inst->SetFlags(flags); + + if (cbuf.count > 1) { + const auto insert_point{IR::Block::InstructionList::s_iterator_to(*inst)}; + IR::IREmitter ir{*texture_inst.block, insert_point}; + const IR::U32 shift{ir.Imm32(std::countr_zero(DESCRIPTOR_SIZE))}; + inst->SetArg(0, ir.ShiftRightArithmetic(cbuf.dynamic_offset, shift)); + } else { + inst->SetArg(0, IR::Value{}); + } + } +} + +void JoinTextureInfo(Info& base, Info& source) { + Descriptors descriptors{ + base.texture_buffer_descriptors, + base.image_buffer_descriptors, + base.texture_descriptors, + base.image_descriptors, + }; + for (auto& desc : source.texture_buffer_descriptors) { + descriptors.Add(desc); + } + for (auto& desc : source.image_buffer_descriptors) { + descriptors.Add(desc); + } + for (auto& desc : source.texture_descriptors) { + descriptors.Add(desc); + } + for (auto& desc : source.image_descriptors) { + descriptors.Add(desc); + } +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/ir_opt/verification_pass.cpp b/src/shader_recompiler/ir_opt/verification_pass.cpp new file mode 100755 index 000000000..975d5aadf --- /dev/null +++ b/src/shader_recompiler/ir_opt/verification_pass.cpp @@ -0,0 +1,98 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include + +#include "shader_recompiler/exception.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/ir_opt/passes.h" + +namespace Shader::Optimization { + +static void ValidateTypes(const IR::Program& program) { + for (const auto& block : program.blocks) { + for (const IR::Inst& inst : *block) { + if (inst.GetOpcode() == IR::Opcode::Phi) { + // Skip validation on phi nodes + continue; + } + const size_t num_args{inst.NumArgs()}; + for (size_t i = 0; i < num_args; ++i) { + const IR::Type t1{inst.Arg(i).Type()}; + const IR::Type t2{IR::ArgTypeOf(inst.GetOpcode(), i)}; + if (!IR::AreTypesCompatible(t1, t2)) { + throw LogicError("Invalid types in block:\n{}", IR::DumpBlock(*block)); + } + } + } + } +} + +static void ValidateUses(const IR::Program& program) { + std::map actual_uses; + for (const auto& block : program.blocks) { + for (const IR::Inst& inst : *block) { + const size_t num_args{inst.NumArgs()}; + for (size_t i = 0; i < num_args; ++i) { + const IR::Value arg{inst.Arg(i)}; + if (!arg.IsImmediate()) { + ++actual_uses[arg.Inst()]; + } + } + } + } + for (const auto [inst, uses] : actual_uses) { + if (inst->UseCount() != uses) { + throw LogicError("Invalid uses in block: {}", IR::DumpProgram(program)); + } + } +} + +static void ValidateForwardDeclarations(const IR::Program& program) { + std::set definitions; + for (const IR::Block* const block : program.blocks) { + for (const IR::Inst& inst : *block) { + definitions.emplace(&inst); + if (inst.GetOpcode() == IR::Opcode::Phi) { + // Phi nodes can have forward declarations + continue; + } + const size_t num_args{inst.NumArgs()}; + for (size_t arg = 0; arg < num_args; ++arg) { + if (inst.Arg(arg).IsImmediate()) { + continue; + } + if (!definitions.contains(inst.Arg(arg).Inst())) { + throw LogicError("Forward declaration in block: {}", IR::DumpBlock(*block)); + } + } + } + } +} + +static void ValidatePhiNodes(const IR::Program& program) { + for (const IR::Block* const block : program.blocks) { + bool no_more_phis{false}; + for (const IR::Inst& inst : *block) { + if 
(inst.GetOpcode() == IR::Opcode::Phi) { + if (no_more_phis) { + throw LogicError("Interleaved phi nodes: {}", IR::DumpBlock(*block)); + } + } else { + no_more_phis = true; + } + } + } +} + +void VerificationPass(const IR::Program& program) { + ValidateTypes(program); + ValidateUses(program); + ValidateForwardDeclarations(program); + ValidatePhiNodes(program); +} + +} // namespace Shader::Optimization diff --git a/src/shader_recompiler/object_pool.h b/src/shader_recompiler/object_pool.h new file mode 100755 index 000000000..f8b255b66 --- /dev/null +++ b/src/shader_recompiler/object_pool.h @@ -0,0 +1,104 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include + +namespace Shader { + +template +requires std::is_destructible_v class ObjectPool { +public: + explicit ObjectPool(size_t chunk_size = 8192) : new_chunk_size{chunk_size} { + node = &chunks.emplace_back(new_chunk_size); + } + + template + requires std::is_constructible_v[[nodiscard]] T* Create(Args&&... args) { + return std::construct_at(Memory(), std::forward(args)...); + } + + void ReleaseContents() { + if (chunks.empty()) { + return; + } + Chunk& root{chunks.front()}; + if (root.used_objects == root.num_objects) { + // Root chunk has been filled, squash allocations into it + const size_t total_objects{root.num_objects + new_chunk_size * (chunks.size() - 1)}; + chunks.clear(); + chunks.emplace_back(total_objects); + } else { + root.Release(); + chunks.resize(1); + } + chunks.shrink_to_fit(); + node = &chunks.front(); + } + +private: + struct NonTrivialDummy { + NonTrivialDummy() noexcept {} + }; + + union Storage { + Storage() noexcept {} + ~Storage() noexcept {} + + NonTrivialDummy dummy{}; + T object; + }; + + struct Chunk { + explicit Chunk() = default; + explicit Chunk(size_t size) + : num_objects{size}, storage{std::make_unique(size)} {} + + Chunk& operator=(Chunk&& rhs) noexcept { + Release(); + used_objects = std::exchange(rhs.used_objects, 0); + num_objects = std::exchange(rhs.num_objects, 0); + storage = std::move(rhs.storage); + } + + Chunk(Chunk&& rhs) noexcept + : used_objects{std::exchange(rhs.used_objects, 0)}, + num_objects{std::exchange(rhs.num_objects, 0)}, storage{std::move(rhs.storage)} {} + + ~Chunk() { + Release(); + } + + void Release() { + std::destroy_n(storage.get(), used_objects); + used_objects = 0; + } + + size_t used_objects{}; + size_t num_objects{}; + std::unique_ptr storage; + }; + + [[nodiscard]] T* Memory() { + Chunk* const chunk{FreeChunk()}; + return &chunk->storage[chunk->used_objects++].object; + } + + [[nodiscard]] Chunk* FreeChunk() { + if (node->used_objects != node->num_objects) { + return node; + } + node = &chunks.emplace_back(new_chunk_size); + return node; + } + + Chunk* node{}; + std::vector chunks; + size_t new_chunk_size{}; +}; + +} // namespace Shader diff --git a/src/shader_recompiler/profile.h b/src/shader_recompiler/profile.h new file mode 100755 index 000000000..501dcaf71 --- /dev/null +++ b/src/shader_recompiler/profile.h @@ -0,0 +1,72 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
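The ObjectPool in object_pool.h above is a bump allocator: objects are constructed in-place inside large chunks and torn down in bulk, avoiding per-object heap traffic between shader translations. A hypothetical usage sketch, assuming the header above compiles as shipped:

#include <string>

#include "shader_recompiler/object_pool.h"

// Illustrative only: PoolExample is not part of the patch.
void PoolExample() {
    Shader::ObjectPool<std::string> pool;
    std::string* str = pool.Create(5, 'x'); // constructs "xxxxx" inside the current chunk
    (void)str;
    pool.ReleaseContents(); // destroys every object, keeps one merged chunk allocated
}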
+ +#pragma once + +#include "common/common_types.h" + +namespace Shader { + +struct Profile { + u32 supported_spirv{0x00010000}; + + bool unified_descriptor_binding{}; + bool support_descriptor_aliasing{}; + bool support_int8{}; + bool support_int16{}; + bool support_int64{}; + bool support_vertex_instance_id{}; + bool support_float_controls{}; + bool support_separate_denorm_behavior{}; + bool support_separate_rounding_mode{}; + bool support_fp16_denorm_preserve{}; + bool support_fp32_denorm_preserve{}; + bool support_fp16_denorm_flush{}; + bool support_fp32_denorm_flush{}; + bool support_fp16_signed_zero_nan_preserve{}; + bool support_fp32_signed_zero_nan_preserve{}; + bool support_fp64_signed_zero_nan_preserve{}; + bool support_explicit_workgroup_layout{}; + bool support_vote{}; + bool support_viewport_index_layer_non_geometry{}; + bool support_viewport_mask{}; + bool support_typeless_image_loads{}; + bool support_demote_to_helper_invocation{}; + bool support_int64_atomics{}; + bool support_derivative_control{}; + bool support_geometry_shader_passthrough{}; + bool support_gl_nv_gpu_shader_5{}; + bool support_gl_amd_gpu_shader_half_float{}; + bool support_gl_texture_shadow_lod{}; + bool support_gl_warp_intrinsics{}; + bool support_gl_variable_aoffi{}; + bool support_gl_sparse_textures{}; + bool support_gl_derivative_control{}; + + bool warp_size_potentially_larger_than_guest{}; + + bool lower_left_origin_mode{}; + /// Fragment outputs have to be declared even if they are not written to avoid undefined values. + /// See Ori and the Blind Forest's main menu for reference. + bool need_declared_frag_colors{}; + /// Prevents fast math optimizations that may cause inaccuracies + bool need_fastmath_off{}; + + /// OpFClamp is broken and OpFMax + OpFMin should be used instead + bool has_broken_spirv_clamp{}; + /// Offset image operands with an unsigned type do not work + bool has_broken_unsigned_image_offsets{}; + /// Signed instructions with unsigned data types are misinterpreted + bool has_broken_signed_operations{}; + /// Float controls break when fp16 is enabled + bool has_broken_fp16_float_controls{}; + /// Dynamic vec4 indexing is broken on some OpenGL drivers + bool has_gl_component_indexing_bug{}; + /// The precise type qualifier is broken in the fragment stage of some drivers + bool has_gl_precise_bug{}; + /// Ignores SPIR-V ordered vs unordered using GLSL semantics + bool ignore_nan_fp_comparisons{}; +}; + +} // namespace Shader diff --git a/src/shader_recompiler/program_header.h b/src/shader_recompiler/program_header.h new file mode 100755 index 000000000..bd6c2bfb5 --- /dev/null +++ b/src/shader_recompiler/program_header.h @@ -0,0 +1,219 @@ +// Copyright 2018 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
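The generic attribute maps in program_header.h below pack two attributes per byte, one enable bit per component, which the InputGeneric/OutputGeneric accessors decode. A standalone model of that nibble decoding follows; DecodeGeneric and the sample byte are illustrative, not part of the header.

#include <array>
#include <cassert>
#include <cstddef>
#include <cstdint>

// Two attributes per byte: attribute index selects the byte and nibble,
// then each of the four low bits enables one component.
static std::array<bool, 4> DecodeGeneric(const std::uint8_t* vec, std::size_t index) {
    const int data = vec[index >> 1] >> ((index % 2) * 4);
    return {(data & 1) != 0, (data & 2) != 0, (data & 4) != 0, (data & 8) != 0};
}

int main() {
    const std::uint8_t vec[1]{0xA5}; // attribute 0 -> 0b0101, attribute 1 -> 0b1010
    assert(DecodeGeneric(vec, 0)[0] && !DecodeGeneric(vec, 0)[1]);
    assert(!DecodeGeneric(vec, 1)[0] && DecodeGeneric(vec, 1)[1]);
}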
+ +#pragma once + +#include +#include + +#include "common/bit_field.h" +#include "common/common_funcs.h" +#include "common/common_types.h" + +namespace Shader { + +enum class OutputTopology : u32 { + PointList = 1, + LineStrip = 6, + TriangleStrip = 7, +}; + +enum class PixelImap : u8 { + Unused = 0, + Constant = 1, + Perspective = 2, + ScreenLinear = 3, +}; + +// Documentation in: +// http://download.nvidia.com/open-gpu-doc/Shader-Program-Header/1/Shader-Program-Header.html +struct ProgramHeader { + union { + BitField<0, 5, u32> sph_type; + BitField<5, 5, u32> version; + BitField<10, 4, u32> shader_type; + BitField<14, 1, u32> mrt_enable; + BitField<15, 1, u32> kills_pixels; + BitField<16, 1, u32> does_global_store; + BitField<17, 4, u32> sass_version; + BitField<21, 2, u32> reserved1; + BitField<24, 1, u32> geometry_passthrough; + BitField<25, 1, u32> reserved2; + BitField<26, 1, u32> does_load_or_store; + BitField<27, 1, u32> does_fp64; + BitField<28, 4, u32> stream_out_mask; + } common0; + + union { + BitField<0, 24, u32> shader_local_memory_low_size; + BitField<24, 8, u32> per_patch_attribute_count; + } common1; + + union { + BitField<0, 24, u32> shader_local_memory_high_size; + BitField<24, 8, u32> threads_per_input_primitive; + } common2; + + union { + BitField<0, 24, u32> shader_local_memory_crs_size; + BitField<24, 4, OutputTopology> output_topology; + BitField<28, 4, u32> reserved; + } common3; + + union { + BitField<0, 12, u32> max_output_vertices; + BitField<12, 8, u32> store_req_start; // NOTE: not used by geometry shaders. + BitField<20, 4, u32> reserved; + BitField<24, 8, u32> store_req_end; // NOTE: not used by geometry shaders. + } common4; + + union { + struct { + INSERT_PADDING_BYTES_NOINIT(3); // ImapSystemValuesA + + union { + BitField<0, 1, u8> primitive_array_id; + BitField<1, 1, u8> rt_array_index; + BitField<2, 1, u8> viewport_index; + BitField<3, 1, u8> point_size; + BitField<4, 1, u8> position_x; + BitField<5, 1, u8> position_y; + BitField<6, 1, u8> position_z; + BitField<7, 1, u8> position_w; + u8 raw; + } imap_systemb; + + std::array imap_generic_vector; + + INSERT_PADDING_BYTES_NOINIT(2); // ImapColor + union { + BitField<0, 8, u16> clip_distances; + BitField<8, 1, u16> point_sprite_s; + BitField<9, 1, u16> point_sprite_t; + BitField<10, 1, u16> fog_coordinate; + BitField<12, 1, u16> tessellation_eval_point_u; + BitField<13, 1, u16> tessellation_eval_point_v; + BitField<14, 1, u16> instance_id; + BitField<15, 1, u16> vertex_id; + }; + INSERT_PADDING_BYTES_NOINIT(5); // ImapFixedFncTexture[10] + INSERT_PADDING_BYTES_NOINIT(1); // ImapReserved + INSERT_PADDING_BYTES_NOINIT(3); // OmapSystemValuesA + + union { + BitField<0, 1, u8> primitive_array_id; + BitField<1, 1, u8> rt_array_index; + BitField<2, 1, u8> viewport_index; + BitField<3, 1, u8> point_size; + BitField<4, 1, u8> position_x; + BitField<5, 1, u8> position_y; + BitField<6, 1, u8> position_z; + BitField<7, 1, u8> position_w; + u8 raw; + } omap_systemb; + + std::array omap_generic_vector; + + INSERT_PADDING_BYTES_NOINIT(2); // OmapColor + + union { + BitField<0, 8, u16> clip_distances; + BitField<8, 1, u16> point_sprite_s; + BitField<9, 1, u16> point_sprite_t; + BitField<10, 1, u16> fog_coordinate; + BitField<12, 1, u16> tessellation_eval_point_u; + BitField<13, 1, u16> tessellation_eval_point_v; + BitField<14, 1, u16> instance_id; + BitField<15, 1, u16> vertex_id; + } omap_systemc; + + INSERT_PADDING_BYTES_NOINIT(5); // OmapFixedFncTexture[10] + INSERT_PADDING_BYTES_NOINIT(1); // OmapReserved + + 
+            [[nodiscard]] std::array<bool, 4> InputGeneric(size_t index) const noexcept {
+                const int data{imap_generic_vector[index >> 1] >> ((index % 2) * 4)};
+                return {
+                    (data & 1) != 0,
+                    (data & 2) != 0,
+                    (data & 4) != 0,
+                    (data & 8) != 0,
+                };
+            }
+
+            [[nodiscard]] std::array<bool, 4> OutputGeneric(size_t index) const noexcept {
+                const int data{omap_generic_vector[index >> 1] >> ((index % 2) * 4)};
+                return {
+                    (data & 1) != 0,
+                    (data & 2) != 0,
+                    (data & 4) != 0,
+                    (data & 8) != 0,
+                };
+            }
+        } vtg;
+
+        struct {
+            INSERT_PADDING_BYTES_NOINIT(3); // ImapSystemValuesA
+
+            union {
+                BitField<0, 1, u8> primitive_array_id;
+                BitField<1, 1, u8> rt_array_index;
+                BitField<2, 1, u8> viewport_index;
+                BitField<3, 1, u8> point_size;
+                BitField<4, 1, u8> position_x;
+                BitField<5, 1, u8> position_y;
+                BitField<6, 1, u8> position_z;
+                BitField<7, 1, u8> position_w;
+                BitField<0, 4, u8> first;
+                BitField<4, 4, u8> position;
+                u8 raw;
+            } imap_systemb;
+
+            union {
+                BitField<0, 2, PixelImap> x;
+                BitField<2, 2, PixelImap> y;
+                BitField<4, 2, PixelImap> z;
+                BitField<6, 2, PixelImap> w;
+                u8 raw;
+            } imap_generic_vector[32];
+
+            INSERT_PADDING_BYTES_NOINIT(2);  // ImapColor
+            INSERT_PADDING_BYTES_NOINIT(2);  // ImapSystemValuesC
+            INSERT_PADDING_BYTES_NOINIT(10); // ImapFixedFncTexture[10]
+            INSERT_PADDING_BYTES_NOINIT(2);  // ImapReserved
+
+            struct {
+                u32 target;
+                union {
+                    BitField<0, 1, u32> sample_mask;
+                    BitField<1, 1, u32> depth;
+                    BitField<2, 30, u32> reserved;
+                };
+            } omap;
+
+            [[nodiscard]] std::array<bool, 4> EnabledOutputComponents(u32 rt) const noexcept {
+                const u32 bits{omap.target >> (rt * 4)};
+                return {(bits & 1) != 0, (bits & 2) != 0, (bits & 4) != 0, (bits & 8) != 0};
+            }
+
+            [[nodiscard]] std::array<PixelImap, 4> GenericInputMap(u32 attribute) const {
+                const auto& vector{imap_generic_vector[attribute]};
+                return {vector.x, vector.y, vector.z, vector.w};
+            }
+
+            [[nodiscard]] bool IsGenericVectorActive(size_t index) const {
+                return imap_generic_vector[index].raw != 0;
+            }
+        } ps;
+
+        std::array<u64, 10> raw;
+    };
+
+    [[nodiscard]] u64 LocalMemorySize() const noexcept {
+        return static_cast<u64>(common1.shader_local_memory_low_size) |
+               (static_cast<u64>(common2.shader_local_memory_high_size) << 24);
+    }
+};
+static_assert(sizeof(ProgramHeader) == 0x50, "Incorrect structure size");
+
+} // namespace Shader
diff --git a/src/shader_recompiler/runtime_info.h b/src/shader_recompiler/runtime_info.h
new file mode 100755
index 000000000..f3f83a258
--- /dev/null
+++ b/src/shader_recompiler/runtime_info.h
@@ -0,0 +1,88 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
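+//
+// RuntimeInfo is filled from fixed-function pipeline state that is only known at
+// draw time (vertex formats, tessellation mode, alpha test, transform feedback
+// layout) and travels alongside the IR when a backend emits host shader code.
+// A hypothetical caller translating a point-list draw might populate it like:
+//
+//     Shader::RuntimeInfo info{};
+//     info.input_topology = Shader::InputTopology::Points;
+//     info.fixed_state_point_size = 1.0f;
+//     info.alpha_test_func = Shader::CompareFunction::Always;
+//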
+
+#pragma once
+
+#include <array>
+#include <bitset>
+#include <optional>
+#include <vector>
+
+#include "common/common_types.h"
+#include "shader_recompiler/varying_state.h"
+
+namespace Shader {
+
+enum class AttributeType : u8 {
+    Float,
+    SignedInt,
+    UnsignedInt,
+    Disabled,
+};
+
+enum class InputTopology {
+    Points,
+    Lines,
+    LinesAdjacency,
+    Triangles,
+    TrianglesAdjacency,
+};
+
+enum class CompareFunction {
+    Never,
+    Less,
+    Equal,
+    LessThanEqual,
+    Greater,
+    NotEqual,
+    GreaterThanEqual,
+    Always,
+};
+
+enum class TessPrimitive {
+    Isolines,
+    Triangles,
+    Quads,
+};
+
+enum class TessSpacing {
+    Equal,
+    FractionalOdd,
+    FractionalEven,
+};
+
+struct TransformFeedbackVarying {
+    u32 buffer{};
+    u32 stride{};
+    u32 offset{};
+    u32 components{};
+};
+
+struct RuntimeInfo {
+    std::array<AttributeType, 32> generic_input_types{};
+    VaryingState previous_stage_stores;
+
+    bool convert_depth_mode{};
+    bool force_early_z{};
+
+    TessPrimitive tess_primitive{};
+    TessSpacing tess_spacing{};
+    bool tess_clockwise{};
+
+    InputTopology input_topology{};
+
+    std::optional<float> fixed_state_point_size;
+    std::optional<CompareFunction> alpha_test_func;
+    float alpha_test_reference{};
+
+    /// Static Y negate value
+    bool y_negate{};
+    /// Use storage buffers instead of global pointers on GLASM
+    bool glasm_use_storage_buffers{};
+
+    /// Transform feedback state for each varying
+    std::vector<TransformFeedbackVarying> xfb_varyings;
+};
+
+} // namespace Shader
diff --git a/src/shader_recompiler/shader_info.h b/src/shader_recompiler/shader_info.h
new file mode 100755
index 000000000..4ef4dbd40
--- /dev/null
+++ b/src/shader_recompiler/shader_info.h
@@ -0,0 +1,193 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <array>
+#include <bitset>
+
+#include "common/common_types.h"
+#include "shader_recompiler/frontend/ir/type.h"
+#include "shader_recompiler/varying_state.h"
+
+#include <boost/container/small_vector.hpp>
+#include <boost/container/static_vector.hpp>
+
+namespace Shader {
+
+enum class TextureType : u32 {
+    Color1D,
+    ColorArray1D,
+    Color2D,
+    ColorArray2D,
+    Color3D,
+    ColorCube,
+    ColorArrayCube,
+    Buffer,
+};
+constexpr u32 NUM_TEXTURE_TYPES = 8;
+
+enum class ImageFormat : u32 {
+    Typeless,
+    R8_UINT,
+    R8_SINT,
+    R16_UINT,
+    R16_SINT,
+    R32_UINT,
+    R32G32_UINT,
+    R32G32B32A32_UINT,
+};
+
+enum class Interpolation {
+    Smooth,
+    Flat,
+    NoPerspective,
+};
+
+struct ConstantBufferDescriptor {
+    u32 index;
+    u32 count;
+};
+
+struct StorageBufferDescriptor {
+    u32 cbuf_index;
+    u32 cbuf_offset;
+    u32 count;
+    bool is_written;
+};
+
+struct TextureBufferDescriptor {
+    bool has_secondary;
+    u32 cbuf_index;
+    u32 cbuf_offset;
+    u32 secondary_cbuf_index;
+    u32 secondary_cbuf_offset;
+    u32 count;
+    u32 size_shift;
+};
+using TextureBufferDescriptors = boost::container::small_vector<TextureBufferDescriptor, 6>;
+
+struct ImageBufferDescriptor {
+    ImageFormat format;
+    bool is_written;
+    bool is_read;
+    u32 cbuf_index;
+    u32 cbuf_offset;
+    u32 count;
+    u32 size_shift;
+};
+using ImageBufferDescriptors = boost::container::small_vector<ImageBufferDescriptor, 2>;
+
+struct TextureDescriptor {
+    TextureType type;
+    bool is_depth;
+    bool has_secondary;
+    u32 cbuf_index;
+    u32 cbuf_offset;
+    u32 secondary_cbuf_index;
+    u32 secondary_cbuf_offset;
+    u32 count;
+    u32 size_shift;
+};
+using TextureDescriptors = boost::container::small_vector<TextureDescriptor, 12>;
+
+struct ImageDescriptor {
+    TextureType type;
+    ImageFormat format;
+    bool is_written;
+    bool is_read;
+    u32 cbuf_index;
+    u32 cbuf_offset;
+    u32 count;
+    u32 size_shift;
+};
+using ImageDescriptors = boost::container::small_vector<ImageDescriptor, 4>;
+
+struct Info {
+    static constexpr size_t MAX_CBUFS{18};
+    static constexpr size_t MAX_SSBOS{32};
+
+    bool uses_workgroup_id{};
+    bool uses_local_invocation_id{};
+    bool uses_invocation_id{};
+    bool uses_sample_id{};
+    bool uses_is_helper_invocation{};
+    bool uses_subgroup_invocation_id{};
+    bool uses_subgroup_shuffles{};
+    std::array<bool, 30> uses_patches{};
+
+    std::array<Interpolation, 32> interpolation{};
+    VaryingState loads;
+    VaryingState stores;
+    VaryingState passthrough;
+
+    bool loads_indexed_attributes{};
+
+    std::array<bool, 8> stores_frag_color{};
+    bool stores_sample_mask{};
+    bool stores_frag_depth{};
+
+    bool stores_tess_level_outer{};
+    bool stores_tess_level_inner{};
+
+    bool stores_indexed_attributes{};
+
+    bool stores_global_memory{};
+
+    bool uses_fp16{};
+    bool uses_fp64{};
+    bool uses_fp16_denorms_flush{};
+    bool uses_fp16_denorms_preserve{};
+    bool uses_fp32_denorms_flush{};
+    bool uses_fp32_denorms_preserve{};
+    bool uses_int8{};
+    bool uses_int16{};
+    bool uses_int64{};
+    bool uses_image_1d{};
+    bool uses_sampled_1d{};
+    bool uses_sparse_residency{};
+    bool uses_demote_to_helper_invocation{};
+    bool uses_subgroup_vote{};
+    bool uses_subgroup_mask{};
+    bool uses_fswzadd{};
+    bool uses_derivatives{};
+    bool uses_typeless_image_reads{};
+    bool uses_typeless_image_writes{};
+    bool uses_image_buffers{};
+    bool uses_shared_increment{};
+    bool uses_shared_decrement{};
+    bool uses_global_increment{};
+    bool uses_global_decrement{};
+    bool uses_atomic_f32_add{};
+    bool uses_atomic_f16x2_add{};
+    bool uses_atomic_f16x2_min{};
+    bool uses_atomic_f16x2_max{};
+    bool uses_atomic_f32x2_add{};
+    bool uses_atomic_f32x2_min{};
+    bool uses_atomic_f32x2_max{};
+    bool uses_atomic_s32_min{};
+    bool uses_atomic_s32_max{};
+    bool uses_int64_bit_atomics{};
+    bool uses_global_memory{};
+    bool uses_atomic_image_u32{};
+    bool uses_shadow_lod{};
+
+    IR::Type used_constant_buffer_types{};
+    IR::Type used_storage_buffer_types{};
+
+    u32 constant_buffer_mask{};
+    std::array<u32, MAX_CBUFS> constant_buffer_used_sizes{};
+    u32 nvn_buffer_base{};
+    std::bitset<16> nvn_buffer_used{};
+
+    boost::container::static_vector<ConstantBufferDescriptor, MAX_CBUFS>
+        constant_buffer_descriptors;
+    boost::container::static_vector<StorageBufferDescriptor, MAX_SSBOS> storage_buffers_descriptors;
+    TextureBufferDescriptors texture_buffer_descriptors;
+    ImageBufferDescriptors image_buffer_descriptors;
+    TextureDescriptors texture_descriptors;
+    ImageDescriptors image_descriptors;
+};
+
+} // namespace Shader
diff --git a/src/shader_recompiler/stage.h b/src/shader_recompiler/stage.h
new file mode 100755
index 000000000..5c1c8d8fc
--- /dev/null
+++ b/src/shader_recompiler/stage.h
@@ -0,0 +1,28 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include "common/common_types.h"
+
+namespace Shader {
+
+enum class Stage : u32 {
+    VertexB,
+    TessellationControl,
+    TessellationEval,
+    Geometry,
+    Fragment,
+
+    Compute,
+
+    VertexA,
+};
+constexpr u32 MaxStageTypes = 6;
+
+[[nodiscard]] constexpr Stage StageFromIndex(size_t index) noexcept {
+    return static_cast<Stage>(static_cast<size_t>(Stage::VertexB) + index);
+}
+
+} // namespace Shader
diff --git a/src/shader_recompiler/varying_state.h b/src/shader_recompiler/varying_state.h
new file mode 100755
index 000000000..9d7b24a76
--- /dev/null
+++ b/src/shader_recompiler/varying_state.h
@@ -0,0 +1,69 @@
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
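+//
+// VaryingState below keeps one bit per IR::Attribute (256 entries), so a whole
+// varying vector occupies four consecutive bits. For illustration, marking
+// generic attribute 2 as fully written and querying one component looks like:
+//
+//     Shader::VaryingState stores;
+//     for (size_t c = 0; c < 4; ++c) {
+//         stores.Set(IR::Attribute::Generic2X + c, true);
+//     }
+//     const bool writes_y = stores.Generic(2, 1); // true
+//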
+
+#pragma once
+
+#include <bitset>
+#include <cstddef>
+
+#include "shader_recompiler/frontend/ir/attribute.h"
+
+namespace Shader {
+
+struct VaryingState {
+    std::bitset<256> mask{};
+
+    void Set(IR::Attribute attribute, bool state = true) {
+        mask[static_cast<size_t>(attribute)] = state;
+    }
+
+    [[nodiscard]] bool operator[](IR::Attribute attribute) const noexcept {
+        return mask[static_cast<size_t>(attribute)];
+    }
+
+    [[nodiscard]] bool AnyComponent(IR::Attribute base) const noexcept {
+        return mask[static_cast<size_t>(base) + 0] || mask[static_cast<size_t>(base) + 1] ||
+               mask[static_cast<size_t>(base) + 2] || mask[static_cast<size_t>(base) + 3];
+    }
+
+    [[nodiscard]] bool AllComponents(IR::Attribute base) const noexcept {
+        return mask[static_cast<size_t>(base) + 0] && mask[static_cast<size_t>(base) + 1] &&
+               mask[static_cast<size_t>(base) + 2] && mask[static_cast<size_t>(base) + 3];
+    }
+
+    [[nodiscard]] bool IsUniform(IR::Attribute base) const noexcept {
+        return AnyComponent(base) == AllComponents(base);
+    }
+
+    [[nodiscard]] bool Generic(size_t index, size_t component) const noexcept {
+        return mask[static_cast<size_t>(IR::Attribute::Generic0X) + index * 4 + component];
+    }
+
+    [[nodiscard]] bool Generic(size_t index) const noexcept {
+        return Generic(index, 0) || Generic(index, 1) || Generic(index, 2) || Generic(index, 3);
+    }
+
+    [[nodiscard]] bool ClipDistances() const noexcept {
+        return AnyComponent(IR::Attribute::ClipDistance0) ||
+               AnyComponent(IR::Attribute::ClipDistance4);
+    }
+
+    [[nodiscard]] bool Legacy() const noexcept {
+        return AnyComponent(IR::Attribute::ColorFrontDiffuseR) ||
+               AnyComponent(IR::Attribute::ColorFrontSpecularR) ||
+               AnyComponent(IR::Attribute::ColorBackDiffuseR) ||
+               AnyComponent(IR::Attribute::ColorBackSpecularR) || FixedFunctionTexture();
+    }
+
+    [[nodiscard]] bool FixedFunctionTexture() const noexcept {
+        for (size_t index = 0; index < 10; ++index) {
+            if (AnyComponent(IR::Attribute::FixedFncTexture0S + index * 4)) {
+                return true;
+            }
+        }
+        return false;
+    }
+};
+
+} // namespace Shader
diff --git a/src/tests/common/unique_function.cpp b/src/tests/common/unique_function.cpp
index ac9912738..aa6e86593 100755
--- a/src/tests/common/unique_function.cpp
+++ b/src/tests/common/unique_function.cpp
@@ -17,10 +17,12 @@ struct Noisy {
     Noisy& operator=(Noisy&& rhs) noexcept {
         state = "Move assigned";
         rhs.state = "Moved away";
+        return *this;
     }
     Noisy(const Noisy&) : state{"Copied constructed"} {}
     Noisy& operator=(const Noisy&) {
         state = "Copied assigned";
+        return *this;
     }
 
     std::string state;
diff --git a/src/video_core/CMakeLists.txt b/src/video_core/CMakeLists.txt
index e4de55f4d..007ecc13e 100755
--- a/src/video_core/CMakeLists.txt
+++ b/src/video_core/CMakeLists.txt
@@ -29,7 +29,6 @@ add_library(video_core STATIC
     dirty_flags.h
     dma_pusher.cpp
     dma_pusher.h
-    engines/const_buffer_engine_interface.h
    engines/const_buffer_info.h
    engines/engine_interface.h
    engines/engine_upload.cpp
@@ -44,9 +43,6 @@ add_library(video_core STATIC
    engines/maxwell_3d.h
    engines/maxwell_dma.cpp
    engines/maxwell_dma.h
-    engines/shader_bytecode.h
-    engines/shader_header.h
-    engines/shader_type.h
    framebuffer_config.h
    macro/macro.cpp
    macro/macro.h
@@ -61,8 +57,6 @@ add_library(video_core STATIC
    gpu.h
    gpu_thread.cpp
    gpu_thread.h
-    guest_driver.cpp
-    guest_driver.h
    memory_manager.cpp
    memory_manager.h
    query_cache.h
@@ -71,26 +65,25 @@ add_library(video_core STATIC
    rasterizer_interface.h
    renderer_base.cpp
    renderer_base.h
-    renderer_opengl/gl_arb_decompiler.cpp
-    renderer_opengl/gl_arb_decompiler.h
    renderer_opengl/gl_buffer_cache.cpp
    renderer_opengl/gl_buffer_cache.h
+
renderer_opengl/gl_compute_pipeline.cpp + renderer_opengl/gl_compute_pipeline.h renderer_opengl/gl_device.cpp renderer_opengl/gl_device.h renderer_opengl/gl_fence_manager.cpp renderer_opengl/gl_fence_manager.h + renderer_opengl/gl_graphics_pipeline.cpp + renderer_opengl/gl_graphics_pipeline.h renderer_opengl/gl_rasterizer.cpp renderer_opengl/gl_rasterizer.h renderer_opengl/gl_resource_manager.cpp renderer_opengl/gl_resource_manager.h renderer_opengl/gl_shader_cache.cpp renderer_opengl/gl_shader_cache.h - renderer_opengl/gl_shader_decompiler.cpp - renderer_opengl/gl_shader_decompiler.h - renderer_opengl/gl_shader_disk_cache.cpp - renderer_opengl/gl_shader_disk_cache.h renderer_opengl/gl_shader_manager.cpp renderer_opengl/gl_shader_manager.h + renderer_opengl/gl_shader_context.h renderer_opengl/gl_shader_util.cpp renderer_opengl/gl_shader_util.h renderer_opengl/gl_state_tracker.cpp @@ -112,6 +105,7 @@ add_library(video_core STATIC renderer_vulkan/fixed_pipeline_state.h renderer_vulkan/maxwell_to_vk.cpp renderer_vulkan/maxwell_to_vk.h + renderer_vulkan/pipeline_helper.h renderer_vulkan/renderer_vulkan.h renderer_vulkan/renderer_vulkan.cpp renderer_vulkan/vk_blit_screen.cpp @@ -138,12 +132,12 @@ add_library(video_core STATIC renderer_vulkan/vk_query_cache.h renderer_vulkan/vk_rasterizer.cpp renderer_vulkan/vk_rasterizer.h + renderer_vulkan/vk_render_pass_cache.cpp + renderer_vulkan/vk_render_pass_cache.h renderer_vulkan/vk_resource_pool.cpp renderer_vulkan/vk_resource_pool.h renderer_vulkan/vk_scheduler.cpp renderer_vulkan/vk_scheduler.h - renderer_vulkan/vk_shader_decompiler.cpp - renderer_vulkan/vk_shader_decompiler.h renderer_vulkan/vk_shader_util.cpp renderer_vulkan/vk_shader_util.h renderer_vulkan/vk_staging_buffer_pool.cpp @@ -156,60 +150,12 @@ add_library(video_core STATIC renderer_vulkan/vk_texture_cache.h renderer_vulkan/vk_update_descriptor.cpp renderer_vulkan/vk_update_descriptor.h + shader_cache.cpp shader_cache.h + shader_environment.cpp + shader_environment.h shader_notify.cpp shader_notify.h - shader/decode/arithmetic.cpp - shader/decode/arithmetic_immediate.cpp - shader/decode/bfe.cpp - shader/decode/bfi.cpp - shader/decode/shift.cpp - shader/decode/arithmetic_integer.cpp - shader/decode/arithmetic_integer_immediate.cpp - shader/decode/arithmetic_half.cpp - shader/decode/arithmetic_half_immediate.cpp - shader/decode/ffma.cpp - shader/decode/hfma2.cpp - shader/decode/conversion.cpp - shader/decode/memory.cpp - shader/decode/texture.cpp - shader/decode/image.cpp - shader/decode/float_set_predicate.cpp - shader/decode/integer_set_predicate.cpp - shader/decode/half_set_predicate.cpp - shader/decode/predicate_set_register.cpp - shader/decode/predicate_set_predicate.cpp - shader/decode/register_set_predicate.cpp - shader/decode/float_set.cpp - shader/decode/integer_set.cpp - shader/decode/half_set.cpp - shader/decode/video.cpp - shader/decode/warp.cpp - shader/decode/xmad.cpp - shader/decode/other.cpp - shader/ast.cpp - shader/ast.h - shader/async_shaders.cpp - shader/async_shaders.h - shader/compiler_settings.cpp - shader/compiler_settings.h - shader/control_flow.cpp - shader/control_flow.h - shader/decode.cpp - shader/expr.cpp - shader/expr.h - shader/memory_util.cpp - shader/memory_util.h - shader/node_helper.cpp - shader/node_helper.h - shader/node.h - shader/registry.cpp - shader/registry.h - shader/shader_ir.cpp - shader/shader_ir.h - shader/track.cpp - shader/transform_feedback.cpp - shader/transform_feedback.h surface.cpp surface.h texture_cache/accelerated_swizzle.cpp @@ 
-242,6 +188,8 @@ add_library(video_core STATIC
     textures/decoders.h
     textures/texture.cpp
     textures/texture.h
+    transform_feedback.cpp
+    transform_feedback.h
     video_core.cpp
     video_core.h
     vulkan_common/vulkan_debug_callback.cpp
@@ -265,7 +213,7 @@ add_library(video_core STATIC
 create_target_directory_groups(video_core)
 
 target_link_libraries(video_core PUBLIC common core)
-target_link_libraries(video_core PRIVATE glad xbyak)
+target_link_libraries(video_core PUBLIC glad shader_recompiler xbyak)
 
 if (YUZU_USE_BUNDLED_FFMPEG AND NOT WIN32)
     add_dependencies(video_core ffmpeg-build)
diff --git a/src/video_core/buffer_cache/buffer_cache.h b/src/video_core/buffer_cache/buffer_cache.h
index 910909201..98dc8e8c8 100755
--- a/src/video_core/buffer_cache/buffer_cache.h
+++ b/src/video_core/buffer_cache/buffer_cache.h
@@ -31,6 +31,7 @@
 #include "video_core/engines/maxwell_3d.h"
 #include "video_core/memory_manager.h"
 #include "video_core/rasterizer_interface.h"
+#include "video_core/surface.h"
 #include "video_core/texture_cache/slot_vector.h"
 #include "video_core/texture_cache/types.h"
 
@@ -42,14 +43,19 @@ MICROPROFILE_DECLARE(GPU_DownloadMemory);
 
 using BufferId = SlotId;
 
+using VideoCore::Surface::PixelFormat;
+using namespace Common::Literals;
+
 constexpr u32 NUM_VERTEX_BUFFERS = 32;
 constexpr u32 NUM_TRANSFORM_FEEDBACK_BUFFERS = 4;
 constexpr u32 NUM_GRAPHICS_UNIFORM_BUFFERS = 18;
 constexpr u32 NUM_COMPUTE_UNIFORM_BUFFERS = 8;
 constexpr u32 NUM_STORAGE_BUFFERS = 16;
+constexpr u32 NUM_TEXTURE_BUFFERS = 16;
 constexpr u32 NUM_STAGES = 5;
 
-using namespace Common::Literals;
+using UniformBufferSizes = std::array<std::array<u32, NUM_GRAPHICS_UNIFORM_BUFFERS>, NUM_STAGES>;
+using ComputeUniformBufferSizes = std::array<u32, NUM_COMPUTE_UNIFORM_BUFFERS>;
 
 template <typename P>
 class BufferCache {
@@ -67,6 +73,7 @@ class BufferCache {
     static constexpr bool NEEDS_BIND_UNIFORM_INDEX = P::NEEDS_BIND_UNIFORM_INDEX;
     static constexpr bool NEEDS_BIND_STORAGE_INDEX = P::NEEDS_BIND_STORAGE_INDEX;
     static constexpr bool USE_MEMORY_MAPS = P::USE_MEMORY_MAPS;
+    static constexpr bool SEPARATE_IMAGE_BUFFERS_BINDINGS = P::SEPARATE_IMAGE_BUFFER_BINDINGS;
 
     static constexpr BufferId NULL_BUFFER_ID{0};
 
@@ -96,6 +103,10 @@ class BufferCache {
         BufferId buffer_id;
     };
 
+    struct TextureBufferBinding : Binding {
+        PixelFormat format;
+    };
+
     static constexpr Binding NULL_BINDING{
         .cpu_addr = 0,
         .size = 0,
@@ -133,20 +144,31 @@ public:
 
     void BindHostComputeBuffers();
 
-    void SetEnabledUniformBuffers(size_t stage, u32 enabled);
+    void SetUniformBuffersState(const std::array<u32, NUM_STAGES>& mask,
+                                const UniformBufferSizes* sizes);
 
-    void SetEnabledComputeUniformBuffers(u32 enabled);
+    void SetComputeUniformBufferState(u32 mask, const ComputeUniformBufferSizes* sizes);
 
     void UnbindGraphicsStorageBuffers(size_t stage);
 
     void BindGraphicsStorageBuffer(size_t stage, size_t ssbo_index, u32 cbuf_index, u32 cbuf_offset,
                                    bool is_written);
 
+    void UnbindGraphicsTextureBuffers(size_t stage);
+
+    void BindGraphicsTextureBuffer(size_t stage, size_t tbo_index, GPUVAddr gpu_addr, u32 size,
+                                   PixelFormat format, bool is_written, bool is_image);
+
     void UnbindComputeStorageBuffers();
 
     void BindComputeStorageBuffer(size_t ssbo_index, u32 cbuf_index, u32 cbuf_offset,
                                   bool is_written);
 
+    void UnbindComputeTextureBuffers();
+
+    void BindComputeTextureBuffer(size_t tbo_index, GPUVAddr gpu_addr, u32 size, PixelFormat format,
+                                  bool is_written, bool is_image);
+
     void FlushCachedWrites();
 
     /// Return true when there are uncommitted buffers to be downloaded
@@ -171,6 +193,7 @@ public:
     [[nodiscard]] bool IsRegionCpuModified(VAddr addr, size_t size);
 
     std::mutex mutex;
+    Runtime& runtime;
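+    // Note: runtime is public by design here; pipeline objects reach through it,
+    // e.g. to hand the cache texture/image handle pointers before binding.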
private:
    template <typename Func>
@@ -217,12 +240,16 @@ private:
 
     void BindHostGraphicsStorageBuffers(size_t stage);
 
+    void BindHostGraphicsTextureBuffers(size_t stage);
+
     void BindHostTransformFeedbackBuffers();
 
     void BindHostComputeUniformBuffers();
 
     void BindHostComputeStorageBuffers();
 
+    void BindHostComputeTextureBuffers();
+
     void DoUpdateGraphicsBuffers(bool is_indexed);
 
     void DoUpdateComputeBuffers();
@@ -237,6 +264,8 @@ private:
 
     void UpdateStorageBuffers(size_t stage);
 
+    void UpdateTextureBuffers(size_t stage);
+
     void UpdateTransformFeedbackBuffers();
 
     void UpdateTransformFeedbackBuffer(u32 index);
@@ -245,6 +274,8 @@ private:
 
     void UpdateComputeStorageBuffers();
 
+    void UpdateComputeTextureBuffers();
+
     void MarkWrittenBuffer(BufferId buffer_id, VAddr cpu_addr, u32 size);
 
     [[nodiscard]] BufferId FindBuffer(VAddr cpu_addr, u32 size);
@@ -286,6 +317,9 @@ private:
 
     [[nodiscard]] Binding StorageBufferBinding(GPUVAddr ssbo_addr) const;
 
+    [[nodiscard]] TextureBufferBinding GetTextureBufferBinding(GPUVAddr gpu_addr, u32 size,
+                                                               PixelFormat format);
+
     [[nodiscard]] std::span<u8> ImmediateBufferWithData(VAddr cpu_addr, size_t size);
 
     [[nodiscard]] std::span<u8> ImmediateBuffer(size_t wanted_capacity);
@@ -297,7 +331,6 @@ private:
     Tegra::Engines::KeplerCompute& kepler_compute;
     Tegra::MemoryManager& gpu_memory;
     Core::Memory::Memory& cpu_memory;
-    Runtime& runtime;
 
     SlotVector slot_buffers;
     DelayedDestructionRing delayed_destruction_ring;
@@ -308,20 +341,30 @@ private:
     std::array<Binding, NUM_VERTEX_BUFFERS> vertex_buffers;
     std::array<std::array<Binding, NUM_GRAPHICS_UNIFORM_BUFFERS>, NUM_STAGES> uniform_buffers;
     std::array<std::array<Binding, NUM_STORAGE_BUFFERS>, NUM_STAGES> storage_buffers;
+    std::array<std::array<TextureBufferBinding, NUM_TEXTURE_BUFFERS>, NUM_STAGES> texture_buffers;
     std::array<Binding, NUM_TRANSFORM_FEEDBACK_BUFFERS> transform_feedback_buffers;
 
     std::array<Binding, NUM_COMPUTE_UNIFORM_BUFFERS> compute_uniform_buffers;
     std::array<Binding, NUM_STORAGE_BUFFERS> compute_storage_buffers;
+    std::array<TextureBufferBinding, NUM_TEXTURE_BUFFERS> compute_texture_buffers;
 
-    std::array<u32, NUM_STAGES> enabled_uniform_buffers{};
-    u32 enabled_compute_uniform_buffers = 0;
+    std::array<u32, NUM_STAGES> enabled_uniform_buffer_masks{};
+    u32 enabled_compute_uniform_buffer_mask = 0;
+
+    const UniformBufferSizes* uniform_buffer_sizes{};
+    const ComputeUniformBufferSizes* compute_uniform_buffer_sizes{};
 
     std::array<u32, NUM_STAGES> enabled_storage_buffers{};
     std::array<u32, NUM_STAGES> written_storage_buffers{};
     u32 enabled_compute_storage_buffers = 0;
     u32 written_compute_storage_buffers = 0;
 
-    std::array<u32, NUM_STAGES> fast_bound_uniform_buffers{};
+    std::array<u32, NUM_STAGES> enabled_texture_buffers{};
+    std::array<u32, NUM_STAGES> written_texture_buffers{};
+    std::array<u32, NUM_STAGES> image_texture_buffers{};
+    u32 enabled_compute_texture_buffers = 0;
+    u32 written_compute_texture_buffers = 0;
+    u32 image_compute_texture_buffers = 0;
 
     std::array uniform_cache_hits{};
     std::array uniform_cache_shots{};
@@ -332,6 +375,10 @@ private:
     std::conditional_t<HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS, std::array<u32, NUM_STAGES>, Empty>
         dirty_uniform_buffers{};
+    std::conditional_t<IS_OPENGL, std::array<u32, NUM_STAGES>, Empty> fast_bound_uniform_buffers{};
+    std::conditional_t<HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS,
+                       std::array<std::array<u32, NUM_GRAPHICS_UNIFORM_BUFFERS>, NUM_STAGES>, Empty>
+        uniform_buffer_binding_sizes{};
 
     std::vector<BufferId> cached_write_buffer_ids;
@@ -355,8 +402,8 @@ BufferCache
<P>
::BufferCache(VideoCore::RasterizerInterface& rasterizer_,
              Tegra::Engines::Maxwell3D& maxwell3d_,
              Tegra::Engines::KeplerCompute& kepler_compute_,
              Tegra::MemoryManager& gpu_memory_, Core::Memory::Memory& cpu_memory_,
              Runtime& runtime_)
-    : rasterizer{rasterizer_}, maxwell3d{maxwell3d_}, kepler_compute{kepler_compute_},
-      gpu_memory{gpu_memory_}, cpu_memory{cpu_memory_}, runtime{runtime_} {
+    : runtime{runtime_}, rasterizer{rasterizer_}, maxwell3d{maxwell3d_},
+      kepler_compute{kepler_compute_}, gpu_memory{gpu_memory_}, cpu_memory{cpu_memory_} {
     // Ensure the first slot is used for the null buffer
     void(slot_buffers.insert(runtime, NullBufferParams{}));
     deletion_iterator = slot_buffers.end();
@@ -485,6 +532,7 @@ void BufferCache
<P>
::BindHostStageBuffers(size_t stage) {
     MICROPROFILE_SCOPE(GPU_BindUploadBuffers);
     BindHostGraphicsUniformBuffers(stage);
     BindHostGraphicsStorageBuffers(stage);
+    BindHostGraphicsTextureBuffers(stage);
 }
 
 template <class P>
@@ -492,21 +540,30 @@ void BufferCache
<P>
::BindHostComputeBuffers() {
     MICROPROFILE_SCOPE(GPU_BindUploadBuffers);
     BindHostComputeUniformBuffers();
     BindHostComputeStorageBuffers();
+    BindHostComputeTextureBuffers();
 }
 
 template <class P>
-void BufferCache
<P>
::SetEnabledUniformBuffers(size_t stage, u32 enabled) { +void BufferCache
<P>
::SetUniformBuffersState(const std::array<u32, NUM_STAGES>& mask,
+                                        const UniformBufferSizes* sizes) {
     if constexpr (HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS) {
-        if (enabled_uniform_buffers[stage] != enabled) {
-            dirty_uniform_buffers[stage] = ~u32{0};
+        if (enabled_uniform_buffer_masks != mask) {
+            if constexpr (IS_OPENGL) {
+                fast_bound_uniform_buffers.fill(0);
+            }
+            dirty_uniform_buffers.fill(~u32{0});
+            uniform_buffer_binding_sizes.fill({});
         }
     }
-    enabled_uniform_buffers[stage] = enabled;
+    enabled_uniform_buffer_masks = mask;
+    uniform_buffer_sizes = sizes;
 }
 
 template <class P>
-void BufferCache
<P>
::SetEnabledComputeUniformBuffers(u32 enabled) { - enabled_compute_uniform_buffers = enabled; +void BufferCache
<P>
::SetComputeUniformBufferState(u32 mask,
+                                              const ComputeUniformBufferSizes* sizes) {
+    enabled_compute_uniform_buffer_mask = mask;
+    compute_uniform_buffer_sizes = sizes;
 }
 
 template <class P>
@@ -526,10 +583,30 @@ void BufferCache
<P>
::BindGraphicsStorageBuffer(size_t stage, size_t ssbo_index,
     storage_buffers[stage][ssbo_index] = StorageBufferBinding(ssbo_addr);
 }
 
+template <class P>
+void BufferCache
<P>
::UnbindGraphicsTextureBuffers(size_t stage) { + enabled_texture_buffers[stage] = 0; + written_texture_buffers[stage] = 0; + image_texture_buffers[stage] = 0; +} + +template +void BufferCache
<P>
::BindGraphicsTextureBuffer(size_t stage, size_t tbo_index, GPUVAddr gpu_addr,
+                                           u32 size, PixelFormat format, bool is_written,
+                                           bool is_image) {
+    enabled_texture_buffers[stage] |= 1U << tbo_index;
+    written_texture_buffers[stage] |= (is_written ? 1U : 0U) << tbo_index;
+    if constexpr (SEPARATE_IMAGE_BUFFERS_BINDINGS) {
+        image_texture_buffers[stage] |= (is_image ? 1U : 0U) << tbo_index;
+    }
+    texture_buffers[stage][tbo_index] = GetTextureBufferBinding(gpu_addr, size, format);
+}
+
 template <class P>
 void BufferCache
<P>
::UnbindComputeStorageBuffers() {
     enabled_compute_storage_buffers = 0;
     written_compute_storage_buffers = 0;
+    image_compute_texture_buffers = 0;
 }
 
 template <class P>
@@ -546,6 +623,24 @@ void BufferCache
<P>
::BindComputeStorageBuffer(size_t ssbo_index, u32 cbuf_index,
     compute_storage_buffers[ssbo_index] = StorageBufferBinding(ssbo_addr);
 }
 
+template <class P>
+void BufferCache
<P>
::UnbindComputeTextureBuffers() {
+    enabled_compute_texture_buffers = 0;
+    written_compute_texture_buffers = 0;
+    image_compute_texture_buffers = 0;
+}
+
+template <class P>
+void BufferCache
<P>
::BindComputeTextureBuffer(size_t tbo_index, GPUVAddr gpu_addr, u32 size,
+                                          PixelFormat format, bool is_written, bool is_image) {
+    enabled_compute_texture_buffers |= 1U << tbo_index;
+    written_compute_texture_buffers |= (is_written ? 1U : 0U) << tbo_index;
+    if constexpr (SEPARATE_IMAGE_BUFFERS_BINDINGS) {
+        image_compute_texture_buffers |= (is_image ? 1U : 0U) << tbo_index;
+    }
+    compute_texture_buffers[tbo_index] = GetTextureBufferBinding(gpu_addr, size, format);
+}
+
 template <class P>
 void BufferCache
<P>
::FlushCachedWrites() { for (const BufferId buffer_id : cached_write_buffer_ids) { @@ -771,7 +866,7 @@ void BufferCache
<P>
::BindHostGraphicsUniformBuffers(size_t stage) { dirty = std::exchange(dirty_uniform_buffers[stage], 0); } u32 binding_index = 0; - ForEachEnabledBit(enabled_uniform_buffers[stage], [&](u32 index) { + ForEachEnabledBit(enabled_uniform_buffer_masks[stage], [&](u32 index) { const bool needs_bind = ((dirty >> index) & 1) != 0; BindHostGraphicsUniformBuffer(stage, index, binding_index, needs_bind); if constexpr (NEEDS_BIND_UNIFORM_INDEX) { @@ -785,7 +880,7 @@ void BufferCache
<P>
::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32 bool needs_bind) { const Binding& binding = uniform_buffers[stage][index]; const VAddr cpu_addr = binding.cpu_addr; - const u32 size = binding.size; + const u32 size = std::min(binding.size, (*uniform_buffer_sizes)[stage][index]); Buffer& buffer = slot_buffers[binding.buffer_id]; TouchBuffer(buffer); const bool use_fast_buffer = binding.buffer_id != NULL_BUFFER_ID && @@ -795,8 +890,13 @@ void BufferCache
<P>
::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32 if constexpr (IS_OPENGL) { if (runtime.HasFastBufferSubData()) { // Fast path for Nvidia - if (!HasFastUniformBufferBound(stage, binding_index)) { + const bool should_fast_bind = + !HasFastUniformBufferBound(stage, binding_index) || + uniform_buffer_binding_sizes[stage][binding_index] != size; + if (should_fast_bind) { // We only have to bind when the currently bound buffer is not the fast version + fast_bound_uniform_buffers[stage] |= 1U << binding_index; + uniform_buffer_binding_sizes[stage][binding_index] = size; runtime.BindFastUniformBuffer(stage, binding_index, size); } const auto span = ImmediateBufferWithData(cpu_addr, size); @@ -804,8 +904,10 @@ void BufferCache
<P>
::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32
             return;
         }
     }
-    fast_bound_uniform_buffers[stage] |= 1U << binding_index;
-
+    if constexpr (IS_OPENGL) {
+        fast_bound_uniform_buffers[stage] |= 1U << binding_index;
+        uniform_buffer_binding_sizes[stage][binding_index] = size;
+    }
     // Stream buffer path to avoid stalling on non-Nvidia drivers or Vulkan
     const std::span<u8> span = runtime.BindMappedUniformBuffer(stage, binding_index, size);
     cpu_memory.ReadBlockUnsafe(cpu_addr, span.data(), size);
@@ -818,14 +920,27 @@ void BufferCache
<P>
::BindHostGraphicsUniformBuffer(size_t stage, u32 index, u32 } ++uniform_cache_shots[0]; - if (!needs_bind && !HasFastUniformBufferBound(stage, binding_index)) { - // Skip binding if it's not needed and if the bound buffer is not the fast version - // This exists to avoid instances where the fast buffer is bound and a GPU write happens + // Skip binding if it's not needed and if the bound buffer is not the fast version + // This exists to avoid instances where the fast buffer is bound and a GPU write happens + needs_bind |= HasFastUniformBufferBound(stage, binding_index); + if constexpr (HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS) { + needs_bind |= uniform_buffer_binding_sizes[stage][binding_index] != size; + } + if (!needs_bind) { return; } - fast_bound_uniform_buffers[stage] &= ~(1U << binding_index); - const u32 offset = buffer.Offset(cpu_addr); + if constexpr (IS_OPENGL) { + // Fast buffer will be unbound + fast_bound_uniform_buffers[stage] &= ~(1U << binding_index); + + // Mark the index as dirty if offset doesn't match + const bool is_copy_bind = offset != 0 && !runtime.SupportsNonZeroUniformOffset(); + dirty_uniform_buffers[stage] |= (is_copy_bind ? 1U : 0U) << index; + } + if constexpr (HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS) { + uniform_buffer_binding_sizes[stage][binding_index] = size; + } if constexpr (NEEDS_BIND_UNIFORM_INDEX) { runtime.BindUniformBuffer(stage, binding_index, buffer, offset, size); } else { @@ -854,6 +969,28 @@ void BufferCache
<P>
::BindHostGraphicsStorageBuffers(size_t stage) {
     });
 }
 
+template <class P>
+void BufferCache
<P>
::BindHostGraphicsTextureBuffers(size_t stage) { + ForEachEnabledBit(enabled_texture_buffers[stage], [&](u32 index) { + const TextureBufferBinding& binding = texture_buffers[stage][index]; + Buffer& buffer = slot_buffers[binding.buffer_id]; + const u32 size = binding.size; + SynchronizeBuffer(buffer, binding.cpu_addr, size); + + const u32 offset = buffer.Offset(binding.cpu_addr); + const PixelFormat format = binding.format; + if constexpr (SEPARATE_IMAGE_BUFFERS_BINDINGS) { + if (((image_texture_buffers[stage] >> index) & 1) != 0) { + runtime.BindImageBuffer(buffer, offset, size, format); + } else { + runtime.BindTextureBuffer(buffer, offset, size, format); + } + } else { + runtime.BindTextureBuffer(buffer, offset, size, format); + } + }); +} + template void BufferCache
<P>
::BindHostTransformFeedbackBuffers() { if (maxwell3d.regs.tfb_enabled == 0) { @@ -876,13 +1013,14 @@ void BufferCache
<P>
::BindHostComputeUniformBuffers() { if constexpr (HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS) { // Mark all uniform buffers as dirty dirty_uniform_buffers.fill(~u32{0}); + fast_bound_uniform_buffers.fill(0); } u32 binding_index = 0; - ForEachEnabledBit(enabled_compute_uniform_buffers, [&](u32 index) { + ForEachEnabledBit(enabled_compute_uniform_buffer_mask, [&](u32 index) { const Binding& binding = compute_uniform_buffers[index]; Buffer& buffer = slot_buffers[binding.buffer_id]; TouchBuffer(buffer); - const u32 size = binding.size; + const u32 size = std::min(binding.size, (*compute_uniform_buffer_sizes)[index]); SynchronizeBuffer(buffer, binding.cpu_addr, size); const u32 offset = buffer.Offset(binding.cpu_addr); @@ -916,6 +1054,28 @@ void BufferCache
<P>
::BindHostComputeStorageBuffers() {
     });
 }
 
+template <class P>
+void BufferCache
<P>
::BindHostComputeTextureBuffers() { + ForEachEnabledBit(enabled_compute_texture_buffers, [&](u32 index) { + const TextureBufferBinding& binding = compute_texture_buffers[index]; + Buffer& buffer = slot_buffers[binding.buffer_id]; + const u32 size = binding.size; + SynchronizeBuffer(buffer, binding.cpu_addr, size); + + const u32 offset = buffer.Offset(binding.cpu_addr); + const PixelFormat format = binding.format; + if constexpr (SEPARATE_IMAGE_BUFFERS_BINDINGS) { + if (((image_compute_texture_buffers >> index) & 1) != 0) { + runtime.BindImageBuffer(buffer, offset, size, format); + } else { + runtime.BindTextureBuffer(buffer, offset, size, format); + } + } else { + runtime.BindTextureBuffer(buffer, offset, size, format); + } + }); +} + template void BufferCache
<P>
::DoUpdateGraphicsBuffers(bool is_indexed) { if (is_indexed) { @@ -926,6 +1086,7 @@ void BufferCache
<P>
::DoUpdateGraphicsBuffers(bool is_indexed) { for (size_t stage = 0; stage < NUM_STAGES; ++stage) { UpdateUniformBuffers(stage); UpdateStorageBuffers(stage); + UpdateTextureBuffers(stage); } } @@ -933,6 +1094,7 @@ template void BufferCache
<P>
::DoUpdateComputeBuffers() {
     UpdateComputeUniformBuffers();
     UpdateComputeStorageBuffers();
+    UpdateComputeTextureBuffers();
 }
 
 template <class P>
@@ -1002,7 +1164,7 @@ void BufferCache
<P>
::UpdateVertexBuffer(u32 index) {
 template <class P>
 void BufferCache
<P>
::UpdateUniformBuffers(size_t stage) { - ForEachEnabledBit(enabled_uniform_buffers[stage], [&](u32 index) { + ForEachEnabledBit(enabled_uniform_buffer_masks[stage], [&](u32 index) { Binding& binding = uniform_buffers[stage][index]; if (binding.buffer_id) { // Already updated @@ -1032,6 +1194,18 @@ void BufferCache
<P>
::UpdateStorageBuffers(size_t stage) {
     });
 }
 
+template <class P>
+void BufferCache
<P>
::UpdateTextureBuffers(size_t stage) {
+    ForEachEnabledBit(enabled_texture_buffers[stage], [&](u32 index) {
+        Binding& binding = texture_buffers[stage][index];
+        binding.buffer_id = FindBuffer(binding.cpu_addr, binding.size);
+        // Mark buffer as written if needed
+        if (((written_texture_buffers[stage] >> index) & 1) != 0) {
+            MarkWrittenBuffer(binding.buffer_id, binding.cpu_addr, binding.size);
+        }
+    });
+}
+
 template <class P>
 void BufferCache
<P>
::UpdateTransformFeedbackBuffers() { if (maxwell3d.regs.tfb_enabled == 0) { @@ -1063,7 +1237,7 @@ void BufferCache
<P>
::UpdateTransformFeedbackBuffer(u32 index) {
 template <class P>
 void BufferCache
<P>
::UpdateComputeUniformBuffers() { - ForEachEnabledBit(enabled_compute_uniform_buffers, [&](u32 index) { + ForEachEnabledBit(enabled_compute_uniform_buffer_mask, [&](u32 index) { Binding& binding = compute_uniform_buffers[index]; binding = NULL_BINDING; const auto& launch_desc = kepler_compute.launch_description; @@ -1084,11 +1258,22 @@ void BufferCache
<P>
::UpdateComputeStorageBuffers() {
     ForEachEnabledBit(enabled_compute_storage_buffers, [&](u32 index) {
         // Resolve buffer
         Binding& binding = compute_storage_buffers[index];
-        const BufferId buffer_id = FindBuffer(binding.cpu_addr, binding.size);
-        binding.buffer_id = buffer_id;
+        binding.buffer_id = FindBuffer(binding.cpu_addr, binding.size);
         // Mark as written if needed
         if (((written_compute_storage_buffers >> index) & 1) != 0) {
-            MarkWrittenBuffer(buffer_id, binding.cpu_addr, binding.size);
+            MarkWrittenBuffer(binding.buffer_id, binding.cpu_addr, binding.size);
+        }
+    });
+}
+
+template <class P>
+void BufferCache
<P>
::UpdateComputeTextureBuffers() {
+    ForEachEnabledBit(enabled_compute_texture_buffers, [&](u32 index) {
+        Binding& binding = compute_texture_buffers[index];
+        binding.buffer_id = FindBuffer(binding.cpu_addr, binding.size);
+        // Mark as written if needed
+        if (((written_compute_texture_buffers >> index) & 1) != 0) {
+            MarkWrittenBuffer(binding.buffer_id, binding.cpu_addr, binding.size);
+        }
+    });
+}
@@ -1443,6 +1628,7 @@ template <class P>
 void BufferCache
<P>
::NotifyBufferDeletion() { if constexpr (HAS_PERSISTENT_UNIFORM_BUFFER_BINDINGS) { dirty_uniform_buffers.fill(~u32{0}); + uniform_buffer_binding_sizes.fill({}); } auto& flags = maxwell3d.dirty.flags; flags[Dirty::IndexBuffer] = true; @@ -1469,6 +1655,25 @@ typename BufferCache
<P>
::Binding BufferCache
<P>
::StorageBufferBinding(GPUVAddr s
     return binding;
 }
 
+template <class P>
+typename BufferCache
<P>
::TextureBufferBinding BufferCache
<P>
::GetTextureBufferBinding(
+    GPUVAddr gpu_addr, u32 size, PixelFormat format) {
+    const std::optional<VAddr> cpu_addr = gpu_memory.GpuToCpuAddress(gpu_addr);
+    TextureBufferBinding binding;
+    if (!cpu_addr || size == 0) {
+        binding.cpu_addr = 0;
+        binding.size = 0;
+        binding.buffer_id = NULL_BUFFER_ID;
+        binding.format = PixelFormat::Invalid;
+    } else {
+        binding.cpu_addr = *cpu_addr;
+        binding.size = size;
+        binding.buffer_id = BufferId{};
+        binding.format = format;
+    }
+    return binding;
+}
+
 template <class P>
 std::span<u8> BufferCache
<P>
::ImmediateBufferWithData(VAddr cpu_addr, size_t size) { u8* const base_pointer = cpu_memory.GetPointer(cpu_addr); diff --git a/src/video_core/dirty_flags.cpp b/src/video_core/dirty_flags.cpp index 7149af290..b1be065c3 100755 --- a/src/video_core/dirty_flags.cpp +++ b/src/video_core/dirty_flags.cpp @@ -58,6 +58,11 @@ void SetupDirtyRenderTargets(Maxwell3D::DirtyState::Tables& tables) { FillBlock(table, OFF(zeta), NUM(zeta), flag); } } + +void SetupDirtyShaders(Maxwell3D::DirtyState::Tables& tables) { + FillBlock(tables[0], OFF(shader_config[0]), + NUM(shader_config[0]) * Maxwell3D::Regs::MaxShaderProgram, Shaders); +} } // Anonymous namespace void SetupDirtyFlags(Maxwell3D::DirtyState::Tables& tables) { @@ -65,6 +70,7 @@ void SetupDirtyFlags(Maxwell3D::DirtyState::Tables& tables) { SetupIndexBuffer(tables); SetupDirtyDescriptors(tables); SetupDirtyRenderTargets(tables); + SetupDirtyShaders(tables); } } // namespace VideoCommon::Dirty diff --git a/src/video_core/dirty_flags.h b/src/video_core/dirty_flags.h index 702688ace..504465d3f 100755 --- a/src/video_core/dirty_flags.h +++ b/src/video_core/dirty_flags.h @@ -36,6 +36,8 @@ enum : u8 { IndexBuffer, + Shaders, + LastCommonEntry, }; diff --git a/src/video_core/engines/kepler_compute.cpp b/src/video_core/engines/kepler_compute.cpp index a9b75091e..492b4c5a3 100755 --- a/src/video_core/engines/kepler_compute.cpp +++ b/src/video_core/engines/kepler_compute.cpp @@ -8,7 +8,6 @@ #include "core/core.h" #include "video_core/engines/kepler_compute.h" #include "video_core/engines/maxwell_3d.h" -#include "video_core/engines/shader_type.h" #include "video_core/memory_manager.h" #include "video_core/rasterizer_interface.h" #include "video_core/renderer_base.h" @@ -57,53 +56,11 @@ void KeplerCompute::CallMultiMethod(u32 method, const u32* base_start, u32 amoun } } -u32 KeplerCompute::AccessConstBuffer32(ShaderType stage, u64 const_buffer, u64 offset) const { - ASSERT(stage == ShaderType::Compute); - const auto& buffer = launch_description.const_buffer_config[const_buffer]; - u32 result; - std::memcpy(&result, memory_manager.GetPointer(buffer.Address() + offset), sizeof(u32)); - return result; -} - -SamplerDescriptor KeplerCompute::AccessBoundSampler(ShaderType stage, u64 offset) const { - return AccessBindlessSampler(stage, regs.tex_cb_index, offset * sizeof(Texture::TextureHandle)); -} - -SamplerDescriptor KeplerCompute::AccessBindlessSampler(ShaderType stage, u64 const_buffer, - u64 offset) const { - ASSERT(stage == ShaderType::Compute); - const auto& tex_info_buffer = launch_description.const_buffer_config[const_buffer]; - const GPUVAddr tex_info_address = tex_info_buffer.Address() + offset; - return AccessSampler(memory_manager.Read(tex_info_address)); -} - -SamplerDescriptor KeplerCompute::AccessSampler(u32 handle) const { - const Texture::TextureHandle tex_handle{handle}; - const Texture::TICEntry tic = GetTICEntry(tex_handle.tic_id); - const Texture::TSCEntry tsc = GetTSCEntry(tex_handle.tsc_id); - - SamplerDescriptor result = SamplerDescriptor::FromTIC(tic); - result.is_shadow.Assign(tsc.depth_compare_enabled.Value()); - return result; -} - -VideoCore::GuestDriverProfile& KeplerCompute::AccessGuestDriverProfile() { - return rasterizer->AccessGuestDriverProfile(); -} - -const VideoCore::GuestDriverProfile& KeplerCompute::AccessGuestDriverProfile() const { - return rasterizer->AccessGuestDriverProfile(); -} - void KeplerCompute::ProcessLaunch() { const GPUVAddr launch_desc_loc = regs.launch_desc_loc.Address(); 
memory_manager.ReadBlockUnsafe(launch_desc_loc, &launch_description, LaunchParams::NUM_LAUNCH_PARAMETERS * sizeof(u32)); - - const GPUVAddr code_addr = regs.code_loc.Address() + launch_description.program_start; - LOG_TRACE(HW_GPU, "Compute invocation launched at address 0x{:016x}", code_addr); - - rasterizer->DispatchCompute(code_addr); + rasterizer->DispatchCompute(); } Texture::TICEntry KeplerCompute::GetTICEntry(u32 tic_index) const { diff --git a/src/video_core/engines/kepler_compute.h b/src/video_core/engines/kepler_compute.h index 7c40cba38..f8b8d06ac 100755 --- a/src/video_core/engines/kepler_compute.h +++ b/src/video_core/engines/kepler_compute.h @@ -10,10 +10,8 @@ #include "common/bit_field.h" #include "common/common_funcs.h" #include "common/common_types.h" -#include "video_core/engines/const_buffer_engine_interface.h" #include "video_core/engines/engine_interface.h" #include "video_core/engines/engine_upload.h" -#include "video_core/engines/shader_type.h" #include "video_core/gpu.h" #include "video_core/textures/texture.h" @@ -40,7 +38,7 @@ namespace Tegra::Engines { #define KEPLER_COMPUTE_REG_INDEX(field_name) \ (offsetof(Tegra::Engines::KeplerCompute::Regs, field_name) / sizeof(u32)) -class KeplerCompute final : public ConstBufferEngineInterface, public EngineInterface { +class KeplerCompute final : public EngineInterface { public: explicit KeplerCompute(Core::System& system, MemoryManager& memory_manager); ~KeplerCompute(); @@ -209,23 +207,6 @@ public: void CallMultiMethod(u32 method, const u32* base_start, u32 amount, u32 methods_pending) override; - u32 AccessConstBuffer32(ShaderType stage, u64 const_buffer, u64 offset) const override; - - SamplerDescriptor AccessBoundSampler(ShaderType stage, u64 offset) const override; - - SamplerDescriptor AccessBindlessSampler(ShaderType stage, u64 const_buffer, - u64 offset) const override; - - SamplerDescriptor AccessSampler(u32 handle) const override; - - u32 GetBoundBuffer() const override { - return regs.tex_cb_index; - } - - VideoCore::GuestDriverProfile& AccessGuestDriverProfile() override; - - const VideoCore::GuestDriverProfile& AccessGuestDriverProfile() const override; - private: void ProcessLaunch(); diff --git a/src/video_core/engines/maxwell_3d.cpp b/src/video_core/engines/maxwell_3d.cpp index aab6b8f7a..b18b8a02a 100755 --- a/src/video_core/engines/maxwell_3d.cpp +++ b/src/video_core/engines/maxwell_3d.cpp @@ -8,7 +8,6 @@ #include "core/core.h" #include "core/core_timing.h" #include "video_core/engines/maxwell_3d.h" -#include "video_core/engines/shader_type.h" #include "video_core/gpu.h" #include "video_core/memory_manager.h" #include "video_core/rasterizer_interface.h" @@ -670,42 +669,4 @@ void Maxwell3D::ProcessClearBuffers() { rasterizer->Clear(); } -u32 Maxwell3D::AccessConstBuffer32(ShaderType stage, u64 const_buffer, u64 offset) const { - ASSERT(stage != ShaderType::Compute); - const auto& shader_stage = state.shader_stages[static_cast(stage)]; - const auto& buffer = shader_stage.const_buffers[const_buffer]; - return memory_manager.Read(buffer.address + offset); -} - -SamplerDescriptor Maxwell3D::AccessBoundSampler(ShaderType stage, u64 offset) const { - return AccessBindlessSampler(stage, regs.tex_cb_index, offset * sizeof(Texture::TextureHandle)); -} - -SamplerDescriptor Maxwell3D::AccessBindlessSampler(ShaderType stage, u64 const_buffer, - u64 offset) const { - ASSERT(stage != ShaderType::Compute); - const auto& shader = state.shader_stages[static_cast(stage)]; - const auto& tex_info_buffer = 
shader.const_buffers[const_buffer]; - const GPUVAddr tex_info_address = tex_info_buffer.address + offset; - return AccessSampler(memory_manager.Read(tex_info_address)); -} - -SamplerDescriptor Maxwell3D::AccessSampler(u32 handle) const { - const Texture::TextureHandle tex_handle{handle}; - const Texture::TICEntry tic = GetTICEntry(tex_handle.tic_id); - const Texture::TSCEntry tsc = GetTSCEntry(tex_handle.tsc_id); - - SamplerDescriptor result = SamplerDescriptor::FromTIC(tic); - result.is_shadow.Assign(tsc.depth_compare_enabled.Value()); - return result; -} - -VideoCore::GuestDriverProfile& Maxwell3D::AccessGuestDriverProfile() { - return rasterizer->AccessGuestDriverProfile(); -} - -const VideoCore::GuestDriverProfile& Maxwell3D::AccessGuestDriverProfile() const { - return rasterizer->AccessGuestDriverProfile(); -} - } // namespace Tegra::Engines diff --git a/src/video_core/engines/maxwell_3d.h b/src/video_core/engines/maxwell_3d.h index 335383955..1aa43523a 100755 --- a/src/video_core/engines/maxwell_3d.h +++ b/src/video_core/engines/maxwell_3d.h @@ -17,11 +17,9 @@ #include "common/common_funcs.h" #include "common/common_types.h" #include "common/math_util.h" -#include "video_core/engines/const_buffer_engine_interface.h" #include "video_core/engines/const_buffer_info.h" #include "video_core/engines/engine_interface.h" #include "video_core/engines/engine_upload.h" -#include "video_core/engines/shader_type.h" #include "video_core/gpu.h" #include "video_core/macro/macro.h" #include "video_core/textures/texture.h" @@ -49,7 +47,7 @@ namespace Tegra::Engines { #define MAXWELL3D_REG_INDEX(field_name) \ (offsetof(Tegra::Engines::Maxwell3D::Regs, field_name) / sizeof(u32)) -class Maxwell3D final : public ConstBufferEngineInterface, public EngineInterface { +class Maxwell3D final : public EngineInterface { public: explicit Maxwell3D(Core::System& system, MemoryManager& memory_manager); ~Maxwell3D(); @@ -307,10 +305,6 @@ public: return (type == Type::SignedNorm) || (type == Type::UnsignedNorm); } - bool IsConstant() const { - return constant; - } - bool IsValid() const { return size != Size::Invalid; } @@ -912,7 +906,11 @@ public: u32 fill_rectangle; - INSERT_PADDING_WORDS_NOINIT(0x8); + INSERT_PADDING_WORDS_NOINIT(0x2); + + u32 conservative_raster_enable; + + INSERT_PADDING_WORDS_NOINIT(0x5); std::array vertex_attrib_format; @@ -959,7 +957,11 @@ public: SamplerIndex sampler_index; - INSERT_PADDING_WORDS_NOINIT(0x25); + INSERT_PADDING_WORDS_NOINIT(0x2); + + std::array gp_passthrough_mask; + + INSERT_PADDING_WORDS_NOINIT(0x1B); u32 depth_test_enable; @@ -1152,7 +1154,11 @@ public: u32 index; } primitive_restart; - INSERT_PADDING_WORDS_NOINIT(0x5F); + INSERT_PADDING_WORDS_NOINIT(0xE); + + u32 provoking_vertex_last; + + INSERT_PADDING_WORDS_NOINIT(0x50); struct { u32 start_addr_high; @@ -1424,23 +1430,6 @@ public: void FlushMMEInlineDraw(); - u32 AccessConstBuffer32(ShaderType stage, u64 const_buffer, u64 offset) const override; - - SamplerDescriptor AccessBoundSampler(ShaderType stage, u64 offset) const override; - - SamplerDescriptor AccessBindlessSampler(ShaderType stage, u64 const_buffer, - u64 offset) const override; - - SamplerDescriptor AccessSampler(u32 handle) const override; - - u32 GetBoundBuffer() const override { - return regs.tex_cb_index; - } - - VideoCore::GuestDriverProfile& AccessGuestDriverProfile() override; - - const VideoCore::GuestDriverProfile& AccessGuestDriverProfile() const override; - bool ShouldExecute() const { return execute_on; } @@ -1630,6 +1619,7 @@ 
ASSERT_REG_POSITION(zeta, 0x3F8); ASSERT_REG_POSITION(render_area, 0x3FD); ASSERT_REG_POSITION(clear_flags, 0x43E); ASSERT_REG_POSITION(fill_rectangle, 0x44F); +ASSERT_REG_POSITION(conservative_raster_enable, 0x452); ASSERT_REG_POSITION(vertex_attrib_format, 0x458); ASSERT_REG_POSITION(multisample_sample_locations, 0x478); ASSERT_REG_POSITION(multisample_coverage_to_color, 0x47E); @@ -1638,6 +1628,7 @@ ASSERT_REG_POSITION(zeta_width, 0x48a); ASSERT_REG_POSITION(zeta_height, 0x48b); ASSERT_REG_POSITION(zeta_depth, 0x48c); ASSERT_REG_POSITION(sampler_index, 0x48D); +ASSERT_REG_POSITION(gp_passthrough_mask, 0x490); ASSERT_REG_POSITION(depth_test_enable, 0x4B3); ASSERT_REG_POSITION(independent_blend_enable, 0x4B9); ASSERT_REG_POSITION(depth_write_enabled, 0x4BA); @@ -1690,6 +1681,7 @@ ASSERT_REG_POSITION(point_coord_replace, 0x581); ASSERT_REG_POSITION(code_address, 0x582); ASSERT_REG_POSITION(draw, 0x585); ASSERT_REG_POSITION(primitive_restart, 0x591); +ASSERT_REG_POSITION(provoking_vertex_last, 0x5A1); ASSERT_REG_POSITION(index_array, 0x5F2); ASSERT_REG_POSITION(polygon_offset_clamp, 0x61F); ASSERT_REG_POSITION(instanced_arrays, 0x620); diff --git a/src/video_core/engines/maxwell_dma.cpp b/src/video_core/engines/maxwell_dma.cpp index 2ee980bab..7817456a6 100755 --- a/src/video_core/engines/maxwell_dma.cpp +++ b/src/video_core/engines/maxwell_dma.cpp @@ -99,7 +99,8 @@ void MaxwellDMA::CopyBlockLinearToPitch() { // Optimized path for micro copies. const size_t dst_size = static_cast(regs.pitch_out) * regs.line_count; - if (dst_size < GOB_SIZE && regs.pitch_out <= GOB_SIZE_X) { + if (dst_size < GOB_SIZE && regs.pitch_out <= GOB_SIZE_X && + regs.src_params.height > GOB_SIZE_Y) { FastCopyBlockLinearToPitch(); return; } diff --git a/src/video_core/memory_manager.cpp b/src/video_core/memory_manager.cpp index 9deea9a26..fa396ccf8 100755 --- a/src/video_core/memory_manager.cpp +++ b/src/video_core/memory_manager.cpp @@ -69,7 +69,6 @@ void MemoryManager::Unmap(GPUVAddr gpu_addr, std::size_t size) { } else { UNREACHABLE_MSG("Unmapping non-existent GPU address=0x{:x}", gpu_addr); } - const auto submapped_ranges = GetSubmappedRange(gpu_addr, size); for (const auto& map : submapped_ranges) { diff --git a/src/video_core/rasterizer_interface.h b/src/video_core/rasterizer_interface.h index 554dd70b6..fcb204768 100755 --- a/src/video_core/rasterizer_interface.h +++ b/src/video_core/rasterizer_interface.h @@ -11,7 +11,6 @@ #include "common/common_types.h" #include "video_core/engines/fermi_2d.h" #include "video_core/gpu.h" -#include "video_core/guest_driver.h" namespace Tegra { class MemoryManager; @@ -42,7 +41,7 @@ public: virtual void Clear() = 0; /// Dispatches a compute shader invocation - virtual void DispatchCompute(GPUVAddr code_addr) = 0; + virtual void DispatchCompute() = 0; /// Resets the counter of a query virtual void ResetCounter(QueryType type) = 0; @@ -137,18 +136,5 @@ public: /// Initialize disk cached resources for the game being emulated virtual void LoadDiskResources(u64 title_id, std::stop_token stop_loading, const DiskResourceLoadCallback& callback) {} - - /// Grant access to the Guest Driver Profile for recording/obtaining info on the guest driver. - [[nodiscard]] GuestDriverProfile& AccessGuestDriverProfile() { - return guest_driver_profile; - } - - /// Grant access to the Guest Driver Profile for recording/obtaining info on the guest driver. 
- [[nodiscard]] const GuestDriverProfile& AccessGuestDriverProfile() const { - return guest_driver_profile; - } - -private: - GuestDriverProfile guest_driver_profile{}; }; } // namespace VideoCore diff --git a/src/video_core/renderer_opengl/gl_buffer_cache.cpp b/src/video_core/renderer_opengl/gl_buffer_cache.cpp index c225d1fc9..91c45c9c0 100755 --- a/src/video_core/renderer_opengl/gl_buffer_cache.cpp +++ b/src/video_core/renderer_opengl/gl_buffer_cache.cpp @@ -2,14 +2,18 @@ // Licensed under GPLv2 or any later version // Refer to the license.txt file included. +#include #include #include "video_core/buffer_cache/buffer_cache.h" #include "video_core/renderer_opengl/gl_buffer_cache.h" #include "video_core/renderer_opengl/gl_device.h" +#include "video_core/renderer_opengl/maxwell_to_gl.h" namespace OpenGL { namespace { +using VideoCore::Surface::PixelFormat; + struct BindlessSSBO { GLuint64EXT address; GLsizei length; @@ -21,6 +25,25 @@ constexpr std::array PROGRAM_LUT{ GL_VERTEX_PROGRAM_NV, GL_TESS_CONTROL_PROGRAM_NV, GL_TESS_EVALUATION_PROGRAM_NV, GL_GEOMETRY_PROGRAM_NV, GL_FRAGMENT_PROGRAM_NV, }; + +[[nodiscard]] GLenum GetTextureBufferFormat(GLenum gl_format) { + switch (gl_format) { + case GL_RGBA8_SNORM: + return GL_RGBA8; + case GL_R8_SNORM: + return GL_R8; + case GL_RGBA16_SNORM: + return GL_RGBA16; + case GL_R16_SNORM: + return GL_R16; + case GL_RG16_SNORM: + return GL_RG16; + case GL_RG8_SNORM: + return GL_RG8; + default: + return gl_format; + } +} } // Anonymous namespace Buffer::Buffer(BufferCacheRuntime&, VideoCommon::NullBufferParams null_params) @@ -62,6 +85,30 @@ void Buffer::MakeResident(GLenum access) noexcept { glMakeNamedBufferResidentNV(buffer.handle, access); } +GLuint Buffer::View(u32 offset, u32 size, PixelFormat format) { + const auto it{std::ranges::find_if(views, [offset, size, format](const BufferView& view) { + return offset == view.offset && size == view.size && format == view.format; + })}; + if (it != views.end()) { + return it->texture.handle; + } + OGLTexture texture; + texture.Create(GL_TEXTURE_BUFFER); + const GLenum gl_format{MaxwellToGL::GetFormatTuple(format).internal_format}; + const GLenum texture_format{GetTextureBufferFormat(gl_format)}; + if (texture_format != gl_format) { + LOG_WARNING(Render_OpenGL, "Emulating SNORM texture buffer with UNORM."); + } + glTextureBufferRange(texture.handle, texture_format, buffer.handle, offset, size); + views.push_back({ + .offset = offset, + .size = size, + .format = format, + .texture = std::move(texture), + }); + return views.back().texture.handle; +} + BufferCacheRuntime::BufferCacheRuntime(const Device& device_) : device{device_}, has_fast_buffer_sub_data{device.HasFastBufferSubData()}, use_assembly_shaders{device.UseAssemblyShaders()}, @@ -138,7 +185,7 @@ void BufferCacheRuntime::BindUniformBuffer(size_t stage, u32 binding_index, Buff glBindBufferRangeNV(PABO_LUT[stage], binding_index, handle, 0, static_cast(size)); } else { - const GLuint base_binding = device.GetBaseBindings(stage).uniform_buffer; + const GLuint base_binding = graphics_base_uniform_bindings[stage]; const GLuint binding = base_binding + binding_index; glBindBufferRange(GL_UNIFORM_BUFFER, binding, buffer.Handle(), static_cast(offset), static_cast(size)); @@ -165,7 +212,12 @@ void BufferCacheRuntime::BindComputeUniformBuffer(u32 binding_index, Buffer& buf void BufferCacheRuntime::BindStorageBuffer(size_t stage, u32 binding_index, Buffer& buffer, u32 offset, u32 size, bool is_written) { - if (use_assembly_shaders) { + if (use_storage_buffers) { 
+ const GLuint base_binding = graphics_base_storage_bindings[stage]; + const GLuint binding = base_binding + binding_index; + glBindBufferRange(GL_SHADER_STORAGE_BUFFER, binding, buffer.Handle(), + static_cast(offset), static_cast(size)); + } else { const BindlessSSBO ssbo{ .address = buffer.HostGpuAddr() + offset, .length = static_cast(size), @@ -174,17 +226,19 @@ void BufferCacheRuntime::BindStorageBuffer(size_t stage, u32 binding_index, Buff buffer.MakeResident(is_written ? GL_READ_WRITE : GL_READ_ONLY); glProgramLocalParametersI4uivNV(PROGRAM_LUT[stage], binding_index, 1, reinterpret_cast(&ssbo)); - } else { - const GLuint base_binding = device.GetBaseBindings(stage).shader_storage_buffer; - const GLuint binding = base_binding + binding_index; - glBindBufferRange(GL_SHADER_STORAGE_BUFFER, binding, buffer.Handle(), - static_cast(offset), static_cast(size)); } } void BufferCacheRuntime::BindComputeStorageBuffer(u32 binding_index, Buffer& buffer, u32 offset, u32 size, bool is_written) { - if (use_assembly_shaders) { + if (use_storage_buffers) { + if (size != 0) { + glBindBufferRange(GL_SHADER_STORAGE_BUFFER, binding_index, buffer.Handle(), + static_cast(offset), static_cast(size)); + } else { + glBindBufferRange(GL_SHADER_STORAGE_BUFFER, binding_index, 0, 0, 0); + } + } else { const BindlessSSBO ssbo{ .address = buffer.HostGpuAddr() + offset, .length = static_cast(size), @@ -193,11 +247,6 @@ void BufferCacheRuntime::BindComputeStorageBuffer(u32 binding_index, Buffer& buf buffer.MakeResident(is_written ? GL_READ_WRITE : GL_READ_ONLY); glProgramLocalParametersI4uivNV(GL_COMPUTE_PROGRAM_NV, binding_index, 1, reinterpret_cast(&ssbo)); - } else if (size == 0) { - glBindBufferRange(GL_SHADER_STORAGE_BUFFER, binding_index, 0, 0, 0); - } else { - glBindBufferRange(GL_SHADER_STORAGE_BUFFER, binding_index, buffer.Handle(), - static_cast(offset), static_cast(size)); } } @@ -207,4 +256,13 @@ void BufferCacheRuntime::BindTransformFeedbackBuffer(u32 index, Buffer& buffer, static_cast(offset), static_cast(size)); } +void BufferCacheRuntime::BindTextureBuffer(Buffer& buffer, u32 offset, u32 size, + PixelFormat format) { + *texture_handles++ = buffer.View(offset, size, format); +} + +void BufferCacheRuntime::BindImageBuffer(Buffer& buffer, u32 offset, u32 size, PixelFormat format) { + *image_handles++ = buffer.View(offset, size, format); +} + } // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_buffer_cache.h b/src/video_core/renderer_opengl/gl_buffer_cache.h index d8b20a9af..af5720afd 100755 --- a/src/video_core/renderer_opengl/gl_buffer_cache.h +++ b/src/video_core/renderer_opengl/gl_buffer_cache.h @@ -32,6 +32,8 @@ public: void MakeResident(GLenum access) noexcept; + [[nodiscard]] GLuint View(u32 offset, u32 size, VideoCore::Surface::PixelFormat format); + [[nodiscard]] GLuint64EXT HostGpuAddr() const noexcept { return address; } @@ -41,9 +43,17 @@ public: } private: + struct BufferView { + u32 offset; + u32 size; + VideoCore::Surface::PixelFormat format; + OGLTexture texture; + }; + GLuint64EXT address = 0; OGLBuffer buffer; GLenum current_residency_access = GL_NONE; + std::vector views; }; class BufferCacheRuntime { @@ -73,17 +83,21 @@ public: void BindTransformFeedbackBuffer(u32 index, Buffer& buffer, u32 offset, u32 size); + void BindTextureBuffer(Buffer& buffer, u32 offset, u32 size, + VideoCore::Surface::PixelFormat format); + + void BindImageBuffer(Buffer& buffer, u32 offset, u32 size, + VideoCore::Surface::PixelFormat format); + void BindFastUniformBuffer(size_t stage, u32 
binding_index, u32 size) { + const GLuint handle = fast_uniforms[stage][binding_index].handle; + const GLsizeiptr gl_size = static_cast(size); if (use_assembly_shaders) { - const GLuint handle = fast_uniforms[stage][binding_index].handle; - const GLsizeiptr gl_size = static_cast(size); glBindBufferRangeNV(PABO_LUT[stage], binding_index, handle, 0, gl_size); } else { - const GLuint base_binding = device.GetBaseBindings(stage).uniform_buffer; + const GLuint base_binding = graphics_base_uniform_bindings[stage]; const GLuint binding = base_binding + binding_index; - glBindBufferRange(GL_UNIFORM_BUFFER, binding, - fast_uniforms[stage][binding_index].handle, 0, - static_cast(size)); + glBindBufferRange(GL_UNIFORM_BUFFER, binding, handle, 0, gl_size); } } @@ -101,7 +115,7 @@ public: std::span BindMappedUniformBuffer(size_t stage, u32 binding_index, u32 size) noexcept { const auto [mapped_span, offset] = stream_buffer->Request(static_cast(size)); - const GLuint base_binding = device.GetBaseBindings(stage).uniform_buffer; + const GLuint base_binding = graphics_base_uniform_bindings[stage]; const GLuint binding = base_binding + binding_index; glBindBufferRange(GL_UNIFORM_BUFFER, binding, stream_buffer->Handle(), static_cast(offset), static_cast(size)); @@ -116,6 +130,27 @@ public: return has_fast_buffer_sub_data; } + [[nodiscard]] bool SupportsNonZeroUniformOffset() const noexcept { + return !use_assembly_shaders; + } + + void SetBaseUniformBindings(const std::array& bindings) { + graphics_base_uniform_bindings = bindings; + } + + void SetBaseStorageBindings(const std::array& bindings) { + graphics_base_storage_bindings = bindings; + } + + void SetImagePointers(GLuint* texture_handles_, GLuint* image_handles_) { + texture_handles = texture_handles_; + image_handles = image_handles_; + } + + void SetEnableStorageBuffers(bool use_storage_buffers_) { + use_storage_buffers = use_storage_buffers_; + } + private: static constexpr std::array PABO_LUT{ GL_VERTEX_PROGRAM_PARAMETER_BUFFER_NV, GL_TESS_CONTROL_PROGRAM_PARAMETER_BUFFER_NV, @@ -129,8 +164,15 @@ private: bool use_assembly_shaders = false; bool has_unified_vertex_buffers = false; + bool use_storage_buffers = false; + u32 max_attributes = 0; + std::array graphics_base_uniform_bindings{}; + std::array graphics_base_storage_bindings{}; + GLuint* texture_handles = nullptr; + GLuint* image_handles = nullptr; + std::optional stream_buffer; std::array, @@ -154,6 +196,7 @@ struct BufferCacheParams { static constexpr bool NEEDS_BIND_UNIFORM_INDEX = true; static constexpr bool NEEDS_BIND_STORAGE_INDEX = true; static constexpr bool USE_MEMORY_MAPS = false; + static constexpr bool SEPARATE_IMAGE_BUFFER_BINDINGS = true; }; using BufferCache = VideoCommon::BufferCache; diff --git a/src/video_core/renderer_opengl/gl_compute_pipeline.cpp b/src/video_core/renderer_opengl/gl_compute_pipeline.cpp new file mode 100755 index 000000000..c63e87a56 --- /dev/null +++ b/src/video_core/renderer_opengl/gl_compute_pipeline.cpp @@ -0,0 +1,213 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
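With BaseBindings gone from Device, callers are now expected to seed the runtime before binding host buffers. A minimal sketch of that call order, assuming five graphics stages and that `runtime` is a BufferCacheRuntime& while `use_storage_buffers` is a bool decided by the pipeline (the tables themselves would come from shader info):

    std::array<GLuint, 5> base_ubos{};   // per-stage uniform buffer bases
    std::array<GLuint, 5> base_ssbos{};  // per-stage storage buffer bases
    std::array<GLuint, 64> textures{};   // filled by BindTextureBuffer
    std::array<GLuint, 8> images{};      // filled by BindImageBuffer

    runtime.SetBaseUniformBindings(base_ubos);
    runtime.SetBaseStorageBindings(base_ssbos);
    runtime.SetEnableStorageBuffers(use_storage_buffers);
    runtime.SetImagePointers(textures.data(), images.data());
    // Subsequent Bind*Buffer calls consume this state: texture/image buffer
    // view handles are written through the registered pointers so the caller
    // can hand them to glBindTextures/glBindImageTextures afterwards.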
+ +#include + +#include "common/cityhash.h" +#include "common/settings.h" // for enum class Settings::ShaderBackend +#include "video_core/renderer_opengl/gl_compute_pipeline.h" +#include "video_core/renderer_opengl/gl_shader_manager.h" +#include "video_core/renderer_opengl/gl_shader_util.h" + +namespace OpenGL { + +using Shader::ImageBufferDescriptor; +using Tegra::Texture::TexturePair; +using VideoCommon::ImageId; + +constexpr u32 MAX_TEXTURES = 64; +constexpr u32 MAX_IMAGES = 16; + +template +u32 AccumulateCount(const Range& range) { + u32 num{}; + for (const auto& desc : range) { + num += desc.count; + } + return num; +} + +size_t ComputePipelineKey::Hash() const noexcept { + return static_cast( + Common::CityHash64(reinterpret_cast(this), sizeof *this)); +} + +bool ComputePipelineKey::operator==(const ComputePipelineKey& rhs) const noexcept { + return std::memcmp(this, &rhs, sizeof *this) == 0; +} + +ComputePipeline::ComputePipeline(const Device& device, TextureCache& texture_cache_, + BufferCache& buffer_cache_, Tegra::MemoryManager& gpu_memory_, + Tegra::Engines::KeplerCompute& kepler_compute_, + ProgramManager& program_manager_, const Shader::Info& info_, + std::string code, std::vector code_v) + : texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, gpu_memory{gpu_memory_}, + kepler_compute{kepler_compute_}, program_manager{program_manager_}, info{info_} { + switch (device.GetShaderBackend()) { + case Settings::ShaderBackend::GLSL: + source_program.handle = glCreateProgram(); + AttachShader(GL_COMPUTE_SHADER, source_program.handle, code); + LinkProgram(source_program.handle); + break; + case Settings::ShaderBackend::GLASM: + assembly_program = CompileProgram(code, GL_COMPUTE_PROGRAM_NV); + break; + case Settings::ShaderBackend::SPIRV: + source_program.handle = glCreateProgram(); + AttachShader(GL_COMPUTE_SHADER, source_program.handle, code_v); + LinkProgram(source_program.handle); + break; + } + std::copy_n(info.constant_buffer_used_sizes.begin(), uniform_buffer_sizes.size(), + uniform_buffer_sizes.begin()); + + num_texture_buffers = AccumulateCount(info.texture_buffer_descriptors); + num_image_buffers = AccumulateCount(info.image_buffer_descriptors); + + const u32 num_textures{num_texture_buffers + AccumulateCount(info.texture_descriptors)}; + ASSERT(num_textures <= MAX_TEXTURES); + + const u32 num_images{num_image_buffers + AccumulateCount(info.image_descriptors)}; + ASSERT(num_images <= MAX_IMAGES); + + const bool is_glasm{assembly_program.handle != 0}; + const u32 num_storage_buffers{AccumulateCount(info.storage_buffers_descriptors)}; + use_storage_buffers = + !is_glasm || num_storage_buffers < device.GetMaxGLASMStorageBufferBlocks(); + writes_global_memory = !use_storage_buffers && + std::ranges::any_of(info.storage_buffers_descriptors, + [](const auto& desc) { return desc.is_written; }); +} + +void ComputePipeline::Configure() { + buffer_cache.SetComputeUniformBufferState(info.constant_buffer_mask, &uniform_buffer_sizes); + buffer_cache.UnbindComputeStorageBuffers(); + size_t ssbo_index{}; + for (const auto& desc : info.storage_buffers_descriptors) { + ASSERT(desc.count == 1); + buffer_cache.BindComputeStorageBuffer(ssbo_index, desc.cbuf_index, desc.cbuf_offset, + desc.is_written); + ++ssbo_index; + } + texture_cache.SynchronizeComputeDescriptors(); + + std::array image_view_ids; + boost::container::static_vector image_view_indices; + std::array samplers; + std::array textures; + std::array images; + GLsizei sampler_binding{}; + GLsizei texture_binding{}; + GLsizei 
image_binding{}; + + const auto& qmd{kepler_compute.launch_description}; + const auto& cbufs{qmd.const_buffer_config}; + const bool via_header_index{qmd.linked_tsc != 0}; + const auto read_handle{[&](const auto& desc, u32 index) { + ASSERT(((qmd.const_buffer_enable_mask >> desc.cbuf_index) & 1) != 0); + const u32 index_offset{index << desc.size_shift}; + const u32 offset{desc.cbuf_offset + index_offset}; + const GPUVAddr addr{cbufs[desc.cbuf_index].Address() + offset}; + if constexpr (std::is_same_v || + std::is_same_v) { + if (desc.has_secondary) { + ASSERT(((qmd.const_buffer_enable_mask >> desc.secondary_cbuf_index) & 1) != 0); + const u32 secondary_offset{desc.secondary_cbuf_offset + index_offset}; + const GPUVAddr separate_addr{cbufs[desc.secondary_cbuf_index].Address() + + secondary_offset}; + const u32 lhs_raw{gpu_memory.Read(addr)}; + const u32 rhs_raw{gpu_memory.Read(separate_addr)}; + return TexturePair(lhs_raw | rhs_raw, via_header_index); + } + } + return TexturePair(gpu_memory.Read(addr), via_header_index); + }}; + const auto add_image{[&](const auto& desc) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices.push_back(handle.first); + } + }}; + for (const auto& desc : info.texture_buffer_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices.push_back(handle.first); + samplers[sampler_binding++] = 0; + } + } + std::ranges::for_each(info.image_buffer_descriptors, add_image); + for (const auto& desc : info.texture_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices.push_back(handle.first); + + Sampler* const sampler = texture_cache.GetComputeSampler(handle.second); + samplers[sampler_binding++] = sampler->Handle(); + } + } + std::ranges::for_each(info.image_descriptors, add_image); + + const std::span indices_span(image_view_indices.data(), image_view_indices.size()); + texture_cache.FillComputeImageViews(indices_span, image_view_ids); + + if (assembly_program.handle != 0) { + program_manager.BindComputeAssemblyProgram(assembly_program.handle); + } else { + program_manager.BindProgram(source_program.handle); + } + buffer_cache.UnbindComputeTextureBuffers(); + size_t texbuf_index{}; + const auto add_buffer{[&](const auto& desc) { + constexpr bool is_image = std::is_same_v; + for (u32 i = 0; i < desc.count; ++i) { + bool is_written{false}; + if constexpr (is_image) { + is_written = desc.is_written; + } + ImageView& image_view{texture_cache.GetImageView(image_view_ids[texbuf_index])}; + buffer_cache.BindComputeTextureBuffer(texbuf_index, image_view.GpuAddr(), + image_view.BufferSize(), image_view.format, + is_written, is_image); + ++texbuf_index; + } + }}; + std::ranges::for_each(info.texture_buffer_descriptors, add_buffer); + std::ranges::for_each(info.image_buffer_descriptors, add_buffer); + + buffer_cache.UpdateComputeBuffers(); + + buffer_cache.runtime.SetEnableStorageBuffers(use_storage_buffers); + buffer_cache.runtime.SetImagePointers(textures.data(), images.data()); + buffer_cache.BindHostComputeBuffers(); + + const ImageId* views_it{image_view_ids.data() + num_texture_buffers + num_image_buffers}; + texture_binding += num_texture_buffers; + image_binding += num_image_buffers; + + for (const auto& desc : info.texture_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + ImageView& 
image_view{texture_cache.GetImageView(*(views_it++))}; + textures[texture_binding++] = image_view.Handle(desc.type); + } + } + for (const auto& desc : info.image_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + ImageView& image_view{texture_cache.GetImageView(*(views_it++))}; + if (desc.is_written) { + texture_cache.MarkModification(image_view.image_id); + } + images[image_binding++] = image_view.StorageView(desc.type, desc.format); + } + } + if (texture_binding != 0) { + ASSERT(texture_binding == sampler_binding); + glBindTextures(0, texture_binding, textures.data()); + glBindSamplers(0, sampler_binding, samplers.data()); + } + if (image_binding != 0) { + glBindImageTextures(0, image_binding, images.data()); + } +} + +} // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_compute_pipeline.h b/src/video_core/renderer_opengl/gl_compute_pipeline.h new file mode 100755 index 000000000..50c676365 --- /dev/null +++ b/src/video_core/renderer_opengl/gl_compute_pipeline.h @@ -0,0 +1,93 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include + +#include "common/common_types.h" +#include "shader_recompiler/shader_info.h" +#include "video_core/renderer_opengl/gl_buffer_cache.h" +#include "video_core/renderer_opengl/gl_resource_manager.h" +#include "video_core/renderer_opengl/gl_texture_cache.h" + +namespace Tegra { +class MemoryManager; +} + +namespace Tegra::Engines { +class KeplerCompute; +} + +namespace Shader { +struct Info; +} + +namespace OpenGL { + +class Device; +class ProgramManager; + +struct ComputePipelineKey { + u64 unique_hash; + u32 shared_memory_size; + std::array workgroup_size; + + size_t Hash() const noexcept; + + bool operator==(const ComputePipelineKey&) const noexcept; + + bool operator!=(const ComputePipelineKey& rhs) const noexcept { + return !operator==(rhs); + } +}; +static_assert(std::has_unique_object_representations_v); +static_assert(std::is_trivially_copyable_v); +static_assert(std::is_trivially_constructible_v); + +class ComputePipeline { +public: + explicit ComputePipeline(const Device& device, TextureCache& texture_cache_, + BufferCache& buffer_cache_, Tegra::MemoryManager& gpu_memory_, + Tegra::Engines::KeplerCompute& kepler_compute_, + ProgramManager& program_manager_, const Shader::Info& info_, + std::string code, std::vector code_v); + + void Configure(); + + [[nodiscard]] bool WritesGlobalMemory() const noexcept { + return writes_global_memory; + } + +private: + TextureCache& texture_cache; + BufferCache& buffer_cache; + Tegra::MemoryManager& gpu_memory; + Tegra::Engines::KeplerCompute& kepler_compute; + ProgramManager& program_manager; + + Shader::Info info; + OGLProgram source_program; + OGLAssemblyProgram assembly_program; + VideoCommon::ComputeUniformBufferSizes uniform_buffer_sizes{}; + + u32 num_texture_buffers{}; + u32 num_image_buffers{}; + + bool use_storage_buffers{}; + bool writes_global_memory{}; +}; + +} // namespace OpenGL + +namespace std { +template <> +struct hash { + size_t operator()(const OpenGL::ComputePipelineKey& k) const noexcept { + return k.Hash(); + } +}; +} // namespace std diff --git a/src/video_core/renderer_opengl/gl_device.cpp b/src/video_core/renderer_opengl/gl_device.cpp index 3b00614e7..99f8769fc 100755 --- a/src/video_core/renderer_opengl/gl_device.cpp +++ b/src/video_core/renderer_opengl/gl_device.cpp @@ -17,39 +17,17 @@ #include "common/logging/log.h" #include 
"common/scope_exit.h" #include "common/settings.h" +#include "shader_recompiler/stage.h" #include "video_core/renderer_opengl/gl_device.h" #include "video_core/renderer_opengl/gl_resource_manager.h" namespace OpenGL { namespace { -// One uniform block is reserved for emulation purposes -constexpr u32 ReservedUniformBlocks = 1; - -constexpr u32 NumStages = 5; - constexpr std::array LIMIT_UBOS = { GL_MAX_VERTEX_UNIFORM_BLOCKS, GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS, GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS, GL_MAX_GEOMETRY_UNIFORM_BLOCKS, GL_MAX_FRAGMENT_UNIFORM_BLOCKS, GL_MAX_COMPUTE_UNIFORM_BLOCKS, }; -constexpr std::array LIMIT_SSBOS = { - GL_MAX_VERTEX_SHADER_STORAGE_BLOCKS, GL_MAX_TESS_CONTROL_SHADER_STORAGE_BLOCKS, - GL_MAX_TESS_EVALUATION_SHADER_STORAGE_BLOCKS, GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS, - GL_MAX_FRAGMENT_SHADER_STORAGE_BLOCKS, GL_MAX_COMPUTE_SHADER_STORAGE_BLOCKS, -}; -constexpr std::array LIMIT_SAMPLERS = { - GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, - GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS, - GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS, - GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS, - GL_MAX_TEXTURE_IMAGE_UNITS, - GL_MAX_COMPUTE_TEXTURE_IMAGE_UNITS, -}; -constexpr std::array LIMIT_IMAGES = { - GL_MAX_VERTEX_IMAGE_UNIFORMS, GL_MAX_TESS_CONTROL_IMAGE_UNIFORMS, - GL_MAX_TESS_EVALUATION_IMAGE_UNIFORMS, GL_MAX_GEOMETRY_IMAGE_UNIFORMS, - GL_MAX_FRAGMENT_IMAGE_UNIFORMS, GL_MAX_COMPUTE_IMAGE_UNIFORMS, -}; template T GetInteger(GLenum pname) { @@ -82,81 +60,18 @@ bool HasExtension(std::span extensions, std::string_view return std::ranges::find(extensions, extension) != extensions.end(); } -u32 Extract(u32& base, u32& num, u32 amount, std::optional limit = {}) { - ASSERT(num >= amount); - if (limit) { - amount = std::min(amount, GetInteger(*limit)); - } - num -= amount; - return std::exchange(base, base + amount); -} - -std::array BuildMaxUniformBuffers() noexcept { - std::array max; - std::ranges::transform(LIMIT_UBOS, max.begin(), - [](GLenum pname) { return GetInteger(pname); }); +std::array BuildMaxUniformBuffers() noexcept { + std::array max; + std::ranges::transform(LIMIT_UBOS, max.begin(), &GetInteger); return max; } -std::array BuildBaseBindings() noexcept { - std::array bindings; - - static constexpr std::array stage_swizzle{0, 1, 2, 3, 4}; - const u32 total_ubos = GetInteger(GL_MAX_UNIFORM_BUFFER_BINDINGS); - const u32 total_ssbos = GetInteger(GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS); - const u32 total_samplers = GetInteger(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS); - - u32 num_ubos = total_ubos - ReservedUniformBlocks; - u32 num_ssbos = total_ssbos; - u32 num_samplers = total_samplers; - - u32 base_ubo = ReservedUniformBlocks; - u32 base_ssbo = 0; - u32 base_samplers = 0; - - for (std::size_t i = 0; i < NumStages; ++i) { - const std::size_t stage = stage_swizzle[i]; - bindings[stage] = { - Extract(base_ubo, num_ubos, total_ubos / NumStages, LIMIT_UBOS[stage]), - Extract(base_ssbo, num_ssbos, total_ssbos / NumStages, LIMIT_SSBOS[stage]), - Extract(base_samplers, num_samplers, total_samplers / NumStages, - LIMIT_SAMPLERS[stage])}; - } - - u32 num_images = GetInteger(GL_MAX_IMAGE_UNITS); - u32 base_images = 0; - - // GL_MAX_IMAGE_UNITS is guaranteed by the spec to have a minimum value of 8. - // Due to the limitation of GL_MAX_IMAGE_UNITS, reserve at least 4 image bindings on the - // fragment stage, and at least 1 for the rest of the stages. - // So far games are observed to use 1 image binding on vertex and 4 on fragment stages. - - // Reserve at least 4 image bindings on the fragment stage. 
- bindings[4].image = - Extract(base_images, num_images, std::max(4U, num_images / NumStages), LIMIT_IMAGES[4]); - - // This is guaranteed to be at least 1. - const u32 total_extracted_images = num_images / (NumStages - 1); - - // Reserve the other image bindings. - for (std::size_t i = 0; i < NumStages; ++i) { - const std::size_t stage = stage_swizzle[i]; - if (stage == 4) { - continue; - } - bindings[stage].image = - Extract(base_images, num_images, total_extracted_images, LIMIT_IMAGES[stage]); - } - - // Compute doesn't care about any of this. - bindings[5] = {0, 0, 0, 0}; - - return bindings; -} - bool IsASTCSupported() { - static constexpr std::array targets = {GL_TEXTURE_2D, GL_TEXTURE_2D_ARRAY}; - static constexpr std::array formats = { + static constexpr std::array targets{ + GL_TEXTURE_2D, + GL_TEXTURE_2D_ARRAY, + }; + static constexpr std::array formats{ GL_COMPRESSED_RGBA_ASTC_4x4_KHR, GL_COMPRESSED_RGBA_ASTC_5x4_KHR, GL_COMPRESSED_RGBA_ASTC_5x5_KHR, GL_COMPRESSED_RGBA_ASTC_6x5_KHR, GL_COMPRESSED_RGBA_ASTC_6x6_KHR, GL_COMPRESSED_RGBA_ASTC_8x5_KHR, @@ -172,11 +87,10 @@ bool IsASTCSupported() { GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR, GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR, GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR, GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR, }; - static constexpr std::array required_support = { + static constexpr std::array required_support{ GL_VERTEX_TEXTURE, GL_TESS_CONTROL_TEXTURE, GL_TESS_EVALUATION_TEXTURE, GL_GEOMETRY_TEXTURE, GL_FRAGMENT_TEXTURE, GL_COMPUTE_TEXTURE, }; - for (const GLenum target : targets) { for (const GLenum format : formats) { for (const GLenum support : required_support) { @@ -223,14 +137,13 @@ Device::Device() { "Beta driver 443.24 is known to have issues. There might be performance issues."); disable_fast_buffer_sub_data = true; } - max_uniform_buffers = BuildMaxUniformBuffers(); - base_bindings = BuildBaseBindings(); uniform_buffer_alignment = GetInteger(GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT); shader_storage_alignment = GetInteger(GL_SHADER_STORAGE_BUFFER_OFFSET_ALIGNMENT); max_vertex_attributes = GetInteger(GL_MAX_VERTEX_ATTRIBS); max_varyings = GetInteger(GL_MAX_VARYING_VECTORS); max_compute_shared_memory_size = GetInteger(GL_MAX_COMPUTE_SHARED_MEMORY_SIZE); + max_glasm_storage_buffer_blocks = GetInteger(GL_MAX_VERTEX_SHADER_STORAGE_BLOCKS); has_warp_intrinsics = GLAD_GL_NV_gpu_shader5 && GLAD_GL_NV_shader_thread_group && GLAD_GL_NV_shader_thread_shuffle; has_shader_ballot = GLAD_GL_ARB_shader_ballot; @@ -243,17 +156,31 @@ Device::Device() { has_precise_bug = TestPreciseBug(); has_broken_texture_view_formats = is_amd || (!is_linux && is_intel); has_nv_viewport_array2 = GLAD_GL_NV_viewport_array2; + has_derivative_control = GLAD_GL_ARB_derivative_control; has_vertex_buffer_unified_memory = GLAD_GL_NV_vertex_buffer_unified_memory; has_debugging_tool_attached = IsDebugToolAttached(extensions); has_depth_buffer_float = HasExtension(extensions, "GL_NV_depth_buffer_float"); + has_geometry_shader_passthrough = GLAD_GL_NV_geometry_shader_passthrough; + has_nv_gpu_shader_5 = GLAD_GL_NV_gpu_shader5; + has_shader_int64 = HasExtension(extensions, "GL_ARB_gpu_shader_int64"); + has_amd_shader_half_float = GLAD_GL_AMD_gpu_shader_half_float; + has_sparse_texture_2 = GLAD_GL_ARB_sparse_texture2; + warp_size_potentially_larger_than_guest = !is_nvidia && !is_intel; + need_fastmath_off = is_nvidia; // At the moment of writing this, only Nvidia's driver optimizes BufferSubData on exclusive // uniform buffers as "push constants" has_fast_buffer_sub_data 
= is_nvidia && !disable_fast_buffer_sub_data; - use_assembly_shaders = Settings::values.use_assembly_shaders.GetValue() && - GLAD_GL_NV_gpu_program5 && GLAD_GL_NV_compute_program5 && - GLAD_GL_NV_transform_feedback && GLAD_GL_NV_transform_feedback2; + use_assembly_shaders = + Settings::values.shader_backend.GetValue() == Settings::ShaderBackend::GLASM && + GLAD_GL_NV_gpu_program5 && GLAD_GL_NV_compute_program5 && GLAD_GL_NV_transform_feedback && + GLAD_GL_NV_transform_feedback2; + + shader_backend = (Settings::values.shader_backend.GetValue() == + Settings::ShaderBackend::GLASM) == use_assembly_shaders + ? Settings::values.shader_backend.GetValue() + : Settings::ShaderBackend::GLSL; // Blocks AMD and Intel OpenGL drivers on Windows from using asynchronous shader compilation. use_asynchronous_shaders = Settings::values.use_asynchronous_shaders.GetValue() && @@ -266,7 +193,7 @@ Device::Device() { LOG_INFO(Render_OpenGL, "Renderer_BrokenTextureViewFormats: {}", has_broken_texture_view_formats); - if (Settings::values.use_assembly_shaders.GetValue() && !use_assembly_shaders) { + if (shader_backend == Settings::ShaderBackend::GLASM && !use_assembly_shaders) { LOG_ERROR(Render_OpenGL, "Assembly shaders enabled but not supported"); } @@ -325,22 +252,6 @@ std::string Device::GetVendorName() const { return vendor_name; } -Device::Device(std::nullptr_t) { - max_uniform_buffers.fill(std::numeric_limits::max()); - uniform_buffer_alignment = 4; - shader_storage_alignment = 4; - max_vertex_attributes = 16; - max_varyings = 15; - max_compute_shared_memory_size = 0x10000; - has_warp_intrinsics = true; - has_shader_ballot = true; - has_vertex_viewport_layer = true; - has_image_load_formatted = true; - has_texture_shadow_lod = true; - has_variable_aoffi = true; - has_depth_buffer_float = true; -} - bool Device::TestVariableAoffi() { return TestProgram(R"(#version 430 core // This is a unit test, please ignore me on apitrace bug reports. 
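// The program only has to compile: TestProgram() reports whether this
// driver accepts a non-constant texel offset (variable AOFFI), which the
// GLSL specification requires to be constant but NV hardware can handle.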
diff --git a/src/video_core/renderer_opengl/gl_device.h b/src/video_core/renderer_opengl/gl_device.h index 2c2b13767..ee992aed4 100755 --- a/src/video_core/renderer_opengl/gl_device.h +++ b/src/video_core/renderer_opengl/gl_device.h @@ -6,34 +6,22 @@ #include #include "common/common_types.h" -#include "video_core/engines/shader_type.h" +#include "shader_recompiler/stage.h" + +namespace Settings { +enum class ShaderBackend : u32; +}; namespace OpenGL { class Device { public: - struct BaseBindings { - u32 uniform_buffer{}; - u32 shader_storage_buffer{}; - u32 sampler{}; - u32 image{}; - }; - explicit Device(); - explicit Device(std::nullptr_t); [[nodiscard]] std::string GetVendorName() const; - u32 GetMaxUniformBuffers(Tegra::Engines::ShaderType shader_type) const noexcept { - return max_uniform_buffers[static_cast(shader_type)]; - } - - const BaseBindings& GetBaseBindings(std::size_t stage_index) const noexcept { - return base_bindings[stage_index]; - } - - const BaseBindings& GetBaseBindings(Tegra::Engines::ShaderType shader_type) const noexcept { - return GetBaseBindings(static_cast(shader_type)); + u32 GetMaxUniformBuffers(Shader::Stage stage) const noexcept { + return max_uniform_buffers[static_cast(stage)]; } size_t GetUniformBufferAlignment() const { @@ -56,6 +44,10 @@ public: return max_compute_shared_memory_size; } + u32 GetMaxGLASMStorageBufferBlocks() const { + return max_glasm_storage_buffer_blocks; + } + bool HasWarpIntrinsics() const { return has_warp_intrinsics; } @@ -108,6 +100,10 @@ public: return has_nv_viewport_array2; } + bool HasDerivativeControl() const { + return has_derivative_control; + } + bool HasDebuggingToolAttached() const { return has_debugging_tool_attached; } @@ -128,18 +124,52 @@ public: return has_depth_buffer_float; } + bool HasGeometryShaderPassthrough() const { + return has_geometry_shader_passthrough; + } + + bool HasNvGpuShader5() const { + return has_nv_gpu_shader_5; + } + + bool HasShaderInt64() const { + return has_shader_int64; + } + + bool HasAmdShaderHalfFloat() const { + return has_amd_shader_half_float; + } + + bool HasSparseTexture2() const { + return has_sparse_texture_2; + } + + bool IsWarpSizePotentiallyLargerThanGuest() const { + return warp_size_potentially_larger_than_guest; + } + + bool NeedsFastmathOff() const { + return need_fastmath_off; + } + + Settings::ShaderBackend GetShaderBackend() const { + return shader_backend; + } + private: static bool TestVariableAoffi(); static bool TestPreciseBug(); - std::string vendor_name; - std::array max_uniform_buffers{}; - std::array base_bindings{}; + std::array max_uniform_buffers{}; size_t uniform_buffer_alignment{}; size_t shader_storage_alignment{}; u32 max_vertex_attributes{}; u32 max_varyings{}; u32 max_compute_shared_memory_size{}; + u32 max_glasm_storage_buffer_blocks{}; + + Settings::ShaderBackend shader_backend{}; + bool has_warp_intrinsics{}; bool has_shader_ballot{}; bool has_vertex_viewport_layer{}; @@ -153,11 +183,21 @@ private: bool has_broken_texture_view_formats{}; bool has_fast_buffer_sub_data{}; bool has_nv_viewport_array2{}; + bool has_derivative_control{}; bool has_debugging_tool_attached{}; bool use_assembly_shaders{}; bool use_asynchronous_shaders{}; bool use_driver_cache{}; bool has_depth_buffer_float{}; + bool has_geometry_shader_passthrough{}; + bool has_nv_gpu_shader_5{}; + bool has_shader_int64{}; + bool has_amd_shader_half_float{}; + bool has_sparse_texture_2{}; + bool warp_size_potentially_larger_than_guest{}; + bool need_fastmath_off{}; + + std::string vendor_name; 
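+    // Assumed reading of the new driver workarounds (not spelled out here):
+    // warp_size_potentially_larger_than_guest flags hosts whose subgroup may
+    // exceed the guest's 32-wide warps (e.g. AMD wave64), and
+    // need_fastmath_off marks NVIDIA drivers where GLSL fastmath has to be
+    // disabled for correct emulation.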
}; } // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_graphics_pipeline.cpp b/src/video_core/renderer_opengl/gl_graphics_pipeline.cpp new file mode 100755 index 000000000..1f19b5825 --- /dev/null +++ b/src/video_core/renderer_opengl/gl_graphics_pipeline.cpp @@ -0,0 +1,560 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include +#include +#include + +#include "common/settings.h" // for enum class Settings::ShaderBackend +#include "common/thread_worker.h" +#include "shader_recompiler/shader_info.h" +#include "video_core/renderer_opengl/gl_graphics_pipeline.h" +#include "video_core/renderer_opengl/gl_shader_manager.h" +#include "video_core/renderer_opengl/gl_shader_util.h" +#include "video_core/renderer_opengl/gl_state_tracker.h" +#include "video_core/shader_notify.h" +#include "video_core/texture_cache/texture_cache.h" + +#if defined(_MSC_VER) && defined(NDEBUG) +#define LAMBDA_FORCEINLINE [[msvc::forceinline]] +#else +#define LAMBDA_FORCEINLINE +#endif + +namespace OpenGL { +namespace { +using Shader::ImageBufferDescriptor; +using Shader::ImageDescriptor; +using Shader::TextureBufferDescriptor; +using Shader::TextureDescriptor; +using Tegra::Texture::TexturePair; +using VideoCommon::ImageId; + +constexpr u32 MAX_TEXTURES = 64; +constexpr u32 MAX_IMAGES = 8; + +template +u32 AccumulateCount(const Range& range) { + u32 num{}; + for (const auto& desc : range) { + num += desc.count; + } + return num; +} + +GLenum Stage(size_t stage_index) { + switch (stage_index) { + case 0: + return GL_VERTEX_SHADER; + case 1: + return GL_TESS_CONTROL_SHADER; + case 2: + return GL_TESS_EVALUATION_SHADER; + case 3: + return GL_GEOMETRY_SHADER; + case 4: + return GL_FRAGMENT_SHADER; + } + UNREACHABLE_MSG("{}", stage_index); + return GL_NONE; +} + +GLenum AssemblyStage(size_t stage_index) { + switch (stage_index) { + case 0: + return GL_VERTEX_PROGRAM_NV; + case 1: + return GL_TESS_CONTROL_PROGRAM_NV; + case 2: + return GL_TESS_EVALUATION_PROGRAM_NV; + case 3: + return GL_GEOMETRY_PROGRAM_NV; + case 4: + return GL_FRAGMENT_PROGRAM_NV; + } + UNREACHABLE_MSG("{}", stage_index); + return GL_NONE; +} + +/// Translates hardware transform feedback indices +/// @param location Hardware location +/// @return Pair of ARB_transform_feedback3 token stream first and third arguments +/// @note Read https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_transform_feedback3.txt +std::pair TransformFeedbackEnum(u8 location) { + const u8 index = location / 4; + if (index >= 8 && index <= 39) { + return {GL_GENERIC_ATTRIB_NV, index - 8}; + } + if (index >= 48 && index <= 55) { + return {GL_TEXTURE_COORD_NV, index - 48}; + } + switch (index) { + case 7: + return {GL_POSITION, 0}; + case 40: + return {GL_PRIMARY_COLOR_NV, 0}; + case 41: + return {GL_SECONDARY_COLOR_NV, 0}; + case 42: + return {GL_BACK_PRIMARY_COLOR_NV, 0}; + case 43: + return {GL_BACK_SECONDARY_COLOR_NV, 0}; + } + UNIMPLEMENTED_MSG("index={}", index); + return {GL_POSITION, 0}; +} + +template +bool Passes(const std::array& stage_infos, u32 enabled_mask) { + for (size_t stage = 0; stage < stage_infos.size(); ++stage) { + if (!Spec::enabled_stages[stage] && ((enabled_mask >> stage) & 1) != 0) { + return false; + } + const auto& info{stage_infos[stage]}; + if constexpr (!Spec::has_storage_buffers) { + if (!info.storage_buffers_descriptors.empty()) { + return false; + } + } + if constexpr (!Spec::has_texture_buffers) { + if 
(!info.texture_buffer_descriptors.empty()) { + return false; + } + } + if constexpr (!Spec::has_image_buffers) { + if (!info.image_buffer_descriptors.empty()) { + return false; + } + } + if constexpr (!Spec::has_images) { + if (!info.image_descriptors.empty()) { + return false; + } + } + } + return true; +} + +using ConfigureFuncPtr = void (*)(GraphicsPipeline*, bool); + +template +ConfigureFuncPtr FindSpec(const std::array& stage_infos, u32 enabled_mask) { + if constexpr (sizeof...(Specs) > 0) { + if (!Passes(stage_infos, enabled_mask)) { + return FindSpec(stage_infos, enabled_mask); + } + } + return GraphicsPipeline::MakeConfigureSpecFunc(); +} + +struct SimpleVertexFragmentSpec { + static constexpr std::array enabled_stages{true, false, false, false, true}; + static constexpr bool has_storage_buffers = false; + static constexpr bool has_texture_buffers = false; + static constexpr bool has_image_buffers = false; + static constexpr bool has_images = false; +}; + +struct SimpleVertexSpec { + static constexpr std::array enabled_stages{true, false, false, false, false}; + static constexpr bool has_storage_buffers = false; + static constexpr bool has_texture_buffers = false; + static constexpr bool has_image_buffers = false; + static constexpr bool has_images = false; +}; + +struct DefaultSpec { + static constexpr std::array enabled_stages{true, true, true, true, true}; + static constexpr bool has_storage_buffers = true; + static constexpr bool has_texture_buffers = true; + static constexpr bool has_image_buffers = true; + static constexpr bool has_images = true; +}; + +ConfigureFuncPtr ConfigureFunc(const std::array& infos, u32 enabled_mask) { + return FindSpec(infos, enabled_mask); +} +} // Anonymous namespace + +GraphicsPipeline::GraphicsPipeline( + const Device& device, TextureCache& texture_cache_, BufferCache& buffer_cache_, + Tegra::MemoryManager& gpu_memory_, Tegra::Engines::Maxwell3D& maxwell3d_, + ProgramManager& program_manager_, StateTracker& state_tracker_, ShaderWorker* thread_worker, + VideoCore::ShaderNotify* shader_notify, std::array sources, + std::array, 5> sources_spirv, const std::array& infos, + const GraphicsPipelineKey& key_) + : texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, + gpu_memory{gpu_memory_}, maxwell3d{maxwell3d_}, program_manager{program_manager_}, + state_tracker{state_tracker_}, key{key_} { + if (shader_notify) { + shader_notify->MarkShaderBuilding(); + } + u32 num_textures{}; + u32 num_images{}; + u32 num_storage_buffers{}; + for (size_t stage = 0; stage < base_uniform_bindings.size(); ++stage) { + auto& info{stage_infos[stage]}; + if (infos[stage]) { + info = *infos[stage]; + enabled_stages_mask |= 1u << stage; + } + if (stage < 4) { + base_uniform_bindings[stage + 1] = base_uniform_bindings[stage]; + base_storage_bindings[stage + 1] = base_storage_bindings[stage]; + + base_uniform_bindings[stage + 1] += AccumulateCount(info.constant_buffer_descriptors); + base_storage_bindings[stage + 1] += AccumulateCount(info.storage_buffers_descriptors); + } + enabled_uniform_buffer_masks[stage] = info.constant_buffer_mask; + std::ranges::copy(info.constant_buffer_used_sizes, uniform_buffer_sizes[stage].begin()); + + const u32 num_tex_buffer_bindings{AccumulateCount(info.texture_buffer_descriptors)}; + num_texture_buffers[stage] += num_tex_buffer_bindings; + num_textures += num_tex_buffer_bindings; + + const u32 num_img_buffers_bindings{AccumulateCount(info.image_buffer_descriptors)}; + num_image_buffers[stage] += num_img_buffers_bindings; + num_images 
+= num_img_buffers_bindings; + + num_textures += AccumulateCount(info.texture_descriptors); + num_images += AccumulateCount(info.image_descriptors); + num_storage_buffers += AccumulateCount(info.storage_buffers_descriptors); + + writes_global_memory |= std::ranges::any_of( + info.storage_buffers_descriptors, [](const auto& desc) { return desc.is_written; }); + } + ASSERT(num_textures <= MAX_TEXTURES); + ASSERT(num_images <= MAX_IMAGES); + + const bool assembly_shaders{assembly_programs[0].handle != 0}; + use_storage_buffers = + !assembly_shaders || num_storage_buffers <= device.GetMaxGLASMStorageBufferBlocks(); + writes_global_memory &= !use_storage_buffers; + configure_func = ConfigureFunc(stage_infos, enabled_stages_mask); + + if (key.xfb_enabled && device.UseAssemblyShaders()) { + GenerateTransformFeedbackState(); + } + auto func{ + [this, device, sources, sources_spirv, shader_notify](ShaderContext::Context*) mutable { + if (!device.UseAssemblyShaders()) { + program.handle = glCreateProgram(); + } + for (size_t stage = 0; stage < 5; ++stage) { + switch (device.GetShaderBackend()) { + case Settings::ShaderBackend::GLSL: { + const auto code{sources[stage]}; + if (code.empty()) { + continue; + } + AttachShader(Stage(stage), program.handle, code); + } break; + case Settings::ShaderBackend::GLASM: { + const auto code{sources[stage]}; + if (code.empty()) { + continue; + } + assembly_programs[stage] = CompileProgram(code, AssemblyStage(stage)); + } break; + case Settings::ShaderBackend::SPIRV: { + const auto code{sources_spirv[stage]}; + if (code.empty()) { + continue; + } + AttachShader(Stage(stage), program.handle, code); + } break; + } + } + if (!device.UseAssemblyShaders()) { + LinkProgram(program.handle); + } + if (shader_notify) { + shader_notify->MarkShaderComplete(); + } + is_built.store(true, std::memory_order_relaxed); + }}; + if (thread_worker) { + thread_worker->QueueWork(std::move(func)); + } else { + func(nullptr); + } +} + +template +void GraphicsPipeline::ConfigureImpl(bool is_indexed) { + std::array image_view_ids; + std::array image_view_indices; + std::array samplers; + size_t image_view_index{}; + GLsizei sampler_binding{}; + + texture_cache.SynchronizeGraphicsDescriptors(); + + buffer_cache.SetUniformBuffersState(enabled_uniform_buffer_masks, &uniform_buffer_sizes); + buffer_cache.runtime.SetBaseUniformBindings(base_uniform_bindings); + buffer_cache.runtime.SetBaseStorageBindings(base_storage_bindings); + buffer_cache.runtime.SetEnableStorageBuffers(use_storage_buffers); + + const auto& regs{maxwell3d.regs}; + const bool via_header_index{regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex}; + const auto config_stage{[&](size_t stage) LAMBDA_FORCEINLINE { + const Shader::Info& info{stage_infos[stage]}; + buffer_cache.UnbindGraphicsStorageBuffers(stage); + if constexpr (Spec::has_storage_buffers) { + size_t ssbo_index{}; + for (const auto& desc : info.storage_buffers_descriptors) { + ASSERT(desc.count == 1); + buffer_cache.BindGraphicsStorageBuffer(stage, ssbo_index, desc.cbuf_index, + desc.cbuf_offset, desc.is_written); + ++ssbo_index; + } + } + const auto& cbufs{maxwell3d.state.shader_stages[stage].const_buffers}; + const auto read_handle{[&](const auto& desc, u32 index) { + ASSERT(cbufs[desc.cbuf_index].enabled); + const u32 index_offset{index << desc.size_shift}; + const u32 offset{desc.cbuf_offset + index_offset}; + const GPUVAddr addr{cbufs[desc.cbuf_index].address + offset}; + if constexpr (std::is_same_v || + std::is_same_v) { + if (desc.has_secondary) { + 
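+                // Bindless handle split across two constant buffers: read
+                // each half and OR them into one Tegra texture handle before
+                // decoding it below.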
ASSERT(cbufs[desc.secondary_cbuf_index].enabled); + const u32 second_offset{desc.secondary_cbuf_offset + index_offset}; + const GPUVAddr separate_addr{cbufs[desc.secondary_cbuf_index].address + + second_offset}; + const u32 lhs_raw{gpu_memory.Read(addr)}; + const u32 rhs_raw{gpu_memory.Read(separate_addr)}; + const u32 raw{lhs_raw | rhs_raw}; + return TexturePair(raw, via_header_index); + } + } + return TexturePair(gpu_memory.Read(addr), via_header_index); + }}; + const auto add_image{[&](const auto& desc) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices[image_view_index++] = handle.first; + } + }}; + if constexpr (Spec::has_texture_buffers) { + for (const auto& desc : info.texture_buffer_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices[image_view_index++] = handle.first; + samplers[sampler_binding++] = 0; + } + } + } + if constexpr (Spec::has_image_buffers) { + for (const auto& desc : info.image_buffer_descriptors) { + add_image(desc); + } + } + for (const auto& desc : info.texture_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices[image_view_index++] = handle.first; + + Sampler* const sampler{texture_cache.GetGraphicsSampler(handle.second)}; + samplers[sampler_binding++] = sampler->Handle(); + } + } + if constexpr (Spec::has_images) { + for (const auto& desc : info.image_descriptors) { + add_image(desc); + } + } + }}; + if constexpr (Spec::enabled_stages[0]) { + config_stage(0); + } + if constexpr (Spec::enabled_stages[1]) { + config_stage(1); + } + if constexpr (Spec::enabled_stages[2]) { + config_stage(2); + } + if constexpr (Spec::enabled_stages[3]) { + config_stage(3); + } + if constexpr (Spec::enabled_stages[4]) { + config_stage(4); + } + const std::span indices_span(image_view_indices.data(), image_view_index); + texture_cache.FillGraphicsImageViews(indices_span, image_view_ids); + + texture_cache.UpdateRenderTargets(false); + state_tracker.BindFramebuffer(texture_cache.GetFramebuffer()->Handle()); + + ImageId* texture_buffer_index{image_view_ids.data()}; + const auto bind_stage_info{[&](size_t stage) LAMBDA_FORCEINLINE { + size_t index{}; + const auto add_buffer{[&](const auto& desc) { + constexpr bool is_image = std::is_same_v; + for (u32 i = 0; i < desc.count; ++i) { + bool is_written{false}; + if constexpr (is_image) { + is_written = desc.is_written; + } + ImageView& image_view{texture_cache.GetImageView(*texture_buffer_index)}; + buffer_cache.BindGraphicsTextureBuffer(stage, index, image_view.GpuAddr(), + image_view.BufferSize(), image_view.format, + is_written, is_image); + ++index; + ++texture_buffer_index; + } + }}; + const Shader::Info& info{stage_infos[stage]}; + buffer_cache.UnbindGraphicsTextureBuffers(stage); + + if constexpr (Spec::has_texture_buffers) { + for (const auto& desc : info.texture_buffer_descriptors) { + add_buffer(desc); + } + } + if constexpr (Spec::has_image_buffers) { + for (const auto& desc : info.image_buffer_descriptors) { + add_buffer(desc); + } + } + for (const auto& desc : info.texture_descriptors) { + texture_buffer_index += desc.count; + } + if constexpr (Spec::has_images) { + for (const auto& desc : info.image_descriptors) { + texture_buffer_index += desc.count; + } + } + }}; + if constexpr (Spec::enabled_stages[0]) { + bind_stage_info(0); + } + if constexpr (Spec::enabled_stages[1]) { + 
bind_stage_info(1); + } + if constexpr (Spec::enabled_stages[2]) { + bind_stage_info(2); + } + if constexpr (Spec::enabled_stages[3]) { + bind_stage_info(3); + } + if constexpr (Spec::enabled_stages[4]) { + bind_stage_info(4); + } + buffer_cache.UpdateGraphicsBuffers(is_indexed); + buffer_cache.BindHostGeometryBuffers(is_indexed); + + if (assembly_programs[0].handle != 0) { + program_manager.BindAssemblyPrograms(assembly_programs, enabled_stages_mask); + } else { + program_manager.BindProgram(program.handle); + } + const ImageId* views_it{image_view_ids.data()}; + GLsizei texture_binding = 0; + GLsizei image_binding = 0; + std::array textures; + std::array images; + const auto prepare_stage{[&](size_t stage) { + buffer_cache.runtime.SetImagePointers(&textures[texture_binding], &images[image_binding]); + buffer_cache.BindHostStageBuffers(stage); + + texture_binding += num_texture_buffers[stage]; + image_binding += num_image_buffers[stage]; + + views_it += num_texture_buffers[stage]; + views_it += num_image_buffers[stage]; + + const auto& info{stage_infos[stage]}; + for (const auto& desc : info.texture_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + ImageView& image_view{texture_cache.GetImageView(*(views_it++))}; + textures[texture_binding++] = image_view.Handle(desc.type); + } + } + for (const auto& desc : info.image_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + ImageView& image_view{texture_cache.GetImageView(*(views_it++))}; + if (desc.is_written) { + texture_cache.MarkModification(image_view.image_id); + } + images[image_binding++] = image_view.StorageView(desc.type, desc.format); + } + } + }}; + if constexpr (Spec::enabled_stages[0]) { + prepare_stage(0); + } + if constexpr (Spec::enabled_stages[1]) { + prepare_stage(1); + } + if constexpr (Spec::enabled_stages[2]) { + prepare_stage(2); + } + if constexpr (Spec::enabled_stages[3]) { + prepare_stage(3); + } + if constexpr (Spec::enabled_stages[4]) { + prepare_stage(4); + } + if (texture_binding != 0) { + ASSERT(texture_binding == sampler_binding); + glBindTextures(0, texture_binding, textures.data()); + glBindSamplers(0, sampler_binding, samplers.data()); + } + if (image_binding != 0) { + glBindImageTextures(0, image_binding, images.data()); + } +} + +void GraphicsPipeline::ConfigureTransformFeedbackImpl() const { + glTransformFeedbackStreamAttribsNV(num_xfb_attribs, xfb_attribs.data(), num_xfb_strides, + xfb_streams.data(), GL_INTERLEAVED_ATTRIBS); +} + +void GraphicsPipeline::GenerateTransformFeedbackState() { + // TODO(Rodrigo): Inject SKIP_COMPONENTS*_NV when required. An unimplemented message will signal + // when this is required. 
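+    // Layout reminder: glTransformFeedbackStreamAttribsNV consumes the
+    // attribs array as {enum, component count, index} triplets
+    // (XFB_ENTRY_STRIDE == 3). Switching to the next buffer is itself a
+    // {GL_NEXT_BUFFER_NV, 0, 0} record, which is what the inner push below
+    // emits, and consecutive components of one location merge by bumping the
+    // previous record's count (cursor[-2]).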
+ GLint* cursor{xfb_attribs.data()}; + GLint* current_stream{xfb_streams.data()}; + + for (size_t feedback = 0; feedback < Maxwell::NumTransformFeedbackBuffers; ++feedback) { + const auto& layout = key.xfb_state.layouts[feedback]; + UNIMPLEMENTED_IF_MSG(layout.stride != layout.varying_count * 4, "Stride padding"); + if (layout.varying_count == 0) { + continue; + } + *current_stream = static_cast(feedback); + if (current_stream != xfb_streams.data()) { + // When stepping one stream, push the expected token + cursor[0] = GL_NEXT_BUFFER_NV; + cursor[1] = 0; + cursor[2] = 0; + cursor += XFB_ENTRY_STRIDE; + } + ++current_stream; + + const auto& locations = key.xfb_state.varyings[feedback]; + std::optional current_index; + for (u32 offset = 0; offset < layout.varying_count; ++offset) { + const u8 location = locations[offset]; + const u8 index = location / 4; + + if (current_index == index) { + // Increase number of components of the previous attachment + ++cursor[-2]; + continue; + } + current_index = index; + + std::tie(cursor[0], cursor[2]) = TransformFeedbackEnum(location); + cursor[1] = 1; + cursor += XFB_ENTRY_STRIDE; + } + } + num_xfb_attribs = static_cast((cursor - xfb_attribs.data()) / XFB_ENTRY_STRIDE); + num_xfb_strides = static_cast(current_stream - xfb_streams.data()); +} + +} // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_graphics_pipeline.h b/src/video_core/renderer_opengl/gl_graphics_pipeline.h new file mode 100755 index 000000000..5f5d57385 --- /dev/null +++ b/src/video_core/renderer_opengl/gl_graphics_pipeline.h @@ -0,0 +1,164 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include +#include + +#include "common/bit_field.h" +#include "common/cityhash.h" +#include "common/common_types.h" +#include "shader_recompiler/shader_info.h" +#include "video_core/engines/maxwell_3d.h" +#include "video_core/memory_manager.h" +#include "video_core/renderer_opengl/gl_buffer_cache.h" +#include "video_core/renderer_opengl/gl_resource_manager.h" +#include "video_core/renderer_opengl/gl_texture_cache.h" +#include "video_core/transform_feedback.h" + +namespace OpenGL { + +namespace ShaderContext { +struct Context; +} + +class Device; +class ProgramManager; + +using Maxwell = Tegra::Engines::Maxwell3D::Regs; +using ShaderWorker = Common::StatefulThreadWorker; + +struct GraphicsPipelineKey { + std::array unique_hashes; + union { + u32 raw; + BitField<0, 1, u32> xfb_enabled; + BitField<1, 1, u32> early_z; + BitField<2, 4, Maxwell::PrimitiveTopology> gs_input_topology; + BitField<6, 2, Maxwell::TessellationPrimitive> tessellation_primitive; + BitField<8, 2, Maxwell::TessellationSpacing> tessellation_spacing; + BitField<10, 1, u32> tessellation_clockwise; + }; + std::array padding; + VideoCommon::TransformFeedbackState xfb_state; + + size_t Hash() const noexcept { + return static_cast(Common::CityHash64(reinterpret_cast(this), Size())); + } + + bool operator==(const GraphicsPipelineKey& rhs) const noexcept { + return std::memcmp(this, &rhs, Size()) == 0; + } + + bool operator!=(const GraphicsPipelineKey& rhs) const noexcept { + return !operator==(rhs); + } + + [[nodiscard]] size_t Size() const noexcept { + if (xfb_enabled != 0) { + return sizeof(GraphicsPipelineKey); + } else { + return offsetof(GraphicsPipelineKey, padding); + } + } +}; +static_assert(std::has_unique_object_representations_v); +static_assert(std::is_trivially_copyable_v); 
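+// The surrounding asserts guard the hashing strategy: Hash() feeds the raw
+// bytes of the key to CityHash64 and operator== is a memcmp, so the type
+// must be trivially copyable with no padding bytes. Size() deliberately
+// stops at `padding` when transform feedback is off, so keys differing only
+// in unused xfb_state still hash and compare equal.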
+static_assert(std::is_trivially_constructible_v); + +class GraphicsPipeline { +public: + explicit GraphicsPipeline(const Device& device, TextureCache& texture_cache_, + BufferCache& buffer_cache_, Tegra::MemoryManager& gpu_memory_, + Tegra::Engines::Maxwell3D& maxwell3d_, + ProgramManager& program_manager_, StateTracker& state_tracker_, + ShaderWorker* thread_worker, VideoCore::ShaderNotify* shader_notify, + std::array sources, + std::array, 5> sources_spirv, + const std::array& infos, + const GraphicsPipelineKey& key_); + + void Configure(bool is_indexed) { + configure_func(this, is_indexed); + } + + void ConfigureTransformFeedback() const { + if (num_xfb_attribs != 0) { + ConfigureTransformFeedbackImpl(); + } + } + + [[nodiscard]] const GraphicsPipelineKey& Key() const noexcept { + return key; + } + + [[nodiscard]] bool WritesGlobalMemory() const noexcept { + return writes_global_memory; + } + + [[nodiscard]] bool IsBuilt() const noexcept { + return is_built.load(std::memory_order::relaxed); + } + + template + static auto MakeConfigureSpecFunc() { + return [](GraphicsPipeline* pipeline, bool is_indexed) { + pipeline->ConfigureImpl(is_indexed); + }; + } + +private: + template + void ConfigureImpl(bool is_indexed); + + void ConfigureTransformFeedbackImpl() const; + + void GenerateTransformFeedbackState(); + + TextureCache& texture_cache; + BufferCache& buffer_cache; + Tegra::MemoryManager& gpu_memory; + Tegra::Engines::Maxwell3D& maxwell3d; + ProgramManager& program_manager; + StateTracker& state_tracker; + const GraphicsPipelineKey key; + + void (*configure_func)(GraphicsPipeline*, bool){}; + + OGLProgram program; + std::array assembly_programs; + u32 enabled_stages_mask{}; + + std::array stage_infos{}; + std::array enabled_uniform_buffer_masks{}; + VideoCommon::UniformBufferSizes uniform_buffer_sizes{}; + std::array base_uniform_bindings{}; + std::array base_storage_bindings{}; + std::array num_texture_buffers{}; + std::array num_image_buffers{}; + + bool use_storage_buffers{}; + bool writes_global_memory{}; + std::atomic_bool is_built{false}; + + static constexpr std::size_t XFB_ENTRY_STRIDE = 3; + GLsizei num_xfb_attribs{}; + GLsizei num_xfb_strides{}; + std::array xfb_attribs{}; + std::array xfb_streams{}; +}; + +} // namespace OpenGL + +namespace std { +template <> +struct hash { + size_t operator()(const OpenGL::GraphicsPipelineKey& k) const noexcept { + return k.Hash(); + } +}; +} // namespace std diff --git a/src/video_core/renderer_opengl/gl_rasterizer.cpp b/src/video_core/renderer_opengl/gl_rasterizer.cpp index 183861a23..da492d773 100755 --- a/src/video_core/renderer_opengl/gl_rasterizer.cpp +++ b/src/video_core/renderer_opengl/gl_rasterizer.cpp @@ -23,7 +23,6 @@ #include "core/memory.h" #include "video_core/engines/kepler_compute.h" #include "video_core/engines/maxwell_3d.h" -#include "video_core/engines/shader_type.h" #include "video_core/memory_manager.h" #include "video_core/renderer_opengl/gl_device.h" #include "video_core/renderer_opengl/gl_query_cache.h" @@ -40,7 +39,6 @@ namespace OpenGL { using Maxwell = Tegra::Engines::Maxwell3D::Regs; using GLvec4 = std::array; -using Tegra::Engines::ShaderType; using VideoCore::Surface::PixelFormat; using VideoCore::Surface::SurfaceTarget; using VideoCore::Surface::SurfaceType; @@ -51,112 +49,11 @@ MICROPROFILE_DEFINE(OpenGL_Blits, "OpenGL", "Blits", MP_RGB(128, 128, 192)); MICROPROFILE_DEFINE(OpenGL_CacheManagement, "OpenGL", "Cache Management", MP_RGB(100, 255, 100)); namespace { - constexpr size_t NUM_SUPPORTED_VERTEX_ATTRIBUTES 
= 16; -struct TextureHandle { - constexpr TextureHandle(u32 data, bool via_header_index) { - const Tegra::Texture::TextureHandle handle{data}; - image = handle.tic_id; - sampler = via_header_index ? image : handle.tsc_id.Value(); - } - - u32 image; - u32 sampler; -}; - -template -TextureHandle GetTextureInfo(const Engine& engine, bool via_header_index, const Entry& entry, - ShaderType shader_type, size_t index = 0) { - if constexpr (std::is_same_v) { - if (entry.is_separated) { - const u32 buffer_1 = entry.buffer; - const u32 buffer_2 = entry.secondary_buffer; - const u32 offset_1 = entry.offset; - const u32 offset_2 = entry.secondary_offset; - const u32 handle_1 = engine.AccessConstBuffer32(shader_type, buffer_1, offset_1); - const u32 handle_2 = engine.AccessConstBuffer32(shader_type, buffer_2, offset_2); - return TextureHandle(handle_1 | handle_2, via_header_index); - } - } - if (entry.is_bindless) { - const u32 raw = engine.AccessConstBuffer32(shader_type, entry.buffer, entry.offset); - return TextureHandle(raw, via_header_index); - } - const u32 buffer = engine.GetBoundBuffer(); - const u64 offset = (entry.offset + index) * sizeof(u32); - return TextureHandle(engine.AccessConstBuffer32(shader_type, buffer, offset), via_header_index); -} - -/// Translates hardware transform feedback indices -/// @param location Hardware location -/// @return Pair of ARB_transform_feedback3 token stream first and third arguments -/// @note Read https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_transform_feedback3.txt -std::pair TransformFeedbackEnum(u8 location) { - const u8 index = location / 4; - if (index >= 8 && index <= 39) { - return {GL_GENERIC_ATTRIB_NV, index - 8}; - } - if (index >= 48 && index <= 55) { - return {GL_TEXTURE_COORD_NV, index - 48}; - } - switch (index) { - case 7: - return {GL_POSITION, 0}; - case 40: - return {GL_PRIMARY_COLOR_NV, 0}; - case 41: - return {GL_SECONDARY_COLOR_NV, 0}; - case 42: - return {GL_BACK_PRIMARY_COLOR_NV, 0}; - case 43: - return {GL_BACK_SECONDARY_COLOR_NV, 0}; - } - UNIMPLEMENTED_MSG("index={}", index); - return {GL_POSITION, 0}; -} - void oglEnable(GLenum cap, bool state) { (state ? glEnable : glDisable)(cap); } - -ImageViewType ImageViewTypeFromEntry(const SamplerEntry& entry) { - if (entry.is_buffer) { - return ImageViewType::Buffer; - } - switch (entry.type) { - case Tegra::Shader::TextureType::Texture1D: - return entry.is_array ? ImageViewType::e1DArray : ImageViewType::e1D; - case Tegra::Shader::TextureType::Texture2D: - return entry.is_array ? ImageViewType::e2DArray : ImageViewType::e2D; - case Tegra::Shader::TextureType::Texture3D: - return ImageViewType::e3D; - case Tegra::Shader::TextureType::TextureCube: - return entry.is_array ? 
ImageViewType::CubeArray : ImageViewType::Cube; - } - UNREACHABLE(); - return ImageViewType::e2D; -} - -ImageViewType ImageViewTypeFromEntry(const ImageEntry& entry) { - switch (entry.type) { - case Tegra::Shader::ImageType::Texture1D: - return ImageViewType::e1D; - case Tegra::Shader::ImageType::Texture1DArray: - return ImageViewType::e1DArray; - case Tegra::Shader::ImageType::Texture2D: - return ImageViewType::e2D; - case Tegra::Shader::ImageType::Texture2DArray: - return ImageViewType::e2DArray; - case Tegra::Shader::ImageType::Texture3D: - return ImageViewType::e3D; - case Tegra::Shader::ImageType::TextureBuffer: - return ImageViewType::Buffer; - } - UNREACHABLE(); - return ImageViewType::e2D; -} - } // Anonymous namespace RasterizerOpenGL::RasterizerOpenGL(Core::Frontend::EmuWindow& emu_window_, Tegra::GPU& gpu_, @@ -170,14 +67,10 @@ RasterizerOpenGL::RasterizerOpenGL(Core::Frontend::EmuWindow& emu_window_, Tegra texture_cache(texture_cache_runtime, *this, maxwell3d, kepler_compute, gpu_memory), buffer_cache_runtime(device), buffer_cache(*this, maxwell3d, kepler_compute, gpu_memory, cpu_memory_, buffer_cache_runtime), - shader_cache(*this, emu_window_, gpu, maxwell3d, kepler_compute, gpu_memory, device), + shader_cache(*this, emu_window_, maxwell3d, kepler_compute, gpu_memory, device, texture_cache, + buffer_cache, program_manager, state_tracker, gpu.ShaderNotify()), query_cache(*this, maxwell3d, gpu_memory), - fence_manager(*this, gpu, texture_cache, buffer_cache, query_cache), - async_shaders(emu_window_) { - if (device.UseAsynchronousShaders()) { - async_shaders.AllocateWorkers(); - } -} + fence_manager(*this, gpu, texture_cache, buffer_cache, query_cache) {} RasterizerOpenGL::~RasterizerOpenGL() = default; @@ -204,7 +97,7 @@ void RasterizerOpenGL::SyncVertexFormats() { const auto gl_index = static_cast(index); // Disable constant attributes. - if (attrib.IsConstant()) { + if (attrib.constant) { glDisableVertexAttribArray(gl_index); continue; } @@ -244,116 +137,9 @@ void RasterizerOpenGL::SyncVertexInstances() { } } -void RasterizerOpenGL::SetupShaders(bool is_indexed) { - u32 clip_distances = 0; - - std::array shaders{}; - image_view_indices.clear(); - sampler_handles.clear(); - - texture_cache.SynchronizeGraphicsDescriptors(); - - for (std::size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) { - const auto& shader_config = maxwell3d.regs.shader_config[index]; - const auto program{static_cast(index)}; - - // Skip stages that are not enabled - if (!maxwell3d.regs.IsShaderConfigEnabled(index)) { - switch (program) { - case Maxwell::ShaderProgram::Geometry: - program_manager.UseGeometryShader(0); - break; - case Maxwell::ShaderProgram::Fragment: - program_manager.UseFragmentShader(0); - break; - default: - break; - } - continue; - } - // Currently this stages are not supported in the OpenGL backend. - // TODO(Blinkhawk): Port tesselation shaders from Vulkan to OpenGL - if (program == Maxwell::ShaderProgram::TesselationControl || - program == Maxwell::ShaderProgram::TesselationEval) { - continue; - } - - Shader* const shader = shader_cache.GetStageProgram(program, async_shaders); - const GLuint program_handle = shader->IsBuilt() ? 
shader->GetHandle() : 0; - switch (program) { - case Maxwell::ShaderProgram::VertexA: - case Maxwell::ShaderProgram::VertexB: - program_manager.UseVertexShader(program_handle); - break; - case Maxwell::ShaderProgram::Geometry: - program_manager.UseGeometryShader(program_handle); - break; - case Maxwell::ShaderProgram::Fragment: - program_manager.UseFragmentShader(program_handle); - break; - default: - UNIMPLEMENTED_MSG("Unimplemented shader index={}, enable={}, offset=0x{:08X}", index, - shader_config.enable.Value(), shader_config.offset); - break; - } - - // Stage indices are 0 - 5 - const size_t stage = index == 0 ? 0 : index - 1; - shaders[stage] = shader; - - SetupDrawTextures(shader, stage); - SetupDrawImages(shader, stage); - - buffer_cache.SetEnabledUniformBuffers(stage, shader->GetEntries().enabled_uniform_buffers); - - buffer_cache.UnbindGraphicsStorageBuffers(stage); - u32 ssbo_index = 0; - for (const auto& buffer : shader->GetEntries().global_memory_entries) { - buffer_cache.BindGraphicsStorageBuffer(stage, ssbo_index, buffer.cbuf_index, - buffer.cbuf_offset, buffer.is_written); - ++ssbo_index; - } - - // Workaround for Intel drivers. - // When a clip distance is enabled but not set in the shader it crops parts of the screen - // (sometimes it's half the screen, sometimes three quarters). To avoid this, enable the - // clip distances only when it's written by a shader stage. - clip_distances |= shader->GetEntries().clip_distances; - - // When VertexA is enabled, we have dual vertex shaders - if (program == Maxwell::ShaderProgram::VertexA) { - // VertexB was combined with VertexA, so we skip the VertexB iteration - ++index; - } - } - SyncClipEnabled(clip_distances); - maxwell3d.dirty.flags[Dirty::Shaders] = false; - - buffer_cache.UpdateGraphicsBuffers(is_indexed); - - const std::span indices_span(image_view_indices.data(), image_view_indices.size()); - texture_cache.FillGraphicsImageViews(indices_span, image_view_ids); - - buffer_cache.BindHostGeometryBuffers(is_indexed); - - size_t image_view_index = 0; - size_t texture_index = 0; - size_t image_index = 0; - for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) { - const Shader* const shader = shaders[stage]; - if (!shader) { - continue; - } - buffer_cache.BindHostStageBuffers(stage); - const auto& base = device.GetBaseBindings(stage); - BindTextures(shader->GetEntries(), base.sampler, base.image, image_view_index, - texture_index, image_index); - } -} - void RasterizerOpenGL::LoadDiskResources(u64 title_id, std::stop_token stop_loading, const VideoCore::DiskResourceLoadCallback& callback) { - shader_cache.LoadDiskCache(title_id, stop_loading, callback); + shader_cache.LoadDiskResources(title_id, stop_loading, callback); } void RasterizerOpenGL::Clear() { @@ -432,16 +218,15 @@ void RasterizerOpenGL::Draw(bool is_indexed, bool is_instanced) { SyncState(); - // Setup shaders and their used resources. 
+ GraphicsPipeline* const pipeline{shader_cache.CurrentGraphicsPipeline()}; + if (!pipeline) { + return; + } std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex}; - SetupShaders(is_indexed); - - texture_cache.UpdateRenderTargets(false); - state_tracker.BindFramebuffer(texture_cache.GetFramebuffer()->Handle()); - program_manager.BindGraphicsPipeline(); + pipeline->Configure(is_indexed); const GLenum primitive_mode = MaxwellToGL::PrimitiveTopology(maxwell3d.regs.draw.topology); - BeginTransformFeedback(primitive_mode); + BeginTransformFeedback(pipeline, primitive_mode); const GLuint base_instance = static_cast(maxwell3d.regs.vb_base_instance); const GLsizei num_instances = @@ -480,35 +265,24 @@ void RasterizerOpenGL::Draw(bool is_indexed, bool is_instanced) { num_instances, base_instance); } } - EndTransformFeedback(); ++num_queued_commands; + has_written_global_memory |= pipeline->WritesGlobalMemory(); gpu.TickWork(); } -void RasterizerOpenGL::DispatchCompute(GPUVAddr code_addr) { - Shader* const kernel = shader_cache.GetComputeKernel(code_addr); - - std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex}; - BindComputeTextures(kernel); - - const auto& entries = kernel->GetEntries(); - buffer_cache.SetEnabledComputeUniformBuffers(entries.enabled_uniform_buffers); - buffer_cache.UnbindComputeStorageBuffers(); - u32 ssbo_index = 0; - for (const auto& buffer : entries.global_memory_entries) { - buffer_cache.BindComputeStorageBuffer(ssbo_index, buffer.cbuf_index, buffer.cbuf_offset, - buffer.is_written); - ++ssbo_index; +void RasterizerOpenGL::DispatchCompute() { + ComputePipeline* const pipeline{shader_cache.CurrentComputePipeline()}; + if (!pipeline) { + return; } - buffer_cache.UpdateComputeBuffers(); - buffer_cache.BindHostComputeBuffers(); - - const auto& launch_desc = kepler_compute.launch_description; - glDispatchCompute(launch_desc.grid_dim_x, launch_desc.grid_dim_y, launch_desc.grid_dim_z); + pipeline->Configure(); + const auto& qmd{kepler_compute.launch_description}; + glDispatchCompute(qmd.grid_dim_x, qmd.grid_dim_y, qmd.grid_dim_z); ++num_queued_commands; + has_written_global_memory |= pipeline->WritesGlobalMemory(); } void RasterizerOpenGL::ResetCounter(VideoCore::QueryType type) { @@ -681,7 +455,7 @@ void RasterizerOpenGL::WaitForIdle() { } void RasterizerOpenGL::FragmentBarrier() { - glMemoryBarrier(GL_FRAMEBUFFER_BARRIER_BIT); + glMemoryBarrier(GL_FRAMEBUFFER_BARRIER_BIT | GL_TEXTURE_FETCH_BARRIER_BIT); } void RasterizerOpenGL::TiledCacheBarrier() { @@ -694,6 +468,13 @@ void RasterizerOpenGL::FlushCommands() { return; } num_queued_commands = 0; + + // Make sure memory stored from the previous GL command stream is visible + // This is only needed on assembly shaders where we write to GPU memory with raw pointers + if (has_written_global_memory) { + has_written_global_memory = false; + glMemoryBarrier(GL_BUFFER_UPDATE_BARRIER_BIT); + } glFlush(); } @@ -737,111 +518,11 @@ bool RasterizerOpenGL::AccelerateDisplay(const Tegra::FramebufferConfig& config, // ASSERT_MSG(image_view->size.width == config.width, "Framebuffer width is different"); // ASSERT_MSG(image_view->size.height == config.height, "Framebuffer height is different"); - screen_info.display_texture = image_view->Handle(ImageViewType::e2D); + screen_info.display_texture = image_view->Handle(Shader::TextureType::Color2D); screen_info.display_srgb = VideoCore::Surface::IsPixelFormatSRGB(image_view->format); return true; } -void RasterizerOpenGL::BindComputeTextures(Shader* kernel) { - 
image_view_indices.clear(); - sampler_handles.clear(); - - texture_cache.SynchronizeComputeDescriptors(); - - SetupComputeTextures(kernel); - SetupComputeImages(kernel); - - const std::span indices_span(image_view_indices.data(), image_view_indices.size()); - texture_cache.FillComputeImageViews(indices_span, image_view_ids); - - program_manager.BindCompute(kernel->GetHandle()); - size_t image_view_index = 0; - size_t texture_index = 0; - size_t image_index = 0; - BindTextures(kernel->GetEntries(), 0, 0, image_view_index, texture_index, image_index); -} - -void RasterizerOpenGL::BindTextures(const ShaderEntries& entries, GLuint base_texture, - GLuint base_image, size_t& image_view_index, - size_t& texture_index, size_t& image_index) { - const GLuint* const samplers = sampler_handles.data() + texture_index; - const GLuint* const textures = texture_handles.data() + texture_index; - const GLuint* const images = image_handles.data() + image_index; - - const size_t num_samplers = entries.samplers.size(); - for (const auto& sampler : entries.samplers) { - for (size_t i = 0; i < sampler.size; ++i) { - const ImageViewId image_view_id = image_view_ids[image_view_index++]; - const ImageView& image_view = texture_cache.GetImageView(image_view_id); - const GLuint handle = image_view.Handle(ImageViewTypeFromEntry(sampler)); - texture_handles[texture_index++] = handle; - } - } - const size_t num_images = entries.images.size(); - for (size_t unit = 0; unit < num_images; ++unit) { - // TODO: Mark as modified - const ImageViewId image_view_id = image_view_ids[image_view_index++]; - const ImageView& image_view = texture_cache.GetImageView(image_view_id); - const GLuint handle = image_view.Handle(ImageViewTypeFromEntry(entries.images[unit])); - image_handles[image_index] = handle; - ++image_index; - } - if (num_samplers > 0) { - glBindSamplers(base_texture, static_cast<GLsizei>(num_samplers), samplers); - glBindTextures(base_texture, static_cast<GLsizei>(num_samplers), textures); - } - if (num_images > 0) { - glBindImageTextures(base_image, static_cast<GLsizei>(num_images), images); - } -} - -void RasterizerOpenGL::SetupDrawTextures(const Shader* shader, size_t stage_index) { - const bool via_header_index = - maxwell3d.regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex; - for (const auto& entry : shader->GetEntries().samplers) { - const auto shader_type = static_cast<ShaderType>(stage_index); - for (size_t index = 0; index < entry.size; ++index) { - const auto handle = - GetTextureInfo(maxwell3d, via_header_index, entry, shader_type, index); - const Sampler* const sampler = texture_cache.GetGraphicsSampler(handle.sampler); - sampler_handles.push_back(sampler->Handle()); - image_view_indices.push_back(handle.image); - } - } -} - -void RasterizerOpenGL::SetupComputeTextures(const Shader* kernel) { - const bool via_header_index = kepler_compute.launch_description.linked_tsc; - for (const auto& entry : kernel->GetEntries().samplers) { - for (size_t i = 0; i < entry.size; ++i) { - const auto handle = - GetTextureInfo(kepler_compute, via_header_index, entry, ShaderType::Compute, i); - const Sampler* const sampler = texture_cache.GetComputeSampler(handle.sampler); - sampler_handles.push_back(sampler->Handle()); - image_view_indices.push_back(handle.image); - } - } -} - -void RasterizerOpenGL::SetupDrawImages(const Shader* shader, size_t stage_index) { - const bool via_header_index = - maxwell3d.regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex; - for (const auto& entry : shader->GetEntries().images) { - const auto shader_type =
static_cast<ShaderType>(stage_index); - const auto handle = GetTextureInfo(maxwell3d, via_header_index, entry, shader_type); - image_view_indices.push_back(handle.image); - } -} - -void RasterizerOpenGL::SetupComputeImages(const Shader* shader) { - const bool via_header_index = kepler_compute.launch_description.linked_tsc; - for (const auto& entry : shader->GetEntries().images) { - const auto handle = - GetTextureInfo(kepler_compute, via_header_index, entry, ShaderType::Compute); - image_view_indices.push_back(handle.image); - } -} - void RasterizerOpenGL::SyncState() { SyncViewport(); SyncRasterizeEnable(); @@ -957,7 +638,7 @@ void RasterizerOpenGL::SyncDepthClamp() { void RasterizerOpenGL::SyncClipEnabled(u32 clip_mask) { auto& flags = maxwell3d.dirty.flags; - if (!flags[Dirty::ClipDistances] && !flags[Dirty::Shaders]) { + if (!flags[Dirty::ClipDistances] && !flags[VideoCommon::Dirty::Shaders]) { return; } flags[Dirty::ClipDistances] = false; @@ -1334,68 +1015,13 @@ void RasterizerOpenGL::SyncFramebufferSRGB() { oglEnable(GL_FRAMEBUFFER_SRGB, maxwell3d.regs.framebuffer_srgb); } -void RasterizerOpenGL::SyncTransformFeedback() { - // TODO(Rodrigo): Inject SKIP_COMPONENTS*_NV when required. An unimplemented message will signal - // when this is required. - const auto& regs = maxwell3d.regs; - - static constexpr std::size_t STRIDE = 3; - std::array<GLint, 128 * STRIDE * Maxwell::NumTransformFeedbackBuffers> attribs; - std::array<GLint, Maxwell::NumTransformFeedbackBuffers> streams; - - GLint* cursor = attribs.data(); - GLint* current_stream = streams.data(); - - for (std::size_t feedback = 0; feedback < Maxwell::NumTransformFeedbackBuffers; ++feedback) { - const auto& layout = regs.tfb_layouts[feedback]; - UNIMPLEMENTED_IF_MSG(layout.stride != layout.varying_count * 4, "Stride padding"); - if (layout.varying_count == 0) { - continue; - } - - *current_stream = static_cast<GLint>(feedback); - if (current_stream != streams.data()) { - // When stepping one stream, push the expected token - cursor[0] = GL_NEXT_BUFFER_NV; - cursor[1] = 0; - cursor[2] = 0; - cursor += STRIDE; - } - ++current_stream; - - const auto& locations = regs.tfb_varying_locs[feedback]; - std::optional<u8> current_index; - for (u32 offset = 0; offset < layout.varying_count; ++offset) { - const u8 location = locations[offset]; - const u8 index = location / 4; - - if (current_index == index) { - // Increase number of components of the previous attachment - ++cursor[-2]; - continue; - } - current_index = index; - - std::tie(cursor[0], cursor[2]) = TransformFeedbackEnum(location); - cursor[1] = 1; - cursor += STRIDE; - } - } - - const GLsizei num_attribs = static_cast<GLsizei>((cursor - attribs.data()) / STRIDE); - const GLsizei num_strides = static_cast<GLsizei>(current_stream - streams.data()); - glTransformFeedbackStreamAttribsNV(num_attribs, attribs.data(), num_strides, streams.data(), - GL_INTERLEAVED_ATTRIBS); -} - -void RasterizerOpenGL::BeginTransformFeedback(GLenum primitive_mode) { +void RasterizerOpenGL::BeginTransformFeedback(GraphicsPipeline* program, GLenum primitive_mode) { const auto& regs = maxwell3d.regs; if (regs.tfb_enabled == 0) { return; } - if (device.UseAssemblyShaders()) { - SyncTransformFeedback(); - } + program->ConfigureTransformFeedback(); + UNIMPLEMENTED_IF(regs.IsShaderConfigEnabled(Maxwell::ShaderProgram::TesselationControl) || regs.IsShaderConfigEnabled(Maxwell::ShaderProgram::TesselationEval) || regs.IsShaderConfigEnabled(Maxwell::ShaderProgram::Geometry)); @@ -1409,11 +1035,9 @@ void RasterizerOpenGL::BeginTransformFeedback(GLenum primitive_mode) { } void RasterizerOpenGL::EndTransformFeedback() { - const auto& regs = maxwell3d.regs; - if
(regs.tfb_enabled == 0) { - return; + if (maxwell3d.regs.tfb_enabled != 0) { + glEndTransformFeedback(); } - glEndTransformFeedback(); } } // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_rasterizer.h b/src/video_core/renderer_opengl/gl_rasterizer.h index 87f44cd62..faa6cb521 100755 --- a/src/video_core/renderer_opengl/gl_rasterizer.h +++ b/src/video_core/renderer_opengl/gl_rasterizer.h @@ -27,11 +27,9 @@ #include "video_core/renderer_opengl/gl_query_cache.h" #include "video_core/renderer_opengl/gl_resource_manager.h" #include "video_core/renderer_opengl/gl_shader_cache.h" -#include "video_core/renderer_opengl/gl_shader_decompiler.h" #include "video_core/renderer_opengl/gl_shader_manager.h" #include "video_core/renderer_opengl/gl_state_tracker.h" #include "video_core/renderer_opengl/gl_texture_cache.h" -#include "video_core/shader/async_shaders.h" #include "video_core/textures/texture.h" namespace Core::Memory { @@ -68,7 +66,7 @@ public: void Draw(bool is_indexed, bool is_instanced) override; void Clear() override; - void DispatchCompute(GPUVAddr code_addr) override; + void DispatchCompute() override; void ResetCounter(VideoCore::QueryType type) override; void Query(GPUVAddr gpu_addr, VideoCore::QueryType type, std::optional timestamp) override; void BindGraphicsUniformBuffer(size_t stage, u32 index, GPUVAddr gpu_addr, u32 size) override; @@ -106,36 +104,11 @@ public: return num_queued_commands > 0; } - VideoCommon::Shader::AsyncShaders& GetAsyncShaders() { - return async_shaders; - } - - const VideoCommon::Shader::AsyncShaders& GetAsyncShaders() const { - return async_shaders; - } - private: static constexpr size_t MAX_TEXTURES = 192; static constexpr size_t MAX_IMAGES = 48; static constexpr size_t MAX_IMAGE_VIEWS = MAX_TEXTURES + MAX_IMAGES; - void BindComputeTextures(Shader* kernel); - - void BindTextures(const ShaderEntries& entries, GLuint base_texture, GLuint base_image, - size_t& image_view_index, size_t& texture_index, size_t& image_index); - - /// Configures the current textures to use for the draw command. - void SetupDrawTextures(const Shader* shader, size_t stage_index); - - /// Configures the textures used in a compute shader. - void SetupComputeTextures(const Shader* kernel); - - /// Configures images in a graphics shader. - void SetupDrawImages(const Shader* shader, size_t stage_index); - - /// Configures images in a compute shader. 
- void SetupComputeImages(const Shader* shader); - /// Syncs state to match guest's void SyncState(); @@ -208,18 +181,12 @@ private: /// Syncs vertex instances to match the guest state void SyncVertexInstances(); - /// Syncs transform feedback state to match guest state - /// @note Only valid on assembly shaders - void SyncTransformFeedback(); - /// Begin a transform feedback - void BeginTransformFeedback(GLenum primitive_mode); + void BeginTransformFeedback(GraphicsPipeline* pipeline, GLenum primitive_mode); /// End a transform feedback void EndTransformFeedback(); - void SetupShaders(bool is_indexed); - Tegra::GPU& gpu; Tegra::Engines::Maxwell3D& maxwell3d; Tegra::Engines::KeplerCompute& kepler_compute; @@ -234,12 +201,10 @@ private: TextureCache texture_cache; BufferCacheRuntime buffer_cache_runtime; BufferCache buffer_cache; - ShaderCacheOpenGL shader_cache; + ShaderCache shader_cache; QueryCache query_cache; FenceManagerOpenGL fence_manager; - VideoCommon::Shader::AsyncShaders async_shaders; - boost::container::static_vector<u32, MAX_IMAGE_VIEWS> image_view_indices; std::array<ImageViewId, MAX_IMAGE_VIEWS> image_view_ids; boost::container::static_vector<GLuint, MAX_TEXTURES> sampler_handles; @@ -247,7 +212,8 @@ private: std::array<GLuint, MAX_IMAGES> image_handles{}; /// Number of commands queued to the OpenGL driver. Reset on flush. - std::size_t num_queued_commands = 0; + size_t num_queued_commands = 0; + bool has_written_global_memory = false; u32 last_clip_distance_mask = 0; }; diff --git a/src/video_core/renderer_opengl/gl_resource_manager.cpp b/src/video_core/renderer_opengl/gl_resource_manager.cpp index 3428e5e21..8695c29e3 100755 --- a/src/video_core/renderer_opengl/gl_resource_manager.cpp +++ b/src/video_core/renderer_opengl/gl_resource_manager.cpp @@ -83,18 +83,6 @@ void OGLSampler::Release() { handle = 0; } -void OGLShader::Create(std::string_view source, GLenum type) { - if (handle != 0) { - return; - } - if (source.empty()) { - return; - } - - MICROPROFILE_SCOPE(OpenGL_ResourceCreation); - handle = GLShader::LoadShader(source, type); -} - void OGLShader::Release() { if (handle == 0) return; @@ -104,21 +92,6 @@ void OGLShader::Release() { handle = 0; } -void OGLProgram::CreateFromSource(const char* vert_shader, const char* geo_shader, - const char* frag_shader, bool separable_program, - bool hint_retrievable) { - OGLShader vert, geo, frag; - if (vert_shader) - vert.Create(vert_shader, GL_VERTEX_SHADER); - if (geo_shader) - geo.Create(geo_shader, GL_GEOMETRY_SHADER); - if (frag_shader) - frag.Create(frag_shader, GL_FRAGMENT_SHADER); - - MICROPROFILE_SCOPE(OpenGL_ResourceCreation); - Create(separable_program, hint_retrievable, vert.handle, geo.handle, frag.handle); -} - void OGLProgram::Release() { if (handle == 0) return; diff --git a/src/video_core/renderer_opengl/gl_resource_manager.h b/src/video_core/renderer_opengl/gl_resource_manager.h index 552d79db4..b2d5bfd3b 100755 --- a/src/video_core/renderer_opengl/gl_resource_manager.h +++ b/src/video_core/renderer_opengl/gl_resource_manager.h @@ -8,7 +8,6 @@ #include #include #include "common/common_types.h" -#include "video_core/renderer_opengl/gl_shader_util.h" namespace OpenGL { @@ -128,8 +127,6 @@ public: return *this; } - void Create(std::string_view source, GLenum type); - void Release(); GLuint handle = 0; @@ -151,17 +148,6 @@ public: return *this; } - template <typename... T> - void Create(bool separable_program, bool hint_retrievable, T...
shaders) { - if (handle != 0) - return; - handle = GLShader::LoadProgram(separable_program, hint_retrievable, shaders...); - } - - /// Creates a new internal OpenGL resource and stores the handle - void CreateFromSource(const char* vert_shader, const char* geo_shader, const char* frag_shader, - bool separable_program = false, bool hint_retrievable = false); - /// Deletes the internal OpenGL resource void Release(); diff --git a/src/video_core/renderer_opengl/gl_shader_cache.cpp b/src/video_core/renderer_opengl/gl_shader_cache.cpp index 5a01c59ec..58a4f0fb4 100755 --- a/src/video_core/renderer_opengl/gl_shader_cache.cpp +++ b/src/video_core/renderer_opengl/gl_shader_cache.cpp @@ -3,606 +3,539 @@ // Refer to the license.txt file included. #include +#include #include #include -#include #include #include -#include #include "common/alignment.h" #include "common/assert.h" +#include "common/fs/fs.h" +#include "common/fs/path_util.h" #include "common/logging/log.h" #include "common/scope_exit.h" +#include "common/settings.h" +#include "common/thread_worker.h" #include "core/core.h" -#include "core/frontend/emu_window.h" +#include "shader_recompiler/backend/glasm/emit_glasm.h" +#include "shader_recompiler/backend/glsl/emit_glsl.h" +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/frontend/ir/program.h" +#include "shader_recompiler/frontend/maxwell/control_flow.h" +#include "shader_recompiler/frontend/maxwell/translate_program.h" +#include "shader_recompiler/profile.h" #include "video_core/engines/kepler_compute.h" #include "video_core/engines/maxwell_3d.h" -#include "video_core/engines/shader_type.h" #include "video_core/memory_manager.h" -#include "video_core/renderer_opengl/gl_arb_decompiler.h" #include "video_core/renderer_opengl/gl_rasterizer.h" #include "video_core/renderer_opengl/gl_resource_manager.h" #include "video_core/renderer_opengl/gl_shader_cache.h" -#include "video_core/renderer_opengl/gl_shader_decompiler.h" -#include "video_core/renderer_opengl/gl_shader_disk_cache.h" +#include "video_core/renderer_opengl/gl_shader_util.h" #include "video_core/renderer_opengl/gl_state_tracker.h" -#include "video_core/shader/memory_util.h" -#include "video_core/shader/registry.h" -#include "video_core/shader/shader_ir.h" #include "video_core/shader_cache.h" +#include "video_core/shader_environment.h" #include "video_core/shader_notify.h" namespace OpenGL { - -using Tegra::Engines::ShaderType; -using VideoCommon::Shader::GetShaderAddress; -using VideoCommon::Shader::GetShaderCode; -using VideoCommon::Shader::GetUniqueIdentifier; -using VideoCommon::Shader::KERNEL_MAIN_OFFSET; -using VideoCommon::Shader::ProgramCode; -using VideoCommon::Shader::Registry; -using VideoCommon::Shader::ShaderIR; -using VideoCommon::Shader::STAGE_MAIN_OFFSET; - namespace { +using Shader::Backend::GLASM::EmitGLASM; +using Shader::Backend::GLSL::EmitGLSL; +using Shader::Backend::SPIRV::EmitSPIRV; +using Shader::Maxwell::MergeDualVertexPrograms; +using Shader::Maxwell::TranslateProgram; +using VideoCommon::ComputeEnvironment; +using VideoCommon::FileEnvironment; +using VideoCommon::GenericEnvironment; +using VideoCommon::GraphicsEnvironment; +using VideoCommon::SerializePipeline; +using Context = ShaderContext::Context; -constexpr VideoCommon::Shader::CompilerSettings COMPILER_SETTINGS{}; +template +auto MakeSpan(Container& container) { + return std::span(container.data(), container.size()); +} -/// Gets the shader type from a Maxwell program type -constexpr GLenum 
GetGLShaderType(ShaderType shader_type) { - switch (shader_type) { - case ShaderType::Vertex: - return GL_VERTEX_SHADER; - case ShaderType::Geometry: - return GL_GEOMETRY_SHADER; - case ShaderType::Fragment: - return GL_FRAGMENT_SHADER; - case ShaderType::Compute: - return GL_COMPUTE_SHADER; +Shader::RuntimeInfo MakeRuntimeInfo(const GraphicsPipelineKey& key, + const Shader::IR::Program& program, + const Shader::IR::Program* previous_program, + bool glasm_use_storage_buffers, bool use_assembly_shaders) { + Shader::RuntimeInfo info; + if (previous_program) { + info.previous_stage_stores = previous_program->info.stores; + } else { + // Mark all stores as available for vertex shaders + info.previous_stage_stores.mask.set(); + } + switch (program.stage) { + case Shader::Stage::VertexB: + case Shader::Stage::Geometry: + if (!use_assembly_shaders && key.xfb_enabled != 0) { + info.xfb_varyings = VideoCommon::MakeTransformFeedbackVaryings(key.xfb_state); + } + break; + case Shader::Stage::TessellationEval: + info.tess_clockwise = key.tessellation_clockwise != 0; + info.tess_primitive = [&key] { + switch (key.tessellation_primitive) { + case Maxwell::TessellationPrimitive::Isolines: + return Shader::TessPrimitive::Isolines; + case Maxwell::TessellationPrimitive::Triangles: + return Shader::TessPrimitive::Triangles; + case Maxwell::TessellationPrimitive::Quads: + return Shader::TessPrimitive::Quads; + } + UNREACHABLE(); + return Shader::TessPrimitive::Triangles; + }(); + info.tess_spacing = [&] { + switch (key.tessellation_spacing) { + case Maxwell::TessellationSpacing::Equal: + return Shader::TessSpacing::Equal; + case Maxwell::TessellationSpacing::FractionalOdd: + return Shader::TessSpacing::FractionalOdd; + case Maxwell::TessellationSpacing::FractionalEven: + return Shader::TessSpacing::FractionalEven; + } + UNREACHABLE(); + return Shader::TessSpacing::Equal; + }(); + break; + case Shader::Stage::Fragment: + info.force_early_z = key.early_z != 0; + break; default: - return GL_NONE; + break; } + switch (key.gs_input_topology) { + case Maxwell::PrimitiveTopology::Points: + info.input_topology = Shader::InputTopology::Points; + break; + case Maxwell::PrimitiveTopology::Lines: + case Maxwell::PrimitiveTopology::LineLoop: + case Maxwell::PrimitiveTopology::LineStrip: + info.input_topology = Shader::InputTopology::Lines; + break; + case Maxwell::PrimitiveTopology::Triangles: + case Maxwell::PrimitiveTopology::TriangleStrip: + case Maxwell::PrimitiveTopology::TriangleFan: + case Maxwell::PrimitiveTopology::Quads: + case Maxwell::PrimitiveTopology::QuadStrip: + case Maxwell::PrimitiveTopology::Polygon: + case Maxwell::PrimitiveTopology::Patches: + info.input_topology = Shader::InputTopology::Triangles; + break; + case Maxwell::PrimitiveTopology::LinesAdjacency: + case Maxwell::PrimitiveTopology::LineStripAdjacency: + info.input_topology = Shader::InputTopology::LinesAdjacency; + break; + case Maxwell::PrimitiveTopology::TrianglesAdjacency: + case Maxwell::PrimitiveTopology::TriangleStripAdjacency: + info.input_topology = Shader::InputTopology::TrianglesAdjacency; + break; + } + info.glasm_use_storage_buffers = glasm_use_storage_buffers; + return info; } -constexpr const char* GetShaderTypeName(ShaderType shader_type) { - switch (shader_type) { - case ShaderType::Vertex: - return "VS"; - case ShaderType::TesselationControl: - return "HS"; - case ShaderType::TesselationEval: - return "DS"; - case ShaderType::Geometry: - return "GS"; - case ShaderType::Fragment: - return "FS"; - case ShaderType::Compute: - 
return "CS"; - } - return "UNK"; +void SetXfbState(VideoCommon::TransformFeedbackState& state, const Maxwell& regs) { + std::ranges::transform(regs.tfb_layouts, state.layouts.begin(), [](const auto& layout) { + return VideoCommon::TransformFeedbackState::Layout{ + .stream = layout.stream, + .varying_count = layout.varying_count, + .stride = layout.stride, + }; + }); + state.varyings = regs.tfb_varying_locs; } - -constexpr ShaderType GetShaderType(Maxwell::ShaderProgram program_type) { - switch (program_type) { - case Maxwell::ShaderProgram::VertexA: - case Maxwell::ShaderProgram::VertexB: - return ShaderType::Vertex; - case Maxwell::ShaderProgram::TesselationControl: - return ShaderType::TesselationControl; - case Maxwell::ShaderProgram::TesselationEval: - return ShaderType::TesselationEval; - case Maxwell::ShaderProgram::Geometry: - return ShaderType::Geometry; - case Maxwell::ShaderProgram::Fragment: - return ShaderType::Fragment; - } - return {}; -} - -constexpr GLenum AssemblyEnum(ShaderType shader_type) { - switch (shader_type) { - case ShaderType::Vertex: - return GL_VERTEX_PROGRAM_NV; - case ShaderType::TesselationControl: - return GL_TESS_CONTROL_PROGRAM_NV; - case ShaderType::TesselationEval: - return GL_TESS_EVALUATION_PROGRAM_NV; - case ShaderType::Geometry: - return GL_GEOMETRY_PROGRAM_NV; - case ShaderType::Fragment: - return GL_FRAGMENT_PROGRAM_NV; - case ShaderType::Compute: - return GL_COMPUTE_PROGRAM_NV; - } - return {}; -} - -std::string MakeShaderID(u64 unique_identifier, ShaderType shader_type) { - return fmt::format("{}{:016X}", GetShaderTypeName(shader_type), unique_identifier); -} - -std::shared_ptr MakeRegistry(const ShaderDiskCacheEntry& entry) { - const VideoCore::GuestDriverProfile guest_profile{entry.texture_handler_size}; - const VideoCommon::Shader::SerializedRegistryInfo info{guest_profile, entry.bound_buffer, - entry.graphics_info, entry.compute_info}; - auto registry = std::make_shared(entry.type, info); - for (const auto& [address, value] : entry.keys) { - const auto [buffer, offset] = address; - registry->InsertKey(buffer, offset, value); - } - for (const auto& [offset, sampler] : entry.bound_samplers) { - registry->InsertBoundSampler(offset, sampler); - } - for (const auto& [key, sampler] : entry.bindless_samplers) { - const auto [buffer, offset] = key; - registry->InsertBindlessSampler(buffer, offset, sampler); - } - return registry; -} - -std::unordered_set GetSupportedFormats() { - GLint num_formats; - glGetIntegerv(GL_NUM_PROGRAM_BINARY_FORMATS, &num_formats); - - std::vector formats(num_formats); - glGetIntegerv(GL_PROGRAM_BINARY_FORMATS, formats.data()); - - std::unordered_set supported_formats; - for (const GLint format : formats) { - supported_formats.insert(static_cast(format)); - } - return supported_formats; -} - } // Anonymous namespace -ProgramSharedPtr BuildShader(const Device& device, ShaderType shader_type, u64 unique_identifier, - const ShaderIR& ir, const Registry& registry, bool hint_retrievable) { - if (device.UseDriverCache()) { - // Ignore hint retrievable if we are using the driver cache - hint_retrievable = false; - } - const std::string shader_id = MakeShaderID(unique_identifier, shader_type); - LOG_INFO(Render_OpenGL, "{}", shader_id); +ShaderCache::ShaderCache(RasterizerOpenGL& rasterizer_, Core::Frontend::EmuWindow& emu_window_, + Tegra::Engines::Maxwell3D& maxwell3d_, + Tegra::Engines::KeplerCompute& kepler_compute_, + Tegra::MemoryManager& gpu_memory_, const Device& device_, + TextureCache& texture_cache_, BufferCache& 
buffer_cache_, + ProgramManager& program_manager_, StateTracker& state_tracker_, + VideoCore::ShaderNotify& shader_notify_) + : VideoCommon::ShaderCache{rasterizer_, gpu_memory_, maxwell3d_, kepler_compute_}, + emu_window{emu_window_}, device{device_}, texture_cache{texture_cache_}, + buffer_cache{buffer_cache_}, program_manager{program_manager_}, state_tracker{state_tracker_}, + shader_notify{shader_notify_}, use_asynchronous_shaders{device.UseAsynchronousShaders()}, + profile{ + .supported_spirv = 0x00010000, - auto program = std::make_shared(); + .unified_descriptor_binding = false, + .support_descriptor_aliasing = false, + .support_int8 = false, + .support_int16 = false, + .support_int64 = device.HasShaderInt64(), + .support_vertex_instance_id = true, + .support_float_controls = false, + .support_separate_denorm_behavior = false, + .support_separate_rounding_mode = false, + .support_fp16_denorm_preserve = false, + .support_fp32_denorm_preserve = false, + .support_fp16_denorm_flush = false, + .support_fp32_denorm_flush = false, + .support_fp16_signed_zero_nan_preserve = false, + .support_fp32_signed_zero_nan_preserve = false, + .support_fp64_signed_zero_nan_preserve = false, + .support_explicit_workgroup_layout = false, + .support_vote = true, + .support_viewport_index_layer_non_geometry = + device.HasNvViewportArray2() || device.HasVertexViewportLayer(), + .support_viewport_mask = device.HasNvViewportArray2(), + .support_typeless_image_loads = device.HasImageLoadFormatted(), + .support_demote_to_helper_invocation = false, + .support_int64_atomics = false, + .support_derivative_control = device.HasDerivativeControl(), + .support_geometry_shader_passthrough = device.HasGeometryShaderPassthrough(), + .support_gl_nv_gpu_shader_5 = device.HasNvGpuShader5(), + .support_gl_amd_gpu_shader_half_float = device.HasAmdShaderHalfFloat(), + .support_gl_texture_shadow_lod = device.HasTextureShadowLod(), + .support_gl_warp_intrinsics = false, + .support_gl_variable_aoffi = device.HasVariableAoffi(), + .support_gl_sparse_textures = device.HasSparseTexture2(), + .support_gl_derivative_control = device.HasDerivativeControl(), - if (device.UseAssemblyShaders()) { - const std::string arb = - DecompileAssemblyShader(device, ir, registry, shader_type, shader_id); + .warp_size_potentially_larger_than_guest = device.IsWarpSizePotentiallyLargerThanGuest(), - GLuint& arb_prog = program->assembly_program.handle; + .lower_left_origin_mode = true, + .need_declared_frag_colors = true, + .need_fastmath_off = device.NeedsFastmathOff(), -// Commented out functions signal OpenGL errors but are compatible with apitrace. -// Use them only to capture and replay on apitrace. 
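One detail in the new profile initializer above: .supported_spirv = 0x00010000 is a SPIR-V version word, which packs the major version in bits 16-23 and the minor version in bits 8-15, so this value advertises SPIR-V 1.0 on the OpenGL SPIR-V path. A tiny hedged sketch of that encoding; the helper name is hypothetical and not part of this patch:

// Compose the SPIR-V version word layout used by supported_spirv.
constexpr u32 SpirvVersionWord(u32 major, u32 minor) {
    return (major << 16) | (minor << 8);
}
static_assert(SpirvVersionWord(1, 0) == 0x00010000);
static_assert(SpirvVersionWord(1, 3) == 0x00010300);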
-#if 0 - glGenProgramsNV(1, &arb_prog); - glLoadProgramNV(AssemblyEnum(shader_type), arb_prog, static_cast(arb.size()), - reinterpret_cast(arb.data())); -#else - glGenProgramsARB(1, &arb_prog); - glNamedProgramStringEXT(arb_prog, AssemblyEnum(shader_type), GL_PROGRAM_FORMAT_ASCII_ARB, - static_cast(arb.size()), arb.data()); -#endif - const auto err = reinterpret_cast(glGetString(GL_PROGRAM_ERROR_STRING_NV)); - if (err && *err) { - LOG_CRITICAL(Render_OpenGL, "{}", err); - LOG_INFO(Render_OpenGL, "\n{}", arb); - } - } else { - const std::string glsl = DecompileShader(device, ir, registry, shader_type, shader_id); - OGLShader shader; - shader.Create(glsl.c_str(), GetGLShaderType(shader_type)); - - program->source_program.Create(true, hint_retrievable, shader.handle); - } - - return program; -} - -Shader::Shader(std::shared_ptr registry_, ShaderEntries entries_, - ProgramSharedPtr program_, bool is_built_) - : registry{std::move(registry_)}, entries{std::move(entries_)}, program{std::move(program_)}, - is_built{is_built_} { - handle = program->assembly_program.handle; - if (handle == 0) { - handle = program->source_program.handle; - } - if (is_built) { - ASSERT(handle != 0); + .has_broken_spirv_clamp = true, + .has_broken_unsigned_image_offsets = true, + .has_broken_signed_operations = true, + .has_broken_fp16_float_controls = false, + .has_gl_component_indexing_bug = device.HasComponentIndexingBug(), + .has_gl_precise_bug = device.HasPreciseBug(), + .ignore_nan_fp_comparisons = true, + }, + host_info{ + .support_float16 = false, + .support_int64 = device.HasShaderInt64(), + } { + if (use_asynchronous_shaders) { + workers = CreateWorkers(); } } -Shader::~Shader() = default; +ShaderCache::~ShaderCache() = default; -GLuint Shader::GetHandle() const { - DEBUG_ASSERT(registry->IsConsistent()); - return handle; -} - -bool Shader::IsBuilt() const { - return is_built; -} - -void Shader::AsyncOpenGLBuilt(OGLProgram new_program) { - program->source_program = std::move(new_program); - handle = program->source_program.handle; - is_built = true; -} - -void Shader::AsyncGLASMBuilt(OGLAssemblyProgram new_program) { - program->assembly_program = std::move(new_program); - handle = program->assembly_program.handle; - is_built = true; -} - -std::unique_ptr Shader::CreateStageFromMemory( - const ShaderParameters& params, Maxwell::ShaderProgram program_type, ProgramCode code, - ProgramCode code_b, VideoCommon::Shader::AsyncShaders& async_shaders, VAddr cpu_addr) { - const auto shader_type = GetShaderType(program_type); - - auto& gpu = params.gpu; - gpu.ShaderNotify().MarkSharderBuilding(); - - auto registry = std::make_shared(shader_type, gpu.Maxwell3D()); - if (!async_shaders.IsShaderAsync(gpu) || !params.device.UseAsynchronousShaders()) { - const ShaderIR ir(code, STAGE_MAIN_OFFSET, COMPILER_SETTINGS, *registry); - // TODO(Rodrigo): Handle VertexA shaders - // std::optional ir_b; - // if (!code_b.empty()) { - // ir_b.emplace(code_b, STAGE_MAIN_OFFSET); - // } - auto program = - BuildShader(params.device, shader_type, params.unique_identifier, ir, *registry); - ShaderDiskCacheEntry entry; - entry.type = shader_type; - entry.code = std::move(code); - entry.code_b = std::move(code_b); - entry.unique_identifier = params.unique_identifier; - entry.bound_buffer = registry->GetBoundBuffer(); - entry.graphics_info = registry->GetGraphicsInfo(); - entry.keys = registry->GetKeys(); - entry.bound_samplers = registry->GetBoundSamplers(); - entry.bindless_samplers = registry->GetBindlessSamplers(); - 
params.disk_cache.SaveEntry(std::move(entry)); - - gpu.ShaderNotify().MarkShaderComplete(); - - return std::unique_ptr(new Shader(std::move(registry), - MakeEntries(params.device, ir, shader_type), - std::move(program), true)); - } else { - // Required for entries - const ShaderIR ir(code, STAGE_MAIN_OFFSET, COMPILER_SETTINGS, *registry); - auto entries = MakeEntries(params.device, ir, shader_type); - - async_shaders.QueueOpenGLShader(params.device, shader_type, params.unique_identifier, - std::move(code), std::move(code_b), STAGE_MAIN_OFFSET, - COMPILER_SETTINGS, *registry, cpu_addr); - - auto program = std::make_shared(); - return std::unique_ptr( - new Shader(std::move(registry), std::move(entries), std::move(program), false)); - } -} - -std::unique_ptr Shader::CreateKernelFromMemory(const ShaderParameters& params, - ProgramCode code) { - auto& gpu = params.gpu; - gpu.ShaderNotify().MarkSharderBuilding(); - - auto registry = std::make_shared(ShaderType::Compute, params.engine); - const ShaderIR ir(code, KERNEL_MAIN_OFFSET, COMPILER_SETTINGS, *registry); - const u64 uid = params.unique_identifier; - auto program = BuildShader(params.device, ShaderType::Compute, uid, ir, *registry); - - ShaderDiskCacheEntry entry; - entry.type = ShaderType::Compute; - entry.code = std::move(code); - entry.unique_identifier = uid; - entry.bound_buffer = registry->GetBoundBuffer(); - entry.compute_info = registry->GetComputeInfo(); - entry.keys = registry->GetKeys(); - entry.bound_samplers = registry->GetBoundSamplers(); - entry.bindless_samplers = registry->GetBindlessSamplers(); - params.disk_cache.SaveEntry(std::move(entry)); - - gpu.ShaderNotify().MarkShaderComplete(); - - return std::unique_ptr(new Shader(std::move(registry), - MakeEntries(params.device, ir, ShaderType::Compute), - std::move(program))); -} - -std::unique_ptr Shader::CreateFromCache(const ShaderParameters& params, - const PrecompiledShader& precompiled_shader) { - return std::unique_ptr(new Shader( - precompiled_shader.registry, precompiled_shader.entries, precompiled_shader.program)); -} - -ShaderCacheOpenGL::ShaderCacheOpenGL(RasterizerOpenGL& rasterizer_, - Core::Frontend::EmuWindow& emu_window_, Tegra::GPU& gpu_, - Tegra::Engines::Maxwell3D& maxwell3d_, - Tegra::Engines::KeplerCompute& kepler_compute_, - Tegra::MemoryManager& gpu_memory_, const Device& device_) - : ShaderCache{rasterizer_}, emu_window{emu_window_}, gpu{gpu_}, gpu_memory{gpu_memory_}, - maxwell3d{maxwell3d_}, kepler_compute{kepler_compute_}, device{device_} {} - -ShaderCacheOpenGL::~ShaderCacheOpenGL() = default; - -void ShaderCacheOpenGL::LoadDiskCache(u64 title_id, std::stop_token stop_loading, - const VideoCore::DiskResourceLoadCallback& callback) { - disk_cache.BindTitleID(title_id); - const std::optional transferable = disk_cache.LoadTransferable(); - - LOG_INFO(Render_OpenGL, "Total Shader Count: {}", - transferable.has_value() ? 
transferable->size() : 0); - - if (!transferable) { +void ShaderCache::LoadDiskResources(u64 title_id, std::stop_token stop_loading, + const VideoCore::DiskResourceLoadCallback& callback) { + if (title_id == 0) { return; } - - std::vector gl_cache; - if (!device.UseAssemblyShaders() && !device.UseDriverCache()) { - // Only load precompiled cache when we are not using assembly shaders - gl_cache = disk_cache.LoadPrecompiled(); + const auto shader_dir{Common::FS::GetYuzuPath(Common::FS::YuzuPath::ShaderDir)}; + const auto base_dir{shader_dir / fmt::format("{:016x}", title_id)}; + if (!Common::FS::CreateDir(shader_dir) || !Common::FS::CreateDir(base_dir)) { + LOG_ERROR(Common_Filesystem, "Failed to create shader cache directories"); + return; } - const auto supported_formats = GetSupportedFormats(); + shader_cache_filename = base_dir / "opengl.bin"; - // Track if precompiled cache was altered during loading to know if we have to - // serialize the virtual precompiled cache file back to the hard drive - bool precompiled_cache_altered = false; - - // Inform the frontend about shader build initialization - if (callback) { - callback(VideoCore::LoadCallbackStage::Build, 0, transferable->size()); + if (!workers) { + workers = CreateWorkers(); } + struct { + std::mutex mutex; + size_t total{}; + size_t built{}; + bool has_loaded{}; + } state; - std::mutex mutex; - std::size_t built_shaders = 0; // It doesn't have be atomic since it's used behind a mutex - std::atomic_bool gl_cache_failed = false; - - const auto find_precompiled = [&gl_cache](u64 id) { - return std::ranges::find(gl_cache, id, &ShaderDiskCachePrecompiled::unique_identifier); - }; - - const auto worker = [&](Core::Frontend::GraphicsContext* context, std::size_t begin, - std::size_t end) { - const auto scope = context->Acquire(); - - for (std::size_t i = begin; i < end; ++i) { - if (stop_loading.stop_requested()) { - return; - } - const auto& entry = (*transferable)[i]; - const u64 uid = entry.unique_identifier; - const auto it = find_precompiled(uid); - const auto precompiled_entry = it != gl_cache.end() ? &*it : nullptr; - - const bool is_compute = entry.type == ShaderType::Compute; - const u32 main_offset = is_compute ? 
KERNEL_MAIN_OFFSET : STAGE_MAIN_OFFSET; - auto registry = MakeRegistry(entry); - const ShaderIR ir(entry.code, main_offset, COMPILER_SETTINGS, *registry); - - ProgramSharedPtr program; - if (precompiled_entry) { - // If the shader is precompiled, attempt to load it with - program = GeneratePrecompiledProgram(entry, *precompiled_entry, supported_formats); - if (!program) { - gl_cache_failed = true; + const auto load_compute{[&](std::ifstream& file, FileEnvironment env) { + ComputePipelineKey key; + file.read(reinterpret_cast(&key), sizeof(key)); + workers->QueueWork( + [this, key, env = std::move(env), &state, &callback](Context* ctx) mutable { + ctx->pools.ReleaseContents(); + auto pipeline{CreateComputePipeline(ctx->pools, key, env)}; + std::lock_guard lock{state.mutex}; + if (pipeline) { + compute_cache.emplace(key, std::move(pipeline)); } - } - if (!program) { - // Otherwise compile it from GLSL - program = BuildShader(device, entry.type, uid, ir, *registry, true); - } + ++state.built; + if (state.has_loaded) { + callback(VideoCore::LoadCallbackStage::Build, state.built, state.total); + } + }); + ++state.total; + }}; + const auto load_graphics{[&](std::ifstream& file, std::vector envs) { + GraphicsPipelineKey key; + file.read(reinterpret_cast(&key), sizeof(key)); + workers->QueueWork( + [this, key, envs = std::move(envs), &state, &callback](Context* ctx) mutable { + boost::container::static_vector env_ptrs; + for (auto& env : envs) { + env_ptrs.push_back(&env); + } + ctx->pools.ReleaseContents(); + auto pipeline{CreateGraphicsPipeline(ctx->pools, key, MakeSpan(env_ptrs), false)}; + std::lock_guard lock{state.mutex}; + if (pipeline) { + graphics_cache.emplace(key, std::move(pipeline)); + } + ++state.built; + if (state.has_loaded) { + callback(VideoCore::LoadCallbackStage::Build, state.built, state.total); + } + }); + ++state.total; + }}; + VideoCommon::LoadPipelines(stop_loading, shader_cache_filename, load_compute, load_graphics); - PrecompiledShader shader; - shader.program = std::move(program); - shader.registry = std::move(registry); - shader.entries = MakeEntries(device, ir, entry.type); + std::unique_lock lock{state.mutex}; + callback(VideoCore::LoadCallbackStage::Build, 0, state.total); + state.has_loaded = true; + lock.unlock(); - std::scoped_lock lock{mutex}; - if (callback) { - callback(VideoCore::LoadCallbackStage::Build, ++built_shaders, - transferable->size()); - } - runtime_cache.emplace(entry.unique_identifier, std::move(shader)); - } + workers->WaitForRequests(); + if (!use_asynchronous_shaders) { + workers.reset(); + } +} + +GraphicsPipeline* ShaderCache::CurrentGraphicsPipeline() { + if (!RefreshStages(graphics_key.unique_hashes)) { + current_pipeline = nullptr; + return nullptr; + } + const auto& regs{maxwell3d.regs}; + graphics_key.raw = 0; + graphics_key.early_z.Assign(regs.force_early_fragment_tests != 0 ? 1 : 0); + graphics_key.gs_input_topology.Assign(graphics_key.unique_hashes[4] != 0 + ? regs.draw.topology.Value() + : Maxwell::PrimitiveTopology{}); + graphics_key.tessellation_primitive.Assign(regs.tess_mode.prim.Value()); + graphics_key.tessellation_spacing.Assign(regs.tess_mode.spacing.Value()); + graphics_key.tessellation_clockwise.Assign(regs.tess_mode.cw.Value()); + graphics_key.xfb_enabled.Assign(regs.tfb_enabled != 0 ? 1 : 0); + if (graphics_key.xfb_enabled) { + SetXfbState(graphics_key.xfb_state, regs); + } + if (current_pipeline && graphics_key == current_pipeline->Key()) { + return current_pipeline->IsBuilt() ? 
current_pipeline : nullptr; + } + return CurrentGraphicsPipelineSlowPath(); +} + +GraphicsPipeline* ShaderCache::CurrentGraphicsPipelineSlowPath() { + const auto [pair, is_new]{graphics_cache.try_emplace(graphics_key)}; + auto& pipeline{pair->second}; + if (is_new) { + pipeline = CreateGraphicsPipeline(); + } + if (!pipeline) { + return nullptr; + } + current_pipeline = pipeline.get(); + return BuiltPipeline(current_pipeline); +} + +GraphicsPipeline* ShaderCache::BuiltPipeline(GraphicsPipeline* pipeline) const noexcept { + if (pipeline->IsBuilt()) { + return pipeline; + } + if (!use_asynchronous_shaders) { + return pipeline; + } + // If something is using depth, we can assume that games are not rendering anything which + // will be used one time. + if (maxwell3d.regs.zeta_enable) { + return nullptr; + } + // If games are using a small index count, we can assume these are full screen quads. + // Usually these shaders are only used once for building textures so we can assume they + // can't be built async + if (maxwell3d.regs.index_array.count <= 6 || maxwell3d.regs.vertex_buffer.count <= 6) { + return pipeline; + } + return nullptr; +} + +ComputePipeline* ShaderCache::CurrentComputePipeline() { + const VideoCommon::ShaderInfo* const shader{ComputeShader()}; + if (!shader) { + return nullptr; + } + const auto& qmd{kepler_compute.launch_description}; + const ComputePipelineKey key{ + .unique_hash = shader->unique_hash, + .shared_memory_size = qmd.shared_alloc, + .workgroup_size{qmd.block_dim_x, qmd.block_dim_y, qmd.block_dim_z}, }; - - const std::size_t num_workers{std::max(1U, std::thread::hardware_concurrency())}; - const std::size_t bucket_size{transferable->size() / num_workers}; - std::vector> contexts(num_workers); - std::vector threads(num_workers); - for (std::size_t i = 0; i < num_workers; ++i) { - const bool is_last_worker = i + 1 == num_workers; - const std::size_t start{bucket_size * i}; - const std::size_t end{is_last_worker ? 
transferable->size() : start + bucket_size}; - - // On some platforms the shared context has to be created from the GUI thread - contexts[i] = emu_window.CreateSharedContext(); - threads[i] = std::thread(worker, contexts[i].get(), start, end); - } - for (auto& thread : threads) { - thread.join(); - } - - if (gl_cache_failed) { - // Invalidate the precompiled cache if a shader dumped shader was rejected - disk_cache.InvalidatePrecompiled(); - precompiled_cache_altered = true; - return; - } - if (stop_loading.stop_requested()) { - return; - } - - if (device.UseAssemblyShaders() || device.UseDriverCache()) { - // Don't store precompiled binaries for assembly shaders or when using the driver cache - return; - } - - // TODO(Rodrigo): Do state tracking for transferable shaders and do a dummy draw - // before precompiling them - - for (std::size_t i = 0; i < transferable->size(); ++i) { - const u64 id = (*transferable)[i].unique_identifier; - const auto it = find_precompiled(id); - if (it == gl_cache.end()) { - const GLuint program = runtime_cache.at(id).program->source_program.handle; - disk_cache.SavePrecompiled(id, program); - precompiled_cache_altered = true; - } - } - - if (precompiled_cache_altered) { - disk_cache.SaveVirtualPrecompiledFile(); + const auto [pair, is_new]{compute_cache.try_emplace(key)}; + auto& pipeline{pair->second}; + if (!is_new) { + return pipeline.get(); } + pipeline = CreateComputePipeline(key, shader); + return pipeline.get(); } -ProgramSharedPtr ShaderCacheOpenGL::GeneratePrecompiledProgram( - const ShaderDiskCacheEntry& entry, const ShaderDiskCachePrecompiled& precompiled_entry, - const std::unordered_set& supported_formats) { - if (!supported_formats.contains(precompiled_entry.binary_format)) { - LOG_INFO(Render_OpenGL, "Precompiled cache entry with unsupported format, removing"); - return {}; +std::unique_ptr ShaderCache::CreateGraphicsPipeline() { + GraphicsEnvironments environments; + GetGraphicsEnvironments(environments, graphics_key.unique_hashes); + + main_pools.ReleaseContents(); + auto pipeline{CreateGraphicsPipeline(main_pools, graphics_key, environments.Span(), + use_asynchronous_shaders)}; + if (!pipeline || shader_cache_filename.empty()) { + return pipeline; } - - auto program = std::make_shared(); - GLuint& handle = program->source_program.handle; - handle = glCreateProgram(); - glProgramParameteri(handle, GL_PROGRAM_SEPARABLE, GL_TRUE); - glProgramBinary(handle, precompiled_entry.binary_format, precompiled_entry.binary.data(), - static_cast(precompiled_entry.binary.size())); - - GLint link_status; - glGetProgramiv(handle, GL_LINK_STATUS, &link_status); - if (link_status == GL_FALSE) { - LOG_INFO(Render_OpenGL, "Precompiled cache rejected by the driver, removing"); - return {}; - } - - return program; -} - -Shader* ShaderCacheOpenGL::GetStageProgram(Maxwell::ShaderProgram program, - VideoCommon::Shader::AsyncShaders& async_shaders) { - if (!maxwell3d.dirty.flags[Dirty::Shaders]) { - auto* last_shader = last_shaders[static_cast(program)]; - if (last_shader->IsBuilt()) { - return last_shader; + boost::container::static_vector env_ptrs; + for (size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) { + if (graphics_key.unique_hashes[index] != 0) { + env_ptrs.push_back(&environments.envs[index]); } } + SerializePipeline(graphics_key, env_ptrs, shader_cache_filename); + return pipeline; +} - const GPUVAddr address{GetShaderAddress(maxwell3d, program)}; +std::unique_ptr ShaderCache::CreateGraphicsPipeline( + ShaderContext::ShaderPools& pools, const 
GraphicsPipelineKey& key, + std::span<Shader::Environment* const> envs, bool build_in_parallel) try { + LOG_INFO(Render_OpenGL, "0x{:016x}", key.Hash()); + size_t env_index{}; + u32 total_storage_buffers{}; + std::array<Shader::IR::Program, Maxwell::MaxShaderProgram> programs; + const bool uses_vertex_a{key.unique_hashes[0] != 0}; + const bool uses_vertex_b{key.unique_hashes[1] != 0}; + for (size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) { + if (key.unique_hashes[index] == 0) { + continue; + } + Shader::Environment& env{*envs[env_index]}; + ++env_index; + + const u32 cfg_offset{static_cast<u32>(env.StartAddress() + sizeof(Shader::ProgramHeader))}; + Shader::Maxwell::Flow::CFG cfg(env, pools.flow_block, cfg_offset, index == 0); + if (!uses_vertex_a || index != 1) { + // Normal path + programs[index] = TranslateProgram(pools.inst, pools.block, env, cfg, host_info); + + for (const auto& desc : programs[index].info.storage_buffers_descriptors) { + total_storage_buffers += desc.count; + } + } else { + // VertexB path when VertexA is present. + auto& program_va{programs[0]}; + auto program_vb{TranslateProgram(pools.inst, pools.block, env, cfg, host_info)}; + for (const auto& desc : program_vb.info.storage_buffers_descriptors) { + total_storage_buffers += desc.count; + } + programs[index] = MergeDualVertexPrograms(program_va, program_vb, env); + } + } + const u32 glasm_storage_buffer_limit{device.GetMaxGLASMStorageBufferBlocks()}; + const bool glasm_use_storage_buffers{total_storage_buffers <= glasm_storage_buffer_limit}; + + std::array<const Shader::Info*, 5> infos{}; + + OGLProgram source_program; + std::array<std::string, 5> sources; + std::array<std::vector<u32>, 5> sources_spirv; + Shader::Backend::Bindings binding; + Shader::IR::Program* previous_program{}; + const bool use_glasm{device.UseAssemblyShaders()}; + const size_t first_index = uses_vertex_a && uses_vertex_b ?
1 : 0; + for (size_t index = first_index; index < Maxwell::MaxShaderProgram; ++index) { + if (key.unique_hashes[index] == 0) { + continue; + } + UNIMPLEMENTED_IF(index == 0); + + Shader::IR::Program& program{programs[index]}; + const size_t stage_index{index - 1}; + infos[stage_index] = &program.info; + + const auto runtime_info{ + MakeRuntimeInfo(key, program, previous_program, glasm_use_storage_buffers, use_glasm)}; + switch (device.GetShaderBackend()) { + case Settings::ShaderBackend::GLSL: + sources[stage_index] = EmitGLSL(profile, runtime_info, program, binding); + break; + case Settings::ShaderBackend::GLASM: + sources[stage_index] = EmitGLASM(profile, runtime_info, program, binding); + break; + case Settings::ShaderBackend::SPIRV: + sources_spirv[stage_index] = EmitSPIRV(profile, runtime_info, program, binding); + break; + } + previous_program = &program; } + auto* const thread_worker{build_in_parallel ? workers.get() : nullptr}; + return std::make_unique( + device, texture_cache, buffer_cache, gpu_memory, maxwell3d, program_manager, state_tracker, + thread_worker, &shader_notify, sources, sources_spirv, infos, key); - const u8* const host_ptr{gpu_memory.GetPointer(address)}; - - // No shader found - create a new one - ProgramCode code{GetShaderCode(gpu_memory, address, host_ptr, false)}; - ProgramCode code_b; - if (program == Maxwell::ShaderProgram::VertexA) { - const GPUVAddr address_b{GetShaderAddress(maxwell3d, Maxwell::ShaderProgram::VertexB)}; - const u8* host_ptr_b = gpu_memory.GetPointer(address_b); - code_b = GetShaderCode(gpu_memory, address_b, host_ptr_b, false); - } - const std::size_t code_size = code.size() * sizeof(u64); - - const u64 unique_identifier = GetUniqueIdentifier( - GetShaderType(program), program == Maxwell::ShaderProgram::VertexA, code, code_b); - - const ShaderParameters params{gpu, maxwell3d, disk_cache, device, - *cpu_addr, host_ptr, unique_identifier}; - - std::unique_ptr shader; - const auto found = runtime_cache.find(unique_identifier); - if (found == runtime_cache.end()) { - shader = Shader::CreateStageFromMemory(params, program, std::move(code), std::move(code_b), - async_shaders, cpu_addr.value_or(0)); - } else { - shader = Shader::CreateFromCache(params, found->second); - } - - Shader* const result = shader.get(); - if (cpu_addr) { - Register(std::move(shader), *cpu_addr, code_size); - } else { - null_shader = std::move(shader); - } - - return last_shaders[static_cast(program)] = result; +} catch (Shader::Exception& exception) { + LOG_ERROR(Render_OpenGL, "{}", exception.what()); + return nullptr; } -Shader* ShaderCacheOpenGL::GetComputeKernel(GPUVAddr code_addr) { - const std::optional cpu_addr{gpu_memory.GpuToCpuAddress(code_addr)}; +std::unique_ptr ShaderCache::CreateComputePipeline( + const ComputePipelineKey& key, const VideoCommon::ShaderInfo* shader) { + const GPUVAddr program_base{kepler_compute.regs.code_loc.Address()}; + const auto& qmd{kepler_compute.launch_description}; + ComputeEnvironment env{kepler_compute, gpu_memory, program_base, qmd.program_start}; + env.SetCachedSize(shader->size_bytes); - if (Shader* const kernel = cpu_addr ? 
TryGet(*cpu_addr) : null_kernel.get()) { - return kernel; + main_pools.ReleaseContents(); + auto pipeline{CreateComputePipeline(main_pools, key, env)}; + if (!pipeline || shader_cache_filename.empty()) { + return pipeline; + } + SerializePipeline(key, std::array{&env}, shader_cache_filename); + return pipeline; +} + +std::unique_ptr ShaderCache::CreateComputePipeline( + ShaderContext::ShaderPools& pools, const ComputePipelineKey& key, + Shader::Environment& env) try { + LOG_INFO(Render_OpenGL, "0x{:016x}", key.Hash()); + + Shader::Maxwell::Flow::CFG cfg{env, pools.flow_block, env.StartAddress()}; + auto program{TranslateProgram(pools.inst, pools.block, env, cfg, host_info)}; + + u32 num_storage_buffers{}; + for (const auto& desc : program.info.storage_buffers_descriptors) { + num_storage_buffers += desc.count; + } + Shader::RuntimeInfo info; + info.glasm_use_storage_buffers = num_storage_buffers <= device.GetMaxGLASMStorageBufferBlocks(); + + std::string code{}; + std::vector code_spirv; + switch (device.GetShaderBackend()) { + case Settings::ShaderBackend::GLSL: + code = EmitGLSL(profile, program); + break; + case Settings::ShaderBackend::GLASM: + code = EmitGLASM(profile, info, program); + break; + case Settings::ShaderBackend::SPIRV: + code_spirv = EmitSPIRV(profile, program); + break; } - // No kernel found, create a new one - const u8* host_ptr{gpu_memory.GetPointer(code_addr)}; - ProgramCode code{GetShaderCode(gpu_memory, code_addr, host_ptr, true)}; - const std::size_t code_size{code.size() * sizeof(u64)}; - const u64 unique_identifier{GetUniqueIdentifier(ShaderType::Compute, false, code)}; + return std::make_unique(device, texture_cache, buffer_cache, gpu_memory, + kepler_compute, program_manager, program.info, code, + code_spirv); +} catch (Shader::Exception& exception) { + LOG_ERROR(Render_OpenGL, "{}", exception.what()); + return nullptr; +} - const ShaderParameters params{gpu, kepler_compute, disk_cache, device, - *cpu_addr, host_ptr, unique_identifier}; - - std::unique_ptr kernel; - const auto found = runtime_cache.find(unique_identifier); - if (found == runtime_cache.end()) { - kernel = Shader::CreateKernelFromMemory(params, std::move(code)); - } else { - kernel = Shader::CreateFromCache(params, found->second); - } - - Shader* const result = kernel.get(); - if (cpu_addr) { - Register(std::move(kernel), *cpu_addr, code_size); - } else { - null_kernel = std::move(kernel); - } - return result; +std::unique_ptr ShaderCache::CreateWorkers() const { + return std::make_unique(std::max(std::thread::hardware_concurrency(), 2U) - 1, + "yuzu:ShaderBuilder", + [this] { return Context{emu_window}; }); } } // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_shader_cache.h b/src/video_core/renderer_opengl/gl_shader_cache.h index b30308b6f..a34110b37 100755 --- a/src/video_core/renderer_opengl/gl_shader_cache.h +++ b/src/video_core/renderer_opengl/gl_shader_cache.h @@ -5,157 +5,93 @@ #pragma once #include -#include -#include -#include -#include -#include +#include +#include #include -#include -#include #include #include "common/common_types.h" -#include "video_core/engines/shader_type.h" -#include "video_core/renderer_opengl/gl_resource_manager.h" -#include "video_core/renderer_opengl/gl_shader_decompiler.h" -#include "video_core/renderer_opengl/gl_shader_disk_cache.h" -#include "video_core/shader/registry.h" -#include "video_core/shader/shader_ir.h" +#include "common/thread_worker.h" +#include "shader_recompiler/frontend/ir/value.h" +#include 
"shader_recompiler/host_translate_info.h" +#include "shader_recompiler/object_pool.h" +#include "shader_recompiler/profile.h" +#include "video_core/renderer_opengl/gl_compute_pipeline.h" +#include "video_core/renderer_opengl/gl_graphics_pipeline.h" +#include "video_core/renderer_opengl/gl_shader_context.h" #include "video_core/shader_cache.h" namespace Tegra { class MemoryManager; } -namespace Core::Frontend { -class EmuWindow; -} - -namespace VideoCommon::Shader { -class AsyncShaders; -} - namespace OpenGL { class Device; +class ProgramManager; class RasterizerOpenGL; +using ShaderWorker = Common::StatefulThreadWorker; -using Maxwell = Tegra::Engines::Maxwell3D::Regs; - -struct ProgramHandle { - OGLProgram source_program; - OGLAssemblyProgram assembly_program; -}; -using ProgramSharedPtr = std::shared_ptr; - -struct PrecompiledShader { - ProgramSharedPtr program; - std::shared_ptr registry; - ShaderEntries entries; -}; - -struct ShaderParameters { - Tegra::GPU& gpu; - Tegra::Engines::ConstBufferEngineInterface& engine; - ShaderDiskCacheOpenGL& disk_cache; - const Device& device; - VAddr cpu_addr; - const u8* host_ptr; - u64 unique_identifier; -}; - -ProgramSharedPtr BuildShader(const Device& device, Tegra::Engines::ShaderType shader_type, - u64 unique_identifier, const VideoCommon::Shader::ShaderIR& ir, - const VideoCommon::Shader::Registry& registry, - bool hint_retrievable = false); - -class Shader final { +class ShaderCache : public VideoCommon::ShaderCache { public: - ~Shader(); + explicit ShaderCache(RasterizerOpenGL& rasterizer_, Core::Frontend::EmuWindow& emu_window_, + Tegra::Engines::Maxwell3D& maxwell3d_, + Tegra::Engines::KeplerCompute& kepler_compute_, + Tegra::MemoryManager& gpu_memory_, const Device& device_, + TextureCache& texture_cache_, BufferCache& buffer_cache_, + ProgramManager& program_manager_, StateTracker& state_tracker_, + VideoCore::ShaderNotify& shader_notify_); + ~ShaderCache(); - /// Gets the GL program handle for the shader - GLuint GetHandle() const; + void LoadDiskResources(u64 title_id, std::stop_token stop_loading, + const VideoCore::DiskResourceLoadCallback& callback); - bool IsBuilt() const; + [[nodiscard]] GraphicsPipeline* CurrentGraphicsPipeline(); - /// Gets the shader entries for the shader - const ShaderEntries& GetEntries() const { - return entries; - } - - const VideoCommon::Shader::Registry& GetRegistry() const { - return *registry; - } - - /// Mark a OpenGL shader as built - void AsyncOpenGLBuilt(OGLProgram new_program); - - /// Mark a GLASM shader as built - void AsyncGLASMBuilt(OGLAssemblyProgram new_program); - - static std::unique_ptr CreateStageFromMemory( - const ShaderParameters& params, Maxwell::ShaderProgram program_type, - ProgramCode program_code, ProgramCode program_code_b, - VideoCommon::Shader::AsyncShaders& async_shaders, VAddr cpu_addr); - - static std::unique_ptr CreateKernelFromMemory(const ShaderParameters& params, - ProgramCode code); - - static std::unique_ptr CreateFromCache(const ShaderParameters& params, - const PrecompiledShader& precompiled_shader); + [[nodiscard]] ComputePipeline* CurrentComputePipeline(); private: - explicit Shader(std::shared_ptr registry, ShaderEntries entries, - ProgramSharedPtr program, bool is_built_ = true); + GraphicsPipeline* CurrentGraphicsPipelineSlowPath(); - std::shared_ptr registry; - ShaderEntries entries; - ProgramSharedPtr program; - GLuint handle = 0; - bool is_built{}; -}; + [[nodiscard]] GraphicsPipeline* BuiltPipeline(GraphicsPipeline* pipeline) const noexcept; -class 
ShaderCacheOpenGL final : public VideoCommon::ShaderCache { -public: - explicit ShaderCacheOpenGL(RasterizerOpenGL& rasterizer_, - Core::Frontend::EmuWindow& emu_window_, Tegra::GPU& gpu, - Tegra::Engines::Maxwell3D& maxwell3d_, - Tegra::Engines::KeplerCompute& kepler_compute_, - Tegra::MemoryManager& gpu_memory_, const Device& device_); - ~ShaderCacheOpenGL() override; + std::unique_ptr CreateGraphicsPipeline(); - /// Loads disk cache for the current game - void LoadDiskCache(u64 title_id, std::stop_token stop_loading, - const VideoCore::DiskResourceLoadCallback& callback); + std::unique_ptr CreateGraphicsPipeline( + ShaderContext::ShaderPools& pools, const GraphicsPipelineKey& key, + std::span envs, bool build_in_parallel); - /// Gets the current specified shader stage program - Shader* GetStageProgram(Maxwell::ShaderProgram program, - VideoCommon::Shader::AsyncShaders& async_shaders); + std::unique_ptr CreateComputePipeline(const ComputePipelineKey& key, + const VideoCommon::ShaderInfo* shader); - /// Gets a compute kernel in the passed address - Shader* GetComputeKernel(GPUVAddr code_addr); + std::unique_ptr CreateComputePipeline(ShaderContext::ShaderPools& pools, + const ComputePipelineKey& key, + Shader::Environment& env); -private: - ProgramSharedPtr GeneratePrecompiledProgram( - const ShaderDiskCacheEntry& entry, const ShaderDiskCachePrecompiled& precompiled_entry, - const std::unordered_set& supported_formats); + std::unique_ptr CreateWorkers() const; Core::Frontend::EmuWindow& emu_window; - Tegra::GPU& gpu; - Tegra::MemoryManager& gpu_memory; - Tegra::Engines::Maxwell3D& maxwell3d; - Tegra::Engines::KeplerCompute& kepler_compute; const Device& device; + TextureCache& texture_cache; + BufferCache& buffer_cache; + ProgramManager& program_manager; + StateTracker& state_tracker; + VideoCore::ShaderNotify& shader_notify; + const bool use_asynchronous_shaders; - ShaderDiskCacheOpenGL disk_cache; - std::unordered_map runtime_cache; + GraphicsPipelineKey graphics_key{}; + GraphicsPipeline* current_pipeline{}; - std::unique_ptr null_shader; - std::unique_ptr null_kernel; + ShaderContext::ShaderPools main_pools; + std::unordered_map> graphics_cache; + std::unordered_map> compute_cache; - std::array last_shaders{}; + Shader::Profile profile; + Shader::HostTranslateInfo host_info; + + std::filesystem::path shader_cache_filename; + std::unique_ptr workers; }; } // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_shader_context.h b/src/video_core/renderer_opengl/gl_shader_context.h new file mode 100755 index 000000000..6ff34e5d6 --- /dev/null +++ b/src/video_core/renderer_opengl/gl_shader_context.h @@ -0,0 +1,33 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
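// The new gl_shader_context.h below bundles per-context ObjectPools so that
// back-to-back pipeline builds recycle their IR allocations: ReleaseContents()
// resets each pool without returning memory to the OS. A rough stand-in for
// Shader::ObjectPool showing that reuse contract; the real pool allocates in
// chunks, and this simplified version requires T to be move-assignable.
#include <cstddef>
#include <memory>
#include <utility>
#include <vector>

template <typename T>
class ObjectPoolSketch {
public:
    template <typename... Args>
    T* Create(Args&&... args) {
        if (used == storage.size()) {
            storage.push_back(std::make_unique<T>(std::forward<Args>(args)...));
            return storage[used++].get();
        }
        T* const slot = storage[used++].get();
        *slot = T(std::forward<Args>(args)...); // overwrite a recycled slot
        return slot;
    }

    void ReleaseContents() {
        used = 0; // keeps the allocations; the next build reuses the slots
    }

private:
    std::vector<std::unique_ptr<T>> storage;
    std::size_t used = 0;
};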
+
+#pragma once
+
+#include "core/frontend/emu_window.h"
+#include "shader_recompiler/frontend/ir/basic_block.h"
+#include "shader_recompiler/frontend/maxwell/control_flow.h"
+
+namespace OpenGL::ShaderContext {
+struct ShaderPools {
+    void ReleaseContents() {
+        flow_block.ReleaseContents();
+        block.ReleaseContents();
+        inst.ReleaseContents();
+    }
+
+    Shader::ObjectPool<Shader::IR::Inst> inst;
+    Shader::ObjectPool<Shader::IR::Block> block;
+    Shader::ObjectPool<Shader::Maxwell::Flow::Block> flow_block;
+};
+
+struct Context {
+    explicit Context(Core::Frontend::EmuWindow& emu_window)
+        : gl_context{emu_window.CreateSharedContext()}, scoped{*gl_context} {}
+
+    std::unique_ptr<Core::Frontend::GraphicsContext> gl_context;
+    Core::Frontend::GraphicsContext::Scoped scoped;
+    ShaderPools pools;
+};
+
+} // namespace OpenGL::ShaderContext
diff --git a/src/video_core/renderer_opengl/gl_shader_manager.cpp b/src/video_core/renderer_opengl/gl_shader_manager.cpp
index 553e6e8d6..399959afb 100755
--- a/src/video_core/renderer_opengl/gl_shader_manager.cpp
+++ b/src/video_core/renderer_opengl/gl_shader_manager.cpp
@@ -1,149 +1,3 @@
 // Copyright 2018 yuzu Emulator Project
 // Licensed under GPLv2 or any later version
 // Refer to the license.txt file included.
-
-#include <glad/glad.h>
-
-#include "common/common_types.h"
-#include "video_core/engines/maxwell_3d.h"
-#include "video_core/renderer_opengl/gl_device.h"
-#include "video_core/renderer_opengl/gl_shader_manager.h"
-
-namespace OpenGL {
-
-namespace {
-
-void BindProgram(GLenum stage, GLuint current, GLuint old, bool& enabled) {
-    if (current == old) {
-        return;
-    }
-    if (current == 0) {
-        if (enabled) {
-            enabled = false;
-            glDisable(stage);
-        }
-        return;
-    }
-    if (!enabled) {
-        enabled = true;
-        glEnable(stage);
-    }
-    glBindProgramARB(stage, current);
-}
-
-} // Anonymous namespace
-
-ProgramManager::ProgramManager(const Device& device)
-    : use_assembly_programs{device.UseAssemblyShaders()} {
-    if (use_assembly_programs) {
-        glEnable(GL_COMPUTE_PROGRAM_NV);
-    } else {
-        graphics_pipeline.Create();
-        glBindProgramPipeline(graphics_pipeline.handle);
-    }
-}
-
-ProgramManager::~ProgramManager() = default;
-
-void ProgramManager::BindCompute(GLuint program) {
-    if (use_assembly_programs) {
-        glBindProgramARB(GL_COMPUTE_PROGRAM_NV, program);
-    } else {
-        is_graphics_bound = false;
-        glUseProgram(program);
-    }
-}
-
-void ProgramManager::BindGraphicsPipeline() {
-    if (!use_assembly_programs) {
-        UpdateSourcePrograms();
-    }
-}
-
-void ProgramManager::BindHostPipeline(GLuint pipeline) {
-    if (use_assembly_programs) {
-        if (geometry_enabled) {
-            geometry_enabled = false;
-            old_state.geometry = 0;
-            glDisable(GL_GEOMETRY_PROGRAM_NV);
-        }
-    } else {
-        if (!is_graphics_bound) {
-            glUseProgram(0);
-        }
-    }
-    glBindProgramPipeline(pipeline);
-}
-
-void ProgramManager::RestoreGuestPipeline() {
-    if (use_assembly_programs) {
-        glBindProgramPipeline(0);
-    } else {
-        glBindProgramPipeline(graphics_pipeline.handle);
-    }
-}
-
-void ProgramManager::BindHostCompute(GLuint program) {
-    if (use_assembly_programs) {
-        glDisable(GL_COMPUTE_PROGRAM_NV);
-    }
-    glUseProgram(program);
-    is_graphics_bound = false;
-}
-
-void ProgramManager::RestoreGuestCompute() {
-    if (use_assembly_programs) {
-        glEnable(GL_COMPUTE_PROGRAM_NV);
-        glUseProgram(0);
-    }
-}
-
-void ProgramManager::UseVertexShader(GLuint program) {
-    if (use_assembly_programs) {
-        BindProgram(GL_VERTEX_PROGRAM_NV, program, current_state.vertex, vertex_enabled);
-    }
-    current_state.vertex = program;
-}
-
-void ProgramManager::UseGeometryShader(GLuint program) {
-    if (use_assembly_programs) {
-        BindProgram(GL_GEOMETRY_PROGRAM_NV,
program, current_state.vertex, geometry_enabled); - } - current_state.geometry = program; -} - -void ProgramManager::UseFragmentShader(GLuint program) { - if (use_assembly_programs) { - BindProgram(GL_FRAGMENT_PROGRAM_NV, program, current_state.vertex, fragment_enabled); - } - current_state.fragment = program; -} - -void ProgramManager::UpdateSourcePrograms() { - if (!is_graphics_bound) { - is_graphics_bound = true; - glUseProgram(0); - } - - const GLuint handle = graphics_pipeline.handle; - const auto update_state = [handle](GLenum stage, GLuint current, GLuint old) { - if (current == old) { - return; - } - glUseProgramStages(handle, stage, current); - }; - update_state(GL_VERTEX_SHADER_BIT, current_state.vertex, old_state.vertex); - update_state(GL_GEOMETRY_SHADER_BIT, current_state.geometry, old_state.geometry); - update_state(GL_FRAGMENT_SHADER_BIT, current_state.fragment, old_state.fragment); - - old_state = current_state; -} - -void MaxwellUniformData::SetFromRegs(const Tegra::Engines::Maxwell3D& maxwell) { - const auto& regs = maxwell.regs; - - // Y_NEGATE controls what value S2R returns for the Y_DIRECTION system value. - y_direction = regs.screen_y_control.y_negate == 0 ? 1.0f : -1.0f; -} - -} // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_shader_manager.h b/src/video_core/renderer_opengl/gl_shader_manager.h index ad42cce74..88b734bcb 100755 --- a/src/video_core/renderer_opengl/gl_shader_manager.h +++ b/src/video_core/renderer_opengl/gl_shader_manager.h @@ -4,79 +4,86 @@ #pragma once -#include +#include +#include #include +#include "video_core/renderer_opengl/gl_device.h" #include "video_core/renderer_opengl/gl_resource_manager.h" -#include "video_core/renderer_opengl/maxwell_to_gl.h" namespace OpenGL { -class Device; - -/// Uniform structure for the Uniform Buffer Object, all vectors must be 16-byte aligned -/// @note Always keep a vec4 at the end. The GL spec is not clear whether the alignment at -/// the end of a uniform block is included in UNIFORM_BLOCK_DATA_SIZE or not. -/// Not following that rule will cause problems on some AMD drivers. -struct alignas(16) MaxwellUniformData { - void SetFromRegs(const Tegra::Engines::Maxwell3D& maxwell); - - GLfloat y_direction; -}; -static_assert(sizeof(MaxwellUniformData) == 16, "MaxwellUniformData structure size is incorrect"); -static_assert(sizeof(MaxwellUniformData) < 16384, - "MaxwellUniformData structure must be less than 16kb as per the OpenGL spec"); - class ProgramManager { -public: - explicit ProgramManager(const Device& device); - ~ProgramManager(); + static constexpr size_t NUM_STAGES = 5; - /// Binds a compute program - void BindCompute(GLuint program); - - /// Updates bound programs. - void BindGraphicsPipeline(); - - /// Binds an OpenGL pipeline object unsynchronized with the guest state. - void BindHostPipeline(GLuint pipeline); - - /// Rewinds BindHostPipeline state changes. - void RestoreGuestPipeline(); - - /// Binds an OpenGL GLSL program object unsynchronized with the guest state. - void BindHostCompute(GLuint program); - - /// Rewinds BindHostCompute state changes. 
- void RestoreGuestCompute(); - - void UseVertexShader(GLuint program); - void UseGeometryShader(GLuint program); - void UseFragmentShader(GLuint program); - -private: - struct PipelineState { - GLuint vertex = 0; - GLuint geometry = 0; - GLuint fragment = 0; + static constexpr std::array ASSEMBLY_PROGRAM_ENUMS{ + GL_VERTEX_PROGRAM_NV, GL_TESS_CONTROL_PROGRAM_NV, GL_TESS_EVALUATION_PROGRAM_NV, + GL_GEOMETRY_PROGRAM_NV, GL_FRAGMENT_PROGRAM_NV, }; - /// Update GLSL programs. - void UpdateSourcePrograms(); +public: + explicit ProgramManager(const Device& device) { + if (device.UseAssemblyShaders()) { + glEnable(GL_COMPUTE_PROGRAM_NV); + } + } - OGLPipeline graphics_pipeline; + void BindProgram(GLuint program) { + if (current_source_program == program) { + return; + } + current_source_program = program; + glUseProgram(program); + } - PipelineState current_state; - PipelineState old_state; + void BindComputeAssemblyProgram(GLuint program) { + if (current_compute_assembly_program != program) { + current_compute_assembly_program = program; + glBindProgramARB(GL_COMPUTE_PROGRAM_NV, program); + } + if (current_source_program != 0) { + current_source_program = 0; + glUseProgram(0); + } + } - bool use_assembly_programs = false; + void BindAssemblyPrograms(std::span programs, + u32 stage_mask) { + const u32 changed_mask = current_assembly_mask ^ stage_mask; + current_assembly_mask = stage_mask; - bool is_graphics_bound = true; + if (changed_mask != 0) { + for (size_t stage = 0; stage < NUM_STAGES; ++stage) { + if (((changed_mask >> stage) & 1) != 0) { + if (((stage_mask >> stage) & 1) != 0) { + glEnable(ASSEMBLY_PROGRAM_ENUMS[stage]); + } else { + glDisable(ASSEMBLY_PROGRAM_ENUMS[stage]); + } + } + } + } + for (size_t stage = 0; stage < NUM_STAGES; ++stage) { + if (current_assembly_programs[stage] != programs[stage].handle) { + current_assembly_programs[stage] = programs[stage].handle; + glBindProgramARB(ASSEMBLY_PROGRAM_ENUMS[stage], programs[stage].handle); + } + } + if (current_source_program != 0) { + current_source_program = 0; + glUseProgram(0); + } + } - bool vertex_enabled = false; - bool geometry_enabled = false; - bool fragment_enabled = false; + void RestoreGuestCompute() {} + +private: + GLuint current_source_program = 0; + + u32 current_assembly_mask = 0; + std::array current_assembly_programs{}; + GLuint current_compute_assembly_program = 0; }; } // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_shader_util.cpp b/src/video_core/renderer_opengl/gl_shader_util.cpp index 4bf0d6090..5109985f1 100755 --- a/src/video_core/renderer_opengl/gl_shader_util.cpp +++ b/src/video_core/renderer_opengl/gl_shader_util.cpp @@ -5,57 +5,103 @@ #include #include #include + #include "common/assert.h" #include "common/logging/log.h" +#include "common/settings.h" #include "video_core/renderer_opengl/gl_shader_util.h" -namespace OpenGL::GLShader { +namespace OpenGL { -namespace { - -std::string_view StageDebugName(GLenum type) { - switch (type) { - case GL_VERTEX_SHADER: - return "vertex"; - case GL_GEOMETRY_SHADER: - return "geometry"; - case GL_FRAGMENT_SHADER: - return "fragment"; - case GL_COMPUTE_SHADER: - return "compute"; +static void LogShader(GLuint shader, std::string_view code = {}) { + GLint shader_status{}; + glGetShaderiv(shader, GL_COMPILE_STATUS, &shader_status); + if (shader_status == GL_FALSE) { + LOG_ERROR(Render_OpenGL, "Failed to build shader"); + } + GLint log_length{}; + glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &log_length); + if (log_length == 0) { + return; + } + 
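// BindAssemblyPrograms above XORs the previously applied stage mask with the
// requested one, so glEnable/glDisable is issued only for stages whose state
// actually changed. The same bookkeeping in isolation, with printf standing
// in for the GL calls:
#include <cstdint>
#include <cstdio>

void ApplyStageMask(std::uint32_t previous_mask, std::uint32_t requested_mask) {
    const std::uint32_t changed_mask = previous_mask ^ requested_mask;
    for (unsigned stage = 0; stage < 5; ++stage) {
        if (((changed_mask >> stage) & 1) == 0) {
            continue; // enable/disable state for this stage is already correct
        }
        if (((requested_mask >> stage) & 1) != 0) {
            std::printf("glEnable(stage %u)\n", stage);  // stand-in for glEnable
        } else {
            std::printf("glDisable(stage %u)\n", stage); // stand-in for glDisable
        }
    }
}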
std::string log(log_length, 0); + glGetShaderInfoLog(shader, log_length, nullptr, log.data()); + if (shader_status == GL_FALSE) { + LOG_ERROR(Render_OpenGL, "{}", log); + if (!code.empty()) { + LOG_INFO(Render_OpenGL, "\n{}", code); + } + } else { + LOG_WARNING(Render_OpenGL, "{}", log); } - UNIMPLEMENTED(); - return "unknown"; } -} // Anonymous namespace +void AttachShader(GLenum stage, GLuint program, std::string_view code) { + OGLShader shader; + shader.handle = glCreateShader(stage); -GLuint LoadShader(std::string_view source, GLenum type) { - const std::string_view debug_type = StageDebugName(type); - const GLuint shader_id = glCreateShader(type); + const GLint length = static_cast(code.size()); + const GLchar* const code_ptr = code.data(); + glShaderSource(shader.handle, 1, &code_ptr, &length); + glCompileShader(shader.handle); + glAttachShader(program, shader.handle); + if (Settings::values.renderer_debug) { + LogShader(shader.handle, code); + } +} - const GLchar* source_string = source.data(); - const GLint source_length = static_cast(source.size()); +void AttachShader(GLenum stage, GLuint program, std::span code) { + OGLShader shader; + shader.handle = glCreateShader(stage); - glShaderSource(shader_id, 1, &source_string, &source_length); - LOG_DEBUG(Render_OpenGL, "Compiling {} shader...", debug_type); - glCompileShader(shader_id); + glShaderBinary(1, &shader.handle, GL_SHADER_BINARY_FORMAT_SPIR_V_ARB, code.data(), + static_cast(code.size_bytes())); + glSpecializeShader(shader.handle, "main", 0, nullptr, nullptr); + glAttachShader(program, shader.handle); + if (Settings::values.renderer_debug) { + LogShader(shader.handle); + } +} - GLint result = GL_FALSE; - GLint info_log_length; - glGetShaderiv(shader_id, GL_COMPILE_STATUS, &result); - glGetShaderiv(shader_id, GL_INFO_LOG_LENGTH, &info_log_length); +void LinkProgram(GLuint program) { + glLinkProgram(program); + if (!Settings::values.renderer_debug) { + return; + } + GLint link_status{}; + glGetProgramiv(program, GL_LINK_STATUS, &link_status); - if (info_log_length > 1) { - std::string shader_error(info_log_length, ' '); - glGetShaderInfoLog(shader_id, info_log_length, nullptr, &shader_error[0]); - if (result == GL_TRUE) { - LOG_DEBUG(Render_OpenGL, "{}", shader_error); - } else { - LOG_ERROR(Render_OpenGL, "Error compiling {} shader:\n{}", debug_type, shader_error); + GLint log_length{}; + glGetProgramiv(program, GL_INFO_LOG_LENGTH, &log_length); + if (log_length == 0) { + return; + } + std::string log(log_length, 0); + glGetProgramInfoLog(program, log_length, nullptr, log.data()); + if (link_status == GL_FALSE) { + LOG_ERROR(Render_OpenGL, "{}", log); + } else { + LOG_WARNING(Render_OpenGL, "{}", log); + } +} + +OGLAssemblyProgram CompileProgram(std::string_view code, GLenum target) { + OGLAssemblyProgram program; + glGenProgramsARB(1, &program.handle); + glNamedProgramStringEXT(program.handle, target, GL_PROGRAM_FORMAT_ASCII_ARB, + static_cast(code.size()), code.data()); + if (Settings::values.renderer_debug) { + const auto err = reinterpret_cast(glGetString(GL_PROGRAM_ERROR_STRING_NV)); + if (err && *err) { + if (std::strstr(err, "error")) { + LOG_CRITICAL(Render_OpenGL, "\n{}", err); + LOG_INFO(Render_OpenGL, "\n{}", code); + } else { + LOG_WARNING(Render_OpenGL, "\n{}", err); + } } } - return shader_id; + return program; } -} // namespace OpenGL::GLShader +} // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_shader_util.h b/src/video_core/renderer_opengl/gl_shader_util.h index 1b770532e..ff5aa024f 100755 --- 
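// A usage sketch for the new helpers: building a vertex+fragment program the
// same way InitOpenGLObjects does later in this patch. The shader sources are
// placeholders, and note that compile/link diagnostics are only logged when
// Settings::values.renderer_debug is set.
#include <string_view>

#include <glad/glad.h>

#include "video_core/renderer_opengl/gl_resource_manager.h"
#include "video_core/renderer_opengl/gl_shader_util.h"

OpenGL::OGLProgram MakePresentLikeProgram(std::string_view vert_source,
                                          std::string_view frag_source) {
    OpenGL::OGLProgram program;
    program.handle = glCreateProgram();
    OpenGL::AttachShader(GL_VERTEX_SHADER, program.handle, vert_source);
    OpenGL::AttachShader(GL_FRAGMENT_SHADER, program.handle, frag_source);
    OpenGL::LinkProgram(program.handle);
    return program;
}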
a/src/video_core/renderer_opengl/gl_shader_util.h +++ b/src/video_core/renderer_opengl/gl_shader_util.h @@ -4,92 +4,25 @@ #pragma once +#include #include +#include #include + #include + #include "common/assert.h" #include "common/logging/log.h" +#include "video_core/renderer_opengl/gl_resource_manager.h" -namespace OpenGL::GLShader { +namespace OpenGL { -/** - * Utility function to log the source code of a list of shaders. - * @param shaders The OpenGL shaders whose source we will print. - */ -template -void LogShaderSource(T... shaders) { - auto shader_list = {shaders...}; +void AttachShader(GLenum stage, GLuint program, std::string_view code); - for (const auto& shader : shader_list) { - if (shader == 0) - continue; +void AttachShader(GLenum stage, GLuint program, std::span code); - GLint source_length; - glGetShaderiv(shader, GL_SHADER_SOURCE_LENGTH, &source_length); +void LinkProgram(GLuint program); - std::string source(source_length, ' '); - glGetShaderSource(shader, source_length, nullptr, &source[0]); - LOG_INFO(Render_OpenGL, "Shader source {}", source); - } -} +OGLAssemblyProgram CompileProgram(std::string_view code, GLenum target); -/** - * Utility function to create and compile an OpenGL GLSL shader - * @param source String of the GLSL shader program - * @param type Type of the shader (GL_VERTEX_SHADER, GL_GEOMETRY_SHADER or GL_FRAGMENT_SHADER) - */ -GLuint LoadShader(std::string_view source, GLenum type); - -/** - * Utility function to create and compile an OpenGL GLSL shader program (vertex + fragment shader) - * @param separable_program whether to create a separable program - * @param shaders ID of shaders to attach to the program - * @returns Handle of the newly created OpenGL program object - */ -template -GLuint LoadProgram(bool separable_program, bool hint_retrievable, T... shaders) { - // Link the program - LOG_DEBUG(Render_OpenGL, "Linking program..."); - - GLuint program_id = glCreateProgram(); - - ((shaders == 0 ? (void)0 : glAttachShader(program_id, shaders)), ...); - - if (separable_program) { - glProgramParameteri(program_id, GL_PROGRAM_SEPARABLE, GL_TRUE); - } - if (hint_retrievable) { - glProgramParameteri(program_id, GL_PROGRAM_BINARY_RETRIEVABLE_HINT, GL_TRUE); - } - - glLinkProgram(program_id); - - // Check the program - GLint result = GL_FALSE; - GLint info_log_length; - glGetProgramiv(program_id, GL_LINK_STATUS, &result); - glGetProgramiv(program_id, GL_INFO_LOG_LENGTH, &info_log_length); - - if (info_log_length > 1) { - std::string program_error(info_log_length, ' '); - glGetProgramInfoLog(program_id, info_log_length, nullptr, &program_error[0]); - if (result == GL_TRUE) { - LOG_DEBUG(Render_OpenGL, "{}", program_error); - } else { - LOG_ERROR(Render_OpenGL, "Error linking shader:\n{}", program_error); - } - } - - if (result == GL_FALSE) { - // There was a problem linking the shader, print the source for debugging purposes. - LogShaderSource(shaders...); - } - - ASSERT_MSG(result == GL_TRUE, "Shader not linked"); - - ((shaders == 0 ? 
(void)0 : glDetachShader(program_id, shaders)), ...); - - return program_id; -} - -} // namespace OpenGL::GLShader +} // namespace OpenGL diff --git a/src/video_core/renderer_opengl/gl_state_tracker.cpp b/src/video_core/renderer_opengl/gl_state_tracker.cpp index dbdf5230f..586da84e3 100755 --- a/src/video_core/renderer_opengl/gl_state_tracker.cpp +++ b/src/video_core/renderer_opengl/gl_state_tracker.cpp @@ -83,11 +83,6 @@ void SetupDirtyScissors(Tables& tables) { FillBlock(tables[1], OFF(scissor_test), NUM(scissor_test), Scissors); } -void SetupDirtyShaders(Tables& tables) { - FillBlock(tables[0], OFF(shader_config[0]), NUM(shader_config[0]) * Regs::MaxShaderProgram, - Shaders); -} - void SetupDirtyPolygonModes(Tables& tables) { tables[0][OFF(polygon_mode_front)] = PolygonModeFront; tables[0][OFF(polygon_mode_back)] = PolygonModeBack; @@ -217,7 +212,6 @@ StateTracker::StateTracker(Tegra::GPU& gpu) : flags{gpu.Maxwell3D().dirty.flags} SetupDirtyScissors(tables); SetupDirtyVertexInstances(tables); SetupDirtyVertexFormat(tables); - SetupDirtyShaders(tables); SetupDirtyPolygonModes(tables); SetupDirtyDepthTest(tables); SetupDirtyStencilTest(tables); diff --git a/src/video_core/renderer_opengl/gl_state_tracker.h b/src/video_core/renderer_opengl/gl_state_tracker.h index 94c905116..5864c7c07 100755 --- a/src/video_core/renderer_opengl/gl_state_tracker.h +++ b/src/video_core/renderer_opengl/gl_state_tracker.h @@ -52,7 +52,6 @@ enum : u8 { BlendState0, BlendState7 = BlendState0 + 7, - Shaders, ClipDistances, PolygonModes, diff --git a/src/video_core/renderer_opengl/gl_texture_cache.cpp b/src/video_core/renderer_opengl/gl_texture_cache.cpp index ff0f03e99..c373c9cb4 100755 --- a/src/video_core/renderer_opengl/gl_texture_cache.cpp +++ b/src/video_core/renderer_opengl/gl_texture_cache.cpp @@ -24,9 +24,7 @@ #include "video_core/textures/decoders.h" namespace OpenGL { - namespace { - using Tegra::Texture::SwizzleSource; using Tegra::Texture::TextureMipmapFilter; using Tegra::Texture::TextureType; @@ -59,107 +57,6 @@ struct CopyRegion { GLsizei depth; }; -struct FormatTuple { - GLenum internal_format; - GLenum format = GL_NONE; - GLenum type = GL_NONE; -}; - -constexpr std::array FORMAT_TABLE = {{ - {GL_RGBA8, GL_RGBA, GL_UNSIGNED_INT_8_8_8_8_REV}, // A8B8G8R8_UNORM - {GL_RGBA8_SNORM, GL_RGBA, GL_BYTE}, // A8B8G8R8_SNORM - {GL_RGBA8I, GL_RGBA_INTEGER, GL_BYTE}, // A8B8G8R8_SINT - {GL_RGBA8UI, GL_RGBA_INTEGER, GL_UNSIGNED_BYTE}, // A8B8G8R8_UINT - {GL_RGB565, GL_RGB, GL_UNSIGNED_SHORT_5_6_5}, // R5G6B5_UNORM - {GL_RGB565, GL_RGB, GL_UNSIGNED_SHORT_5_6_5_REV}, // B5G6R5_UNORM - {GL_RGB5_A1, GL_BGRA, GL_UNSIGNED_SHORT_1_5_5_5_REV}, // A1R5G5B5_UNORM - {GL_RGB10_A2, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV}, // A2B10G10R10_UNORM - {GL_RGB10_A2UI, GL_RGBA_INTEGER, GL_UNSIGNED_INT_2_10_10_10_REV}, // A2B10G10R10_UINT - {GL_RGB5_A1, GL_RGBA, GL_UNSIGNED_SHORT_1_5_5_5_REV}, // A1B5G5R5_UNORM - {GL_R8, GL_RED, GL_UNSIGNED_BYTE}, // R8_UNORM - {GL_R8_SNORM, GL_RED, GL_BYTE}, // R8_SNORM - {GL_R8I, GL_RED_INTEGER, GL_BYTE}, // R8_SINT - {GL_R8UI, GL_RED_INTEGER, GL_UNSIGNED_BYTE}, // R8_UINT - {GL_RGBA16F, GL_RGBA, GL_HALF_FLOAT}, // R16G16B16A16_FLOAT - {GL_RGBA16, GL_RGBA, GL_UNSIGNED_SHORT}, // R16G16B16A16_UNORM - {GL_RGBA16_SNORM, GL_RGBA, GL_SHORT}, // R16G16B16A16_SNORM - {GL_RGBA16I, GL_RGBA_INTEGER, GL_SHORT}, // R16G16B16A16_SINT - {GL_RGBA16UI, GL_RGBA_INTEGER, GL_UNSIGNED_SHORT}, // R16G16B16A16_UINT - {GL_R11F_G11F_B10F, GL_RGB, GL_UNSIGNED_INT_10F_11F_11F_REV}, // B10G11R11_FLOAT - {GL_RGBA32UI, 
GL_RGBA_INTEGER, GL_UNSIGNED_INT}, // R32G32B32A32_UINT - {GL_COMPRESSED_RGBA_S3TC_DXT1_EXT}, // BC1_RGBA_UNORM - {GL_COMPRESSED_RGBA_S3TC_DXT3_EXT}, // BC2_UNORM - {GL_COMPRESSED_RGBA_S3TC_DXT5_EXT}, // BC3_UNORM - {GL_COMPRESSED_RED_RGTC1}, // BC4_UNORM - {GL_COMPRESSED_SIGNED_RED_RGTC1}, // BC4_SNORM - {GL_COMPRESSED_RG_RGTC2}, // BC5_UNORM - {GL_COMPRESSED_SIGNED_RG_RGTC2}, // BC5_SNORM - {GL_COMPRESSED_RGBA_BPTC_UNORM}, // BC7_UNORM - {GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT}, // BC6H_UFLOAT - {GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT}, // BC6H_SFLOAT - {GL_COMPRESSED_RGBA_ASTC_4x4_KHR}, // ASTC_2D_4X4_UNORM - {GL_RGBA8, GL_RGBA, GL_UNSIGNED_BYTE}, // B8G8R8A8_UNORM - {GL_RGBA32F, GL_RGBA, GL_FLOAT}, // R32G32B32A32_FLOAT - {GL_RGBA32I, GL_RGBA_INTEGER, GL_INT}, // R32G32B32A32_SINT - {GL_RG32F, GL_RG, GL_FLOAT}, // R32G32_FLOAT - {GL_RG32I, GL_RG_INTEGER, GL_INT}, // R32G32_SINT - {GL_R32F, GL_RED, GL_FLOAT}, // R32_FLOAT - {GL_R16F, GL_RED, GL_HALF_FLOAT}, // R16_FLOAT - {GL_R16, GL_RED, GL_UNSIGNED_SHORT}, // R16_UNORM - {GL_R16_SNORM, GL_RED, GL_SHORT}, // R16_SNORM - {GL_R16UI, GL_RED_INTEGER, GL_UNSIGNED_SHORT}, // R16_UINT - {GL_R16I, GL_RED_INTEGER, GL_SHORT}, // R16_SINT - {GL_RG16, GL_RG, GL_UNSIGNED_SHORT}, // R16G16_UNORM - {GL_RG16F, GL_RG, GL_HALF_FLOAT}, // R16G16_FLOAT - {GL_RG16UI, GL_RG_INTEGER, GL_UNSIGNED_SHORT}, // R16G16_UINT - {GL_RG16I, GL_RG_INTEGER, GL_SHORT}, // R16G16_SINT - {GL_RG16_SNORM, GL_RG, GL_SHORT}, // R16G16_SNORM - {GL_RGB32F, GL_RGB, GL_FLOAT}, // R32G32B32_FLOAT - {GL_SRGB8_ALPHA8, GL_RGBA, GL_UNSIGNED_INT_8_8_8_8_REV}, // A8B8G8R8_SRGB - {GL_RG8, GL_RG, GL_UNSIGNED_BYTE}, // R8G8_UNORM - {GL_RG8_SNORM, GL_RG, GL_BYTE}, // R8G8_SNORM - {GL_RG8I, GL_RG_INTEGER, GL_BYTE}, // R8G8_SINT - {GL_RG8UI, GL_RG_INTEGER, GL_UNSIGNED_BYTE}, // R8G8_UINT - {GL_RG32UI, GL_RG_INTEGER, GL_UNSIGNED_INT}, // R32G32_UINT - {GL_RGB16F, GL_RGBA, GL_HALF_FLOAT}, // R16G16B16X16_FLOAT - {GL_R32UI, GL_RED_INTEGER, GL_UNSIGNED_INT}, // R32_UINT - {GL_R32I, GL_RED_INTEGER, GL_INT}, // R32_SINT - {GL_COMPRESSED_RGBA_ASTC_8x8_KHR}, // ASTC_2D_8X8_UNORM - {GL_COMPRESSED_RGBA_ASTC_8x5_KHR}, // ASTC_2D_8X5_UNORM - {GL_COMPRESSED_RGBA_ASTC_5x4_KHR}, // ASTC_2D_5X4_UNORM - {GL_SRGB8_ALPHA8, GL_RGBA, GL_UNSIGNED_BYTE}, // B8G8R8A8_SRGB - {GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT}, // BC1_RGBA_SRGB - {GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT}, // BC2_SRGB - {GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT}, // BC3_SRGB - {GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM}, // BC7_SRGB - {GL_RGBA4, GL_RGBA, GL_UNSIGNED_SHORT_4_4_4_4_REV}, // A4B4G4R4_UNORM - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR}, // ASTC_2D_4X4_SRGB - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR}, // ASTC_2D_8X8_SRGB - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR}, // ASTC_2D_8X5_SRGB - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR}, // ASTC_2D_5X4_SRGB - {GL_COMPRESSED_RGBA_ASTC_5x5_KHR}, // ASTC_2D_5X5_UNORM - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR}, // ASTC_2D_5X5_SRGB - {GL_COMPRESSED_RGBA_ASTC_10x8_KHR}, // ASTC_2D_10X8_UNORM - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR}, // ASTC_2D_10X8_SRGB - {GL_COMPRESSED_RGBA_ASTC_6x6_KHR}, // ASTC_2D_6X6_UNORM - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR}, // ASTC_2D_6X6_SRGB - {GL_COMPRESSED_RGBA_ASTC_10x10_KHR}, // ASTC_2D_10X10_UNORM - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR}, // ASTC_2D_10X10_SRGB - {GL_COMPRESSED_RGBA_ASTC_12x12_KHR}, // ASTC_2D_12X12_UNORM - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR}, // ASTC_2D_12X12_SRGB - {GL_COMPRESSED_RGBA_ASTC_8x6_KHR}, // ASTC_2D_8X6_UNORM - 
{GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR}, // ASTC_2D_8X6_SRGB - {GL_COMPRESSED_RGBA_ASTC_6x5_KHR}, // ASTC_2D_6X5_UNORM - {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR}, // ASTC_2D_6X5_SRGB - {GL_RGB9_E5, GL_RGB, GL_UNSIGNED_INT_5_9_9_9_REV}, // E5B9G9R9_FLOAT - {GL_DEPTH_COMPONENT32F, GL_DEPTH_COMPONENT, GL_FLOAT}, // D32_FLOAT - {GL_DEPTH_COMPONENT16, GL_DEPTH_COMPONENT, GL_UNSIGNED_SHORT}, // D16_UNORM - {GL_DEPTH24_STENCIL8, GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8}, // D24_UNORM_S8_UINT - {GL_DEPTH24_STENCIL8, GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8}, // S8_UINT_D24_UNORM - {GL_DEPTH32F_STENCIL8, GL_DEPTH_STENCIL, - GL_FLOAT_32_UNSIGNED_INT_24_8_REV}, // D32_FLOAT_S8_UINT -}}; - constexpr std::array ACCELERATED_FORMATS{ GL_RGBA32F, GL_RGBA16F, GL_RG32F, GL_RG16F, GL_R11F_G11F_B10F, GL_R32F, GL_R16F, GL_RGBA32UI, GL_RGBA16UI, GL_RGB10_A2UI, GL_RGBA8UI, GL_RG32UI, @@ -170,11 +67,6 @@ constexpr std::array ACCELERATED_FORMATS{ GL_RG8_SNORM, GL_R16_SNORM, GL_R8_SNORM, }; -const FormatTuple& GetFormatTuple(PixelFormat pixel_format) { - ASSERT(static_cast(pixel_format) < FORMAT_TABLE.size()); - return FORMAT_TABLE[static_cast(pixel_format)]; -} - GLenum ImageTarget(const VideoCommon::ImageInfo& info) { switch (info.type) { case ImageType::e1D: @@ -195,26 +87,24 @@ GLenum ImageTarget(const VideoCommon::ImageInfo& info) { return GL_NONE; } -GLenum ImageTarget(ImageViewType type, int num_samples = 1) { +GLenum ImageTarget(Shader::TextureType type, int num_samples = 1) { const bool is_multisampled = num_samples > 1; switch (type) { - case ImageViewType::e1D: + case Shader::TextureType::Color1D: return GL_TEXTURE_1D; - case ImageViewType::e2D: + case Shader::TextureType::Color2D: return is_multisampled ? GL_TEXTURE_2D_MULTISAMPLE : GL_TEXTURE_2D; - case ImageViewType::Cube: + case Shader::TextureType::ColorCube: return GL_TEXTURE_CUBE_MAP; - case ImageViewType::e3D: + case Shader::TextureType::Color3D: return GL_TEXTURE_3D; - case ImageViewType::e1DArray: + case Shader::TextureType::ColorArray1D: return GL_TEXTURE_1D_ARRAY; - case ImageViewType::e2DArray: + case Shader::TextureType::ColorArray2D: return is_multisampled ? 
GL_TEXTURE_2D_MULTISAMPLE_ARRAY : GL_TEXTURE_2D_ARRAY; - case ImageViewType::CubeArray: + case Shader::TextureType::ColorArrayCube: return GL_TEXTURE_CUBE_MAP_ARRAY; - case ImageViewType::Rect: - return GL_TEXTURE_RECTANGLE; - case ImageViewType::Buffer: + case Shader::TextureType::Buffer: return GL_TEXTURE_BUFFER; } UNREACHABLE_MSG("Invalid image view type={}", type); @@ -322,7 +212,7 @@ void ApplySwizzle(GLuint handle, PixelFormat format, std::arrayflags & VideoCommon::ImageViewFlagBits::Slice)) { - const GLuint texture = image_view->DefaultHandle(); - glNamedFramebufferTexture(fbo, attachment, texture, 0); + glNamedFramebufferTexture(fbo, attachment, image_view->DefaultHandle(), 0); return; } - const GLuint texture = image_view->Handle(ImageViewType::e3D); + const GLuint texture = image_view->Handle(Shader::TextureType::Color3D); if (image_view->range.extent.layers > 1) { // TODO: OpenGL doesn't support rendering to a fixed number of slices glNamedFramebufferTexture(fbo, attachment, texture, 0); @@ -439,6 +328,28 @@ void AttachTexture(GLuint fbo, GLenum attachment, const ImageView* image_view) { } } +[[nodiscard]] GLenum ShaderFormat(Shader::ImageFormat format) { + switch (format) { + case Shader::ImageFormat::Typeless: + break; + case Shader::ImageFormat::R8_SINT: + return GL_R8I; + case Shader::ImageFormat::R8_UINT: + return GL_R8UI; + case Shader::ImageFormat::R16_UINT: + return GL_R16UI; + case Shader::ImageFormat::R16_SINT: + return GL_R16I; + case Shader::ImageFormat::R32_UINT: + return GL_R32UI; + case Shader::ImageFormat::R32G32_UINT: + return GL_RG32UI; + case Shader::ImageFormat::R32G32B32A32_UINT: + return GL_RGBA32UI; + } + UNREACHABLE_MSG("Invalid image format={}", format); + return GL_R32UI; +} } // Anonymous namespace ImageBufferMap::~ImageBufferMap() { @@ -453,7 +364,7 @@ TextureCacheRuntime::TextureCacheRuntime(const Device& device_, ProgramManager& static constexpr std::array TARGETS{GL_TEXTURE_1D_ARRAY, GL_TEXTURE_2D_ARRAY, GL_TEXTURE_3D}; for (size_t i = 0; i < TARGETS.size(); ++i) { const GLenum target = TARGETS[i]; - for (const FormatTuple& tuple : FORMAT_TABLE) { + for (const MaxwellToGL::FormatTuple& tuple : MaxwellToGL::FORMAT_TABLE) { const GLenum format = tuple.internal_format; GLint compat_class; GLint compat_type; @@ -475,11 +386,9 @@ TextureCacheRuntime::TextureCacheRuntime(const Device& device_, ProgramManager& null_image_1d_array.Create(GL_TEXTURE_1D_ARRAY); null_image_cube_array.Create(GL_TEXTURE_CUBE_MAP_ARRAY); null_image_3d.Create(GL_TEXTURE_3D); - null_image_rect.Create(GL_TEXTURE_RECTANGLE); glTextureStorage2D(null_image_1d_array.handle, 1, GL_R8, 1, 1); glTextureStorage3D(null_image_cube_array.handle, 1, GL_R8, 1, 1, 6); glTextureStorage3D(null_image_3d.handle, 1, GL_R8, 1, 1, 1); - glTextureStorage2D(null_image_rect.handle, 1, GL_R8, 1, 1); std::array new_handles; glGenTextures(static_cast(new_handles.size()), new_handles.data()); @@ -496,29 +405,28 @@ TextureCacheRuntime::TextureCacheRuntime(const Device& device_, ProgramManager& glTextureView(null_image_view_cube.handle, GL_TEXTURE_CUBE_MAP, null_image_cube_array.handle, GL_R8, 0, 1, 0, 6); const std::array texture_handles{ - null_image_1d_array.handle, null_image_cube_array.handle, null_image_3d.handle, - null_image_rect.handle, null_image_view_1d.handle, null_image_view_2d.handle, - null_image_view_2d_array.handle, null_image_view_cube.handle, + null_image_1d_array.handle, null_image_cube_array.handle, null_image_3d.handle, + null_image_view_1d.handle, null_image_view_2d.handle, 
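// The ImageTarget overload above collapses Shader::TextureType to a GL
// texture target. Restated as a standalone helper; the enum is redefined
// locally so the sketch compiles on its own, mirroring the values used in
// this file:
#include <glad/glad.h>

enum class TextureType { Color1D, ColorArray1D, Color2D, ColorArray2D,
                         ColorCube, ColorArrayCube, Color3D, Buffer };

GLenum ToGLTarget(TextureType type, int num_samples) {
    const bool msaa = num_samples > 1;
    switch (type) {
    case TextureType::Color1D:        return GL_TEXTURE_1D;
    case TextureType::ColorArray1D:   return GL_TEXTURE_1D_ARRAY;
    case TextureType::Color2D:        return msaa ? GL_TEXTURE_2D_MULTISAMPLE : GL_TEXTURE_2D;
    case TextureType::ColorArray2D:
        return msaa ? GL_TEXTURE_2D_MULTISAMPLE_ARRAY : GL_TEXTURE_2D_ARRAY;
    case TextureType::ColorCube:      return GL_TEXTURE_CUBE_MAP;
    case TextureType::ColorArrayCube: return GL_TEXTURE_CUBE_MAP_ARRAY;
    case TextureType::Color3D:        return GL_TEXTURE_3D;
    case TextureType::Buffer:         return GL_TEXTURE_BUFFER;
    }
    return GL_NONE;
}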
null_image_view_2d_array.handle, + null_image_view_cube.handle, }; for (const GLuint handle : texture_handles) { static constexpr std::array NULL_SWIZZLE{GL_ZERO, GL_ZERO, GL_ZERO, GL_ZERO}; glTextureParameteriv(handle, GL_TEXTURE_SWIZZLE_RGBA, NULL_SWIZZLE.data()); } - const auto set_view = [this](ImageViewType type, GLuint handle) { + const auto set_view = [this](Shader::TextureType type, GLuint handle) { if (device.HasDebuggingToolAttached()) { const std::string name = fmt::format("NullImage {}", type); glObjectLabel(GL_TEXTURE, handle, static_cast(name.size()), name.data()); } null_image_views[static_cast(type)] = handle; }; - set_view(ImageViewType::e1D, null_image_view_1d.handle); - set_view(ImageViewType::e2D, null_image_view_2d.handle); - set_view(ImageViewType::Cube, null_image_view_cube.handle); - set_view(ImageViewType::e3D, null_image_3d.handle); - set_view(ImageViewType::e1DArray, null_image_1d_array.handle); - set_view(ImageViewType::e2DArray, null_image_view_2d_array.handle); - set_view(ImageViewType::CubeArray, null_image_cube_array.handle); - set_view(ImageViewType::Rect, null_image_rect.handle); + set_view(Shader::TextureType::Color1D, null_image_view_1d.handle); + set_view(Shader::TextureType::Color2D, null_image_view_2d.handle); + set_view(Shader::TextureType::ColorCube, null_image_view_cube.handle); + set_view(Shader::TextureType::Color3D, null_image_3d.handle); + set_view(Shader::TextureType::ColorArray1D, null_image_1d_array.handle); + set_view(Shader::TextureType::ColorArray2D, null_image_view_2d_array.handle); + set_view(Shader::TextureType::ColorArrayCube, null_image_cube_array.handle); } TextureCacheRuntime::~TextureCacheRuntime() = default; @@ -710,7 +618,7 @@ Image::Image(TextureCacheRuntime& runtime, const VideoCommon::ImageInfo& info_, gl_format = GL_RGBA; gl_type = GL_UNSIGNED_INT_8_8_8_8_REV; } else { - const auto& tuple = GetFormatTuple(info.format); + const auto& tuple = MaxwellToGL::GetFormatTuple(info.format); gl_internal_format = tuple.internal_format; gl_format = tuple.format; gl_type = tuple.type; @@ -750,8 +658,7 @@ Image::Image(TextureCacheRuntime& runtime, const VideoCommon::ImageInfo& info_, glTextureStorage3D(handle, num_levels, gl_internal_format, width, height, depth); break; case GL_TEXTURE_BUFFER: - buffer.Create(); - glNamedBufferStorage(buffer.handle, guest_size_bytes, nullptr, 0); + UNREACHABLE(); break; default: UNREACHABLE_MSG("Invalid target=0x{:x}", target); @@ -789,14 +696,6 @@ void Image::UploadMemory(const ImageBufferMap& map, } } -void Image::UploadMemory(const ImageBufferMap& map, - std::span copies) { - for (const VideoCommon::BufferCopy& copy : copies) { - glCopyNamedBufferSubData(map.buffer, buffer.handle, copy.src_offset + map.offset, - copy.dst_offset, copy.size); - } -} - void Image::DownloadMemory(ImageBufferMap& map, std::span copies) { glMemoryBarrier(GL_PIXEL_BUFFER_BARRIER_BIT); // TODO: Move this to its own API @@ -958,23 +857,30 @@ ImageView::ImageView(TextureCacheRuntime& runtime, const VideoCommon::ImageViewI if (True(image.flags & ImageFlagBits::Converted)) { internal_format = IsPixelFormatSRGB(info.format) ? 
GL_SRGB8_ALPHA8 : GL_RGBA8; } else { - internal_format = GetFormatTuple(format).internal_format; + internal_format = MaxwellToGL::GetFormatTuple(format).internal_format; + } + full_range = info.range; + flat_range = info.range; + set_object_label = device.HasDebuggingToolAttached(); + is_render_target = info.IsRenderTarget(); + original_texture = image.texture.handle; + num_samples = image.info.num_samples; + if (!is_render_target) { + swizzle[0] = info.x_source; + swizzle[1] = info.y_source; + swizzle[2] = info.z_source; + swizzle[3] = info.w_source; } - VideoCommon::SubresourceRange flatten_range = info.range; - std::array handles; - stored_views.reserve(2); - switch (info.type) { case ImageViewType::e1DArray: - flatten_range.extent.layers = 1; + flat_range.extent.layers = 1; [[fallthrough]]; case ImageViewType::e1D: - glGenTextures(2, handles.data()); - SetupView(device, image, ImageViewType::e1D, handles[0], info, flatten_range); - SetupView(device, image, ImageViewType::e1DArray, handles[1], info, info.range); + SetupView(Shader::TextureType::Color1D); + SetupView(Shader::TextureType::ColorArray1D); break; case ImageViewType::e2DArray: - flatten_range.extent.layers = 1; + flat_range.extent.layers = 1; [[fallthrough]]; case ImageViewType::e2D: if (True(flags & VideoCommon::ImageViewFlagBits::Slice)) { @@ -984,63 +890,126 @@ ImageView::ImageView(TextureCacheRuntime& runtime, const VideoCommon::ImageViewI .base = {.level = info.range.base.level, .layer = 0}, .extent = {.levels = 1, .layers = 1}, }; - glGenTextures(1, handles.data()); - SetupView(device, image, ImageViewType::e3D, handles[0], info, slice_range); - break; + full_range = slice_range; + + SetupView(Shader::TextureType::Color3D); + } else { + SetupView(Shader::TextureType::Color2D); + SetupView(Shader::TextureType::ColorArray2D); } - glGenTextures(2, handles.data()); - SetupView(device, image, ImageViewType::e2D, handles[0], info, flatten_range); - SetupView(device, image, ImageViewType::e2DArray, handles[1], info, info.range); break; case ImageViewType::e3D: - glGenTextures(1, handles.data()); - SetupView(device, image, ImageViewType::e3D, handles[0], info, info.range); + SetupView(Shader::TextureType::Color3D); break; case ImageViewType::CubeArray: - flatten_range.extent.layers = 6; + flat_range.extent.layers = 6; [[fallthrough]]; case ImageViewType::Cube: - glGenTextures(2, handles.data()); - SetupView(device, image, ImageViewType::Cube, handles[0], info, flatten_range); - SetupView(device, image, ImageViewType::CubeArray, handles[1], info, info.range); + SetupView(Shader::TextureType::ColorCube); + SetupView(Shader::TextureType::ColorArrayCube); break; case ImageViewType::Rect: - glGenTextures(1, handles.data()); - SetupView(device, image, ImageViewType::Rect, handles[0], info, info.range); + UNIMPLEMENTED(); break; case ImageViewType::Buffer: - glCreateTextures(GL_TEXTURE_BUFFER, 1, handles.data()); - SetupView(device, image, ImageViewType::Buffer, handles[0], info, info.range); + UNREACHABLE(); + break; + } + switch (info.type) { + case ImageViewType::e1D: + default_handle = Handle(Shader::TextureType::Color1D); + break; + case ImageViewType::e1DArray: + default_handle = Handle(Shader::TextureType::ColorArray1D); + break; + case ImageViewType::e2D: + default_handle = Handle(Shader::TextureType::Color2D); + break; + case ImageViewType::e2DArray: + default_handle = Handle(Shader::TextureType::ColorArray2D); + break; + case ImageViewType::e3D: + default_handle = Handle(Shader::TextureType::Color3D); + break; + case 
ImageViewType::Cube: + default_handle = Handle(Shader::TextureType::ColorCube); + break; + case ImageViewType::CubeArray: + default_handle = Handle(Shader::TextureType::ColorArrayCube); + break; + default: break; } - default_handle = Handle(info.type); } +ImageView::ImageView(TextureCacheRuntime&, const VideoCommon::ImageInfo& info, + const VideoCommon::ImageViewInfo& view_info, GPUVAddr gpu_addr_) + : VideoCommon::ImageViewBase{info, view_info}, gpu_addr{gpu_addr_}, + buffer_size{VideoCommon::CalculateGuestSizeInBytes(info)} {} + +ImageView::ImageView(TextureCacheRuntime&, const VideoCommon::ImageInfo& info, + const VideoCommon::ImageViewInfo& view_info) + : VideoCommon::ImageViewBase{info, view_info} {} + ImageView::ImageView(TextureCacheRuntime& runtime, const VideoCommon::NullImageParams& params) : VideoCommon::ImageViewBase{params}, views{runtime.null_image_views} {} -void ImageView::SetupView(const Device& device, Image& image, ImageViewType view_type, - GLuint handle, const VideoCommon::ImageViewInfo& info, - VideoCommon::SubresourceRange view_range) { - if (info.type == ImageViewType::Buffer) { - // TODO: Take offset from buffer cache - glTextureBufferRange(handle, internal_format, image.buffer.handle, 0, - image.guest_size_bytes); - } else { - const GLuint parent = image.texture.handle; - const GLenum target = ImageTarget(view_type, image.info.num_samples); - glTextureView(handle, target, parent, internal_format, view_range.base.level, - view_range.extent.levels, view_range.base.layer, view_range.extent.layers); - if (!info.IsRenderTarget()) { - ApplySwizzle(handle, format, info.Swizzle()); - } +GLuint ImageView::StorageView(Shader::TextureType texture_type, Shader::ImageFormat image_format) { + if (image_format == Shader::ImageFormat::Typeless) { + return Handle(texture_type); } - if (device.HasDebuggingToolAttached()) { - const std::string name = VideoCommon::Name(*this, view_type); - glObjectLabel(GL_TEXTURE, handle, static_cast(name.size()), name.data()); + const bool is_signed{image_format == Shader::ImageFormat::R8_SINT || + image_format == Shader::ImageFormat::R16_SINT}; + if (!storage_views) { + storage_views = std::make_unique(); } - stored_views.emplace_back().handle = handle; - views[static_cast(view_type)] = handle; + auto& type_views{is_signed ? 
storage_views->signeds : storage_views->unsigneds}; + GLuint& view{type_views[static_cast(texture_type)]}; + if (view == 0) { + view = MakeView(texture_type, ShaderFormat(image_format)); + } + return view; +} + +void ImageView::SetupView(Shader::TextureType view_type) { + views[static_cast(view_type)] = MakeView(view_type, internal_format); +} + +GLuint ImageView::MakeView(Shader::TextureType view_type, GLenum view_format) { + VideoCommon::SubresourceRange view_range; + switch (view_type) { + case Shader::TextureType::Color1D: + case Shader::TextureType::Color2D: + case Shader::TextureType::ColorCube: + view_range = flat_range; + break; + case Shader::TextureType::ColorArray1D: + case Shader::TextureType::ColorArray2D: + case Shader::TextureType::Color3D: + case Shader::TextureType::ColorArrayCube: + view_range = full_range; + break; + default: + UNREACHABLE(); + } + OGLTextureView& view = stored_views.emplace_back(); + view.Create(); + + const GLenum target = ImageTarget(view_type, num_samples); + glTextureView(view.handle, target, original_texture, view_format, view_range.base.level, + view_range.extent.levels, view_range.base.layer, view_range.extent.layers); + if (!is_render_target) { + std::array casted_swizzle; + std::ranges::transform(swizzle, casted_swizzle.begin(), [](u8 component_swizzle) { + return static_cast(component_swizzle); + }); + ApplySwizzle(view.handle, format, casted_swizzle); + } + if (set_object_label) { + const std::string name = VideoCommon::Name(*this); + glObjectLabel(GL_TEXTURE, view.handle, static_cast(name.size()), name.data()); + } + return view.handle; } Sampler::Sampler(TextureCacheRuntime& runtime, const TSCEntry& config) { diff --git a/src/video_core/renderer_opengl/gl_texture_cache.h b/src/video_core/renderer_opengl/gl_texture_cache.h index 25fe61566..86ef14108 100755 --- a/src/video_core/renderer_opengl/gl_texture_cache.h +++ b/src/video_core/renderer_opengl/gl_texture_cache.h @@ -9,6 +9,7 @@ #include +#include "shader_recompiler/shader_info.h" #include "video_core/renderer_opengl/gl_resource_manager.h" #include "video_core/renderer_opengl/util_shaders.h" #include "video_core/texture_cache/texture_cache.h" @@ -127,13 +128,12 @@ private: OGLTexture null_image_1d_array; OGLTexture null_image_cube_array; OGLTexture null_image_3d; - OGLTexture null_image_rect; OGLTextureView null_image_view_1d; OGLTextureView null_image_view_2d; OGLTextureView null_image_view_2d_array; OGLTextureView null_image_view_cube; - std::array null_image_views; + std::array null_image_views{}; }; class Image : public VideoCommon::ImageBase { @@ -154,8 +154,6 @@ public: void UploadMemory(const ImageBufferMap& map, std::span copies); - void UploadMemory(const ImageBufferMap& map, std::span copies); - void DownloadMemory(ImageBufferMap& map, std::span copies); GLuint StorageHandle() noexcept; @@ -170,7 +168,6 @@ private: void CopyImageToBuffer(const VideoCommon::BufferImageCopy& copy, size_t buffer_offset); OGLTexture texture; - OGLBuffer buffer; OGLTextureView store_view; GLenum gl_internal_format = GL_NONE; GLenum gl_format = GL_NONE; @@ -182,10 +179,17 @@ class ImageView : public VideoCommon::ImageViewBase { public: explicit ImageView(TextureCacheRuntime&, const VideoCommon::ImageViewInfo&, ImageId, Image&); + explicit ImageView(TextureCacheRuntime&, const VideoCommon::ImageInfo&, + const VideoCommon::ImageViewInfo&, GPUVAddr); + explicit ImageView(TextureCacheRuntime&, const VideoCommon::ImageInfo& info, + const VideoCommon::ImageViewInfo& view_info); explicit 
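// StorageView() above lazily caches one extra view per texture type, split
// into signed and unsigned tables because typed image access needs a matching
// integer format. The caching scheme in isolation; the table size and the
// MakeView callback are illustrative stand-ins, with the real array size
// coming from the Shader::TextureType enum:
#include <array>
#include <cstddef>
#include <functional>

using GLuintSketch = unsigned int; // avoids pulling in GL headers here

struct StorageViewCache {
    static constexpr std::size_t NUM_TYPES = 8; // texture types used above
    std::array<GLuintSketch, NUM_TYPES> signeds{};
    std::array<GLuintSketch, NUM_TYPES> unsigneds{};

    GLuintSketch Get(std::size_t type_index, bool is_signed,
                     const std::function<GLuintSketch()>& make_view) {
        auto& views = is_signed ? signeds : unsigneds;
        GLuintSketch& view = views[type_index];
        if (view == 0) {
            view = make_view(); // created on first use, reused afterwards
        }
        return view;
    }
};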
ImageView(TextureCacheRuntime&, const VideoCommon::NullImageParams&); - [[nodiscard]] GLuint Handle(ImageViewType query_type) const noexcept { - return views[static_cast(query_type)]; + [[nodiscard]] GLuint StorageView(Shader::TextureType texture_type, + Shader::ImageFormat image_format); + + [[nodiscard]] GLuint Handle(Shader::TextureType handle_type) const noexcept { + return views[static_cast(handle_type)]; } [[nodiscard]] GLuint DefaultHandle() const noexcept { @@ -196,15 +200,38 @@ public: return internal_format; } -private: - void SetupView(const Device& device, Image& image, ImageViewType view_type, GLuint handle, - const VideoCommon::ImageViewInfo& info, - VideoCommon::SubresourceRange view_range); + [[nodiscard]] GPUVAddr GpuAddr() const noexcept { + return gpu_addr; + } - std::array views{}; + [[nodiscard]] u32 BufferSize() const noexcept { + return buffer_size; + } + +private: + struct StorageViews { + std::array signeds{}; + std::array unsigneds{}; + }; + + void SetupView(Shader::TextureType view_type); + + GLuint MakeView(Shader::TextureType view_type, GLenum view_format); + + std::array views{}; std::vector stored_views; - GLuint default_handle = 0; + std::unique_ptr storage_views; GLenum internal_format = GL_NONE; + GLuint default_handle = 0; + GPUVAddr gpu_addr = 0; + u32 buffer_size = 0; + GLuint original_texture = 0; + int num_samples = 0; + VideoCommon::SubresourceRange flat_range; + VideoCommon::SubresourceRange full_range; + std::array swizzle{}; + bool set_object_label = false; + bool is_render_target = false; }; class ImageAlloc : public VideoCommon::ImageAllocBase {}; diff --git a/src/video_core/renderer_opengl/maxwell_to_gl.h b/src/video_core/renderer_opengl/maxwell_to_gl.h index f7ad8f370..672f94bfc 100755 --- a/src/video_core/renderer_opengl/maxwell_to_gl.h +++ b/src/video_core/renderer_opengl/maxwell_to_gl.h @@ -5,12 +5,120 @@ #pragma once #include + #include "video_core/engines/maxwell_3d.h" +#include "video_core/surface.h" namespace OpenGL::MaxwellToGL { using Maxwell = Tegra::Engines::Maxwell3D::Regs; +struct FormatTuple { + GLenum internal_format; + GLenum format = GL_NONE; + GLenum type = GL_NONE; +}; + +constexpr std::array FORMAT_TABLE = {{ + {GL_RGBA8, GL_RGBA, GL_UNSIGNED_INT_8_8_8_8_REV}, // A8B8G8R8_UNORM + {GL_RGBA8_SNORM, GL_RGBA, GL_BYTE}, // A8B8G8R8_SNORM + {GL_RGBA8I, GL_RGBA_INTEGER, GL_BYTE}, // A8B8G8R8_SINT + {GL_RGBA8UI, GL_RGBA_INTEGER, GL_UNSIGNED_BYTE}, // A8B8G8R8_UINT + {GL_RGB565, GL_RGB, GL_UNSIGNED_SHORT_5_6_5}, // R5G6B5_UNORM + {GL_RGB565, GL_RGB, GL_UNSIGNED_SHORT_5_6_5_REV}, // B5G6R5_UNORM + {GL_RGB5_A1, GL_BGRA, GL_UNSIGNED_SHORT_1_5_5_5_REV}, // A1R5G5B5_UNORM + {GL_RGB10_A2, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV}, // A2B10G10R10_UNORM + {GL_RGB10_A2UI, GL_RGBA_INTEGER, GL_UNSIGNED_INT_2_10_10_10_REV}, // A2B10G10R10_UINT + {GL_RGB5_A1, GL_RGBA, GL_UNSIGNED_SHORT_1_5_5_5_REV}, // A1B5G5R5_UNORM + {GL_R8, GL_RED, GL_UNSIGNED_BYTE}, // R8_UNORM + {GL_R8_SNORM, GL_RED, GL_BYTE}, // R8_SNORM + {GL_R8I, GL_RED_INTEGER, GL_BYTE}, // R8_SINT + {GL_R8UI, GL_RED_INTEGER, GL_UNSIGNED_BYTE}, // R8_UINT + {GL_RGBA16F, GL_RGBA, GL_HALF_FLOAT}, // R16G16B16A16_FLOAT + {GL_RGBA16, GL_RGBA, GL_UNSIGNED_SHORT}, // R16G16B16A16_UNORM + {GL_RGBA16_SNORM, GL_RGBA, GL_SHORT}, // R16G16B16A16_SNORM + {GL_RGBA16I, GL_RGBA_INTEGER, GL_SHORT}, // R16G16B16A16_SINT + {GL_RGBA16UI, GL_RGBA_INTEGER, GL_UNSIGNED_SHORT}, // R16G16B16A16_UINT + {GL_R11F_G11F_B10F, GL_RGB, GL_UNSIGNED_INT_10F_11F_11F_REV}, // B10G11R11_FLOAT + {GL_RGBA32UI, 
GL_RGBA_INTEGER, GL_UNSIGNED_INT}, // R32G32B32A32_UINT + {GL_COMPRESSED_RGBA_S3TC_DXT1_EXT}, // BC1_RGBA_UNORM + {GL_COMPRESSED_RGBA_S3TC_DXT3_EXT}, // BC2_UNORM + {GL_COMPRESSED_RGBA_S3TC_DXT5_EXT}, // BC3_UNORM + {GL_COMPRESSED_RED_RGTC1}, // BC4_UNORM + {GL_COMPRESSED_SIGNED_RED_RGTC1}, // BC4_SNORM + {GL_COMPRESSED_RG_RGTC2}, // BC5_UNORM + {GL_COMPRESSED_SIGNED_RG_RGTC2}, // BC5_SNORM + {GL_COMPRESSED_RGBA_BPTC_UNORM}, // BC7_UNORM + {GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT}, // BC6H_UFLOAT + {GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT}, // BC6H_SFLOAT + {GL_COMPRESSED_RGBA_ASTC_4x4_KHR}, // ASTC_2D_4X4_UNORM + {GL_RGBA8, GL_RGBA, GL_UNSIGNED_BYTE}, // B8G8R8A8_UNORM + {GL_RGBA32F, GL_RGBA, GL_FLOAT}, // R32G32B32A32_FLOAT + {GL_RGBA32I, GL_RGBA_INTEGER, GL_INT}, // R32G32B32A32_SINT + {GL_RG32F, GL_RG, GL_FLOAT}, // R32G32_FLOAT + {GL_RG32I, GL_RG_INTEGER, GL_INT}, // R32G32_SINT + {GL_R32F, GL_RED, GL_FLOAT}, // R32_FLOAT + {GL_R16F, GL_RED, GL_HALF_FLOAT}, // R16_FLOAT + {GL_R16, GL_RED, GL_UNSIGNED_SHORT}, // R16_UNORM + {GL_R16_SNORM, GL_RED, GL_SHORT}, // R16_SNORM + {GL_R16UI, GL_RED_INTEGER, GL_UNSIGNED_SHORT}, // R16_UINT + {GL_R16I, GL_RED_INTEGER, GL_SHORT}, // R16_SINT + {GL_RG16, GL_RG, GL_UNSIGNED_SHORT}, // R16G16_UNORM + {GL_RG16F, GL_RG, GL_HALF_FLOAT}, // R16G16_FLOAT + {GL_RG16UI, GL_RG_INTEGER, GL_UNSIGNED_SHORT}, // R16G16_UINT + {GL_RG16I, GL_RG_INTEGER, GL_SHORT}, // R16G16_SINT + {GL_RG16_SNORM, GL_RG, GL_SHORT}, // R16G16_SNORM + {GL_RGB32F, GL_RGB, GL_FLOAT}, // R32G32B32_FLOAT + {GL_SRGB8_ALPHA8, GL_RGBA, GL_UNSIGNED_INT_8_8_8_8_REV}, // A8B8G8R8_SRGB + {GL_RG8, GL_RG, GL_UNSIGNED_BYTE}, // R8G8_UNORM + {GL_RG8_SNORM, GL_RG, GL_BYTE}, // R8G8_SNORM + {GL_RG8I, GL_RG_INTEGER, GL_BYTE}, // R8G8_SINT + {GL_RG8UI, GL_RG_INTEGER, GL_UNSIGNED_BYTE}, // R8G8_UINT + {GL_RG32UI, GL_RG_INTEGER, GL_UNSIGNED_INT}, // R32G32_UINT + {GL_RGB16F, GL_RGBA, GL_HALF_FLOAT}, // R16G16B16X16_FLOAT + {GL_R32UI, GL_RED_INTEGER, GL_UNSIGNED_INT}, // R32_UINT + {GL_R32I, GL_RED_INTEGER, GL_INT}, // R32_SINT + {GL_COMPRESSED_RGBA_ASTC_8x8_KHR}, // ASTC_2D_8X8_UNORM + {GL_COMPRESSED_RGBA_ASTC_8x5_KHR}, // ASTC_2D_8X5_UNORM + {GL_COMPRESSED_RGBA_ASTC_5x4_KHR}, // ASTC_2D_5X4_UNORM + {GL_SRGB8_ALPHA8, GL_RGBA, GL_UNSIGNED_BYTE}, // B8G8R8A8_SRGB + {GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT}, // BC1_RGBA_SRGB + {GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT}, // BC2_SRGB + {GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT}, // BC3_SRGB + {GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM}, // BC7_SRGB + {GL_RGBA4, GL_RGBA, GL_UNSIGNED_SHORT_4_4_4_4_REV}, // A4B4G4R4_UNORM + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR}, // ASTC_2D_4X4_SRGB + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR}, // ASTC_2D_8X8_SRGB + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR}, // ASTC_2D_8X5_SRGB + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR}, // ASTC_2D_5X4_SRGB + {GL_COMPRESSED_RGBA_ASTC_5x5_KHR}, // ASTC_2D_5X5_UNORM + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR}, // ASTC_2D_5X5_SRGB + {GL_COMPRESSED_RGBA_ASTC_10x8_KHR}, // ASTC_2D_10X8_UNORM + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR}, // ASTC_2D_10X8_SRGB + {GL_COMPRESSED_RGBA_ASTC_6x6_KHR}, // ASTC_2D_6X6_UNORM + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR}, // ASTC_2D_6X6_SRGB + {GL_COMPRESSED_RGBA_ASTC_10x10_KHR}, // ASTC_2D_10X10_UNORM + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR}, // ASTC_2D_10X10_SRGB + {GL_COMPRESSED_RGBA_ASTC_12x12_KHR}, // ASTC_2D_12X12_UNORM + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR}, // ASTC_2D_12X12_SRGB + {GL_COMPRESSED_RGBA_ASTC_8x6_KHR}, // ASTC_2D_8X6_UNORM + 
{GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR}, // ASTC_2D_8X6_SRGB + {GL_COMPRESSED_RGBA_ASTC_6x5_KHR}, // ASTC_2D_6X5_UNORM + {GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR}, // ASTC_2D_6X5_SRGB + {GL_RGB9_E5, GL_RGB, GL_UNSIGNED_INT_5_9_9_9_REV}, // E5B9G9R9_FLOAT + {GL_DEPTH_COMPONENT32F, GL_DEPTH_COMPONENT, GL_FLOAT}, // D32_FLOAT + {GL_DEPTH_COMPONENT16, GL_DEPTH_COMPONENT, GL_UNSIGNED_SHORT}, // D16_UNORM + {GL_DEPTH24_STENCIL8, GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8}, // D24_UNORM_S8_UINT + {GL_DEPTH24_STENCIL8, GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8}, // S8_UINT_D24_UNORM + {GL_DEPTH32F_STENCIL8, GL_DEPTH_STENCIL, + GL_FLOAT_32_UNSIGNED_INT_24_8_REV}, // D32_FLOAT_S8_UINT +}}; + +inline const FormatTuple& GetFormatTuple(VideoCore::Surface::PixelFormat pixel_format) { + ASSERT(static_cast(pixel_format) < FORMAT_TABLE.size()); + return FORMAT_TABLE[static_cast(pixel_format)]; +} + inline GLenum VertexFormat(Maxwell::VertexAttribute attrib) { switch (attrib.type) { case Maxwell::VertexAttribute::Type::UnsignedNorm: diff --git a/src/video_core/renderer_opengl/renderer_opengl.cpp b/src/video_core/renderer_opengl/renderer_opengl.cpp index a718bff7a..4a41db25d 100755 --- a/src/video_core/renderer_opengl/renderer_opengl.cpp +++ b/src/video_core/renderer_opengl/renderer_opengl.cpp @@ -24,6 +24,7 @@ #include "video_core/host_shaders/opengl_present_frag.h" #include "video_core/host_shaders/opengl_present_vert.h" #include "video_core/renderer_opengl/gl_rasterizer.h" +#include "video_core/renderer_opengl/gl_shader_util.h" #include "video_core/renderer_opengl/gl_shader_manager.h" #include "video_core/renderer_opengl/renderer_opengl.h" #include "video_core/textures/decoders.h" @@ -139,6 +140,26 @@ RendererOpenGL::RendererOpenGL(Core::TelemetrySession& telemetry_session_, } AddTelemetryFields(); InitOpenGLObjects(); + + // Initialize default attributes to match hardware's disabled attributes + GLint max_attribs{}; + glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &max_attribs); + for (GLint attrib = 0; attrib < max_attribs; ++attrib) { + glVertexAttrib4f(attrib, 0.0f, 0.0f, 0.0f, 0.0f); + } + // Enable seamless cubemaps when per texture parameters are not available + if (!GLAD_GL_ARB_seamless_cubemap_per_texture && !GLAD_GL_AMD_seamless_cubemap_per_texture) { + glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS); + } + // Enable unified vertex attributes and query vertex buffer address when the driver supports it + if (device.HasVertexBufferUnifiedMemory()) { + glEnableClientState(GL_VERTEX_ATTRIB_ARRAY_UNIFIED_NV); + glEnableClientState(GL_ELEMENT_ARRAY_UNIFIED_NV); + + glMakeNamedBufferResidentNV(vertex_buffer.handle, GL_READ_ONLY); + glGetNamedBufferParameterui64vNV(vertex_buffer.handle, GL_BUFFER_GPU_ADDRESS_NV, + &vertex_buffer_address); + } } RendererOpenGL::~RendererOpenGL() = default; @@ -233,18 +254,10 @@ void RendererOpenGL::InitOpenGLObjects() { Settings::values.bg_blue.GetValue(), 0.0f); // Create shader programs - OGLShader vertex_shader; - vertex_shader.Create(HostShaders::OPENGL_PRESENT_VERT, GL_VERTEX_SHADER); - - OGLShader fragment_shader; - fragment_shader.Create(HostShaders::OPENGL_PRESENT_FRAG, GL_FRAGMENT_SHADER); - - vertex_program.Create(true, false, vertex_shader.handle); - fragment_program.Create(true, false, fragment_shader.handle); - - pipeline.Create(); - glUseProgramStages(pipeline.handle, GL_VERTEX_SHADER_BIT, vertex_program.handle); - glUseProgramStages(pipeline.handle, GL_FRAGMENT_SHADER_BIT, fragment_program.handle); + present_program.handle = glCreateProgram(); + AttachShader(GL_VERTEX_SHADER, 
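// A hypothetical caller of MaxwellToGL::GetFormatTuple, allocating immutable
// storage for a 2D texture; the single mip level and dimensions are
// illustrative:
#include <glad/glad.h>

#include "video_core/renderer_opengl/maxwell_to_gl.h"
#include "video_core/surface.h"

GLuint MakeTexture2D(VideoCore::Surface::PixelFormat pixel_format, GLsizei width,
                     GLsizei height) {
    const auto& tuple = OpenGL::MaxwellToGL::GetFormatTuple(pixel_format);
    GLuint handle{};
    glCreateTextures(GL_TEXTURE_2D, 1, &handle);
    glTextureStorage2D(handle, 1, tuple.internal_format, width, height);
    // For compressed formats the table leaves format/type as GL_NONE and
    // uploads go through glCompressedTextureSubImage2D instead.
    return handle;
}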
present_program.handle, HostShaders::OPENGL_PRESENT_VERT); + AttachShader(GL_FRAGMENT_SHADER, present_program.handle, HostShaders::OPENGL_PRESENT_FRAG); + LinkProgram(present_program.handle); // Generate presentation sampler present_sampler.Create(); @@ -266,21 +279,6 @@ void RendererOpenGL::InitOpenGLObjects() { // Clear screen to black LoadColorToActiveGLTexture(0, 0, 0, 0, screen_info.texture); - - // Enable seamless cubemaps when per texture parameters are not available - if (!GLAD_GL_ARB_seamless_cubemap_per_texture && !GLAD_GL_AMD_seamless_cubemap_per_texture) { - glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS); - } - - // Enable unified vertex attributes and query vertex buffer address when the driver supports it - if (device.HasVertexBufferUnifiedMemory()) { - glEnableClientState(GL_VERTEX_ATTRIB_ARRAY_UNIFIED_NV); - glEnableClientState(GL_ELEMENT_ARRAY_UNIFIED_NV); - - glMakeNamedBufferResidentNV(vertex_buffer.handle, GL_READ_ONLY); - glGetNamedBufferParameterui64vNV(vertex_buffer.handle, GL_BUFFER_GPU_ADDRESS_NV, - &vertex_buffer_address); - } } void RendererOpenGL::AddTelemetryFields() { @@ -340,12 +338,11 @@ void RendererOpenGL::DrawScreen(const Layout::FramebufferLayout& layout) { glClearColor(Settings::values.bg_red.GetValue(), Settings::values.bg_green.GetValue(), Settings::values.bg_blue.GetValue(), 0.0f); } - // Set projection matrix const std::array ortho_matrix = MakeOrthographicMatrix(static_cast(layout.width), static_cast(layout.height)); - glProgramUniformMatrix3x2fv(vertex_program.handle, ModelViewMatrixLocation, 1, GL_FALSE, - std::data(ortho_matrix)); + program_manager.BindProgram(present_program.handle); + glUniformMatrix3x2fv(ModelViewMatrixLocation, 1, GL_FALSE, ortho_matrix.data()); const auto& texcoords = screen_info.display_texcoords; auto left = texcoords.left; @@ -406,8 +403,6 @@ void RendererOpenGL::DrawScreen(const Layout::FramebufferLayout& layout) { state_tracker.NotifyClipControl(); state_tracker.NotifyAlphaTest(); - program_manager.BindHostPipeline(pipeline.handle); - state_tracker.ClipControl(GL_LOWER_LEFT, GL_ZERO_TO_ONE); glEnable(GL_CULL_FACE); if (screen_info.display_srgb) { @@ -455,7 +450,8 @@ void RendererOpenGL::DrawScreen(const Layout::FramebufferLayout& layout) { glClear(GL_COLOR_BUFFER_BIT); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); - program_manager.RestoreGuestPipeline(); + // TODO + // program_manager.RestoreGuestPipeline(); } void RendererOpenGL::RenderScreenshot() { diff --git a/src/video_core/renderer_opengl/renderer_opengl.h b/src/video_core/renderer_opengl/renderer_opengl.h index 0b66f8332..b3ee55665 100755 --- a/src/video_core/renderer_opengl/renderer_opengl.h +++ b/src/video_core/renderer_opengl/renderer_opengl.h @@ -12,7 +12,6 @@ #include "video_core/renderer_opengl/gl_device.h" #include "video_core/renderer_opengl/gl_rasterizer.h" #include "video_core/renderer_opengl/gl_resource_manager.h" -#include "video_core/renderer_opengl/gl_shader_manager.h" #include "video_core/renderer_opengl/gl_state_tracker.h" namespace Core { @@ -111,9 +110,7 @@ private: // OpenGL object IDs OGLSampler present_sampler; OGLBuffer vertex_buffer; - OGLProgram vertex_program; - OGLProgram fragment_program; - OGLPipeline pipeline; + OGLProgram present_program; OGLFramebuffer screenshot_framebuffer; // GPU address of the vertex buffer diff --git a/src/video_core/renderer_opengl/util_shaders.cpp b/src/video_core/renderer_opengl/util_shaders.cpp index 8fb5be393..8aa0683c8 100755 --- a/src/video_core/renderer_opengl/util_shaders.cpp +++ 
b/src/video_core/renderer_opengl/util_shaders.cpp @@ -16,8 +16,8 @@ #include "video_core/host_shaders/opengl_copy_bc4_comp.h" #include "video_core/host_shaders/opengl_copy_bgra_comp.h" #include "video_core/host_shaders/pitch_unswizzle_comp.h" -#include "video_core/renderer_opengl/gl_resource_manager.h" #include "video_core/renderer_opengl/gl_shader_manager.h" +#include "video_core/renderer_opengl/gl_shader_util.h" #include "video_core/renderer_opengl/gl_texture_cache.h" #include "video_core/renderer_opengl/util_shaders.h" #include "video_core/texture_cache/accelerated_swizzle.h" @@ -41,13 +41,12 @@ using VideoCommon::Accelerated::MakeBlockLinearSwizzle3DParams; using VideoCore::Surface::BytesPerBlock; namespace { - OGLProgram MakeProgram(std::string_view source) { - OGLShader shader; - shader.Create(source, GL_COMPUTE_SHADER); OGLProgram program; - program.Create(true, false, shader.handle); + program.handle = glCreateProgram(); + AttachShader(GL_COMPUTE_SHADER, program.handle, source); + LinkProgram(program.handle); return program; } @@ -55,7 +54,6 @@ size_t NumPixelsInCopy(const VideoCommon::ImageCopy& copy) { return static_cast<size_t>(copy.extent.width * copy.extent.height * copy.src_subresource.num_layers); } - } // Anonymous namespace UtilShaders::UtilShaders(ProgramManager& program_manager_) @@ -86,7 +84,7 @@ void UtilShaders::ASTCDecode(Image& image, const ImageBufferMap& map, .width = VideoCore::Surface::DefaultBlockWidth(image.info.format), .height = VideoCore::Surface::DefaultBlockHeight(image.info.format), }; - program_manager.BindHostCompute(astc_decoder_program.handle); + program_manager.BindProgram(astc_decoder_program.handle); glBindBufferBase(GL_SHADER_STORAGE_BUFFER, BINDING_SWIZZLE_BUFFER, swizzle_table_buffer.handle); glBindBufferBase(GL_SHADER_STORAGE_BUFFER, BINDING_ENC_BUFFER, astc_buffer.handle); @@ -134,7 +132,7 @@ void UtilShaders::BlockLinearUpload2D(Image& image, const ImageBufferMap& map, static constexpr GLuint BINDING_INPUT_BUFFER = 1; static constexpr GLuint BINDING_OUTPUT_IMAGE = 0; - program_manager.BindHostCompute(block_linear_unswizzle_2d_program.handle); + program_manager.BindProgram(block_linear_unswizzle_2d_program.handle); glFlushMappedNamedBufferRange(map.buffer, map.offset, image.guest_size_bytes); glBindBufferBase(GL_SHADER_STORAGE_BUFFER, BINDING_SWIZZLE_BUFFER, swizzle_table_buffer.handle); @@ -173,7 +171,7 @@ void UtilShaders::BlockLinearUpload3D(Image& image, const ImageBufferMap& map, static constexpr GLuint BINDING_OUTPUT_IMAGE = 0; glFlushMappedNamedBufferRange(map.buffer, map.offset, image.guest_size_bytes); - program_manager.BindHostCompute(block_linear_unswizzle_3d_program.handle); + program_manager.BindProgram(block_linear_unswizzle_3d_program.handle); glBindBufferBase(GL_SHADER_STORAGE_BUFFER, BINDING_SWIZZLE_BUFFER, swizzle_table_buffer.handle); const GLenum store_format = StoreFormat(BytesPerBlock(image.info.format)); @@ -222,7 +220,7 @@ void UtilShaders::PitchUpload(Image& image, const ImageBufferMap& map, UNIMPLEMENTED_IF_MSG(!std::has_single_bit(bytes_per_block), "Non-power of two images are not implemented"); - program_manager.BindHostCompute(pitch_unswizzle_program.handle); + program_manager.BindProgram(pitch_unswizzle_program.handle); glFlushMappedNamedBufferRange(map.buffer, map.offset, image.guest_size_bytes); glUniform2ui(LOC_ORIGIN, 0, 0); glUniform2i(LOC_DESTINATION, 0, 0); @@ -250,7 +248,7 @@ void UtilShaders::CopyBC4(Image& dst_image, Image& src_image, std::span, }; +template <u32 num_textures> +inline constexpr
DescriptorBankInfo TEXTURE_DESCRIPTOR_BANK_INFO{ + .uniform_buffers = 0, + .storage_buffers = 0, + .texture_buffers = 0, + .image_buffers = 0, + .textures = num_textures, + .images = 0, + .score = 2, +}; constexpr VkDescriptorSetLayoutCreateInfo TWO_TEXTURES_DESCRIPTOR_SET_LAYOUT_CREATE_INFO{ .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, .pNext = nullptr, @@ -323,18 +333,19 @@ void BindBlitState(vk::CommandBuffer cmdbuf, VkPipelineLayout layout, const Regi cmdbuf.SetScissor(0, scissor); cmdbuf.PushConstants(layout, VK_SHADER_STAGE_VERTEX_BIT, push_constants); } - } // Anonymous namespace BlitImageHelper::BlitImageHelper(const Device& device_, VKScheduler& scheduler_, - StateTracker& state_tracker_, VKDescriptorPool& descriptor_pool) + StateTracker& state_tracker_, DescriptorPool& descriptor_pool) : device{device_}, scheduler{scheduler_}, state_tracker{state_tracker_}, one_texture_set_layout(device.GetLogical().CreateDescriptorSetLayout( ONE_TEXTURE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO)), two_textures_set_layout(device.GetLogical().CreateDescriptorSetLayout( TWO_TEXTURES_DESCRIPTOR_SET_LAYOUT_CREATE_INFO)), - one_texture_descriptor_allocator(descriptor_pool, *one_texture_set_layout), - two_textures_descriptor_allocator(descriptor_pool, *two_textures_set_layout), + one_texture_descriptor_allocator{ + descriptor_pool.Allocator(*one_texture_set_layout, TEXTURE_DESCRIPTOR_BANK_INFO<1>)}, + two_textures_descriptor_allocator{ + descriptor_pool.Allocator(*two_textures_set_layout, TEXTURE_DESCRIPTOR_BANK_INFO<2>)}, one_texture_pipeline_layout(device.GetLogical().CreatePipelineLayout( PipelineLayoutCreateInfo(one_texture_set_layout.address()))), two_textures_pipeline_layout(device.GetLogical().CreatePipelineLayout( @@ -362,7 +373,7 @@ void BlitImageHelper::BlitColor(const Framebuffer* dst_framebuffer, const ImageV .operation = operation, }; const VkPipelineLayout layout = *one_texture_pipeline_layout; - const VkImageView src_view = src_image_view.Handle(ImageViewType::e2D); + const VkImageView src_view = src_image_view.Handle(Shader::TextureType::Color2D); const VkSampler sampler = is_linear ? 
*linear_sampler : *nearest_sampler; const VkPipeline pipeline = FindOrEmplacePipeline(key); const VkDescriptorSet descriptor_set = one_texture_descriptor_allocator.Commit(); @@ -416,7 +427,6 @@ void BlitImageHelper::ConvertD32ToR32(const Framebuffer* dst_framebuffer, void BlitImageHelper::ConvertR32ToD32(const Framebuffer* dst_framebuffer, const ImageView& src_image_view) { - ConvertColorToDepthPipeline(convert_r32_to_d32_pipeline, dst_framebuffer->RenderPass()); Convert(*convert_r32_to_d32_pipeline, dst_framebuffer, src_image_view); } @@ -436,7 +446,7 @@ void BlitImageHelper::ConvertR16ToD16(const Framebuffer* dst_framebuffer, void BlitImageHelper::Convert(VkPipeline pipeline, const Framebuffer* dst_framebuffer, const ImageView& src_image_view) { const VkPipelineLayout layout = *one_texture_pipeline_layout; - const VkImageView src_view = src_image_view.Handle(ImageViewType::e2D); + const VkImageView src_view = src_image_view.Handle(Shader::TextureType::Color2D); const VkSampler sampler = *nearest_sampler; const VkDescriptorSet descriptor_set = one_texture_descriptor_allocator.Commit(); const VkExtent2D extent{ diff --git a/src/video_core/renderer_vulkan/blit_image.h b/src/video_core/renderer_vulkan/blit_image.h index 0d81a06ed..33ee095c1 100755 --- a/src/video_core/renderer_vulkan/blit_image.h +++ b/src/video_core/renderer_vulkan/blit_image.h @@ -31,7 +31,7 @@ struct BlitImagePipelineKey { class BlitImageHelper { public: explicit BlitImageHelper(const Device& device, VKScheduler& scheduler, - StateTracker& state_tracker, VKDescriptorPool& descriptor_pool); + StateTracker& state_tracker, DescriptorPool& descriptor_pool); ~BlitImageHelper(); void BlitColor(const Framebuffer* dst_framebuffer, const ImageView& src_image_view, diff --git a/src/video_core/renderer_vulkan/fixed_pipeline_state.cpp b/src/video_core/renderer_vulkan/fixed_pipeline_state.cpp index 362278f01..d70153df3 100755 --- a/src/video_core/renderer_vulkan/fixed_pipeline_state.cpp +++ b/src/video_core/renderer_vulkan/fixed_pipeline_state.cpp @@ -15,9 +15,7 @@ #include "video_core/renderer_vulkan/vk_state_tracker.h" namespace Vulkan { - namespace { - constexpr size_t POINT = 0; constexpr size_t LINE = 1; constexpr size_t POLYGON = 2; @@ -39,10 +37,20 @@ constexpr std::array POLYGON_OFFSET_ENABLE_LUT = { POLYGON, // Patches }; +void RefreshXfbState(VideoCommon::TransformFeedbackState& state, const Maxwell& regs) { + std::ranges::transform(regs.tfb_layouts, state.layouts.begin(), [](const auto& layout) { + return VideoCommon::TransformFeedbackState::Layout{ + .stream = layout.stream, + .varying_count = layout.varying_count, + .stride = layout.stride, + }; + }); + state.varyings = regs.tfb_varying_locs; +} } // Anonymous namespace void FixedPipelineState::Refresh(Tegra::Engines::Maxwell3D& maxwell3d, - bool has_extended_dynamic_state) { + bool has_extended_dynamic_state, bool has_dynamic_vertex_input) { const Maxwell& regs = maxwell3d.regs; const std::array enabled_lut{ regs.polygon_offset_point_enable, @@ -52,6 +60,9 @@ void FixedPipelineState::Refresh(Tegra::Engines::Maxwell3D& maxwell3d, const u32 topology_index = static_cast(regs.draw.topology.Value()); raw1 = 0; + extended_dynamic_state.Assign(has_extended_dynamic_state ? 1 : 0); + dynamic_vertex_input.Assign(has_dynamic_vertex_input ? 1 : 0); + xfb_enabled.Assign(regs.tfb_enabled != 0); primitive_restart_enable.Assign(regs.primitive_restart.enabled != 0 ? 1 : 0); depth_bias_enable.Assign(enabled_lut[POLYGON_OFFSET_ENABLE_LUT[topology_index]] != 0 ? 
1 : 0); depth_clamp_disabled.Assign(regs.view_volume_clip_control.depth_clamp_disabled.Value()); @@ -63,37 +74,66 @@ void FixedPipelineState::Refresh(Tegra::Engines::Maxwell3D& maxwell3d, tessellation_clockwise.Assign(regs.tess_mode.cw.Value()); logic_op_enable.Assign(regs.logic_op.enable != 0 ? 1 : 0); logic_op.Assign(PackLogicOp(regs.logic_op.operation)); - rasterize_enable.Assign(regs.rasterize_enable != 0 ? 1 : 0); topology.Assign(regs.draw.topology); msaa_mode.Assign(regs.multisample_mode); raw2 = 0; + rasterize_enable.Assign(regs.rasterize_enable != 0 ? 1 : 0); const auto test_func = regs.alpha_test_enabled != 0 ? regs.alpha_test_func : Maxwell::ComparisonOp::Always; alpha_test_func.Assign(PackComparisonOp(test_func)); early_z.Assign(regs.force_early_fragment_tests != 0 ? 1 : 0); + depth_enabled.Assign(regs.zeta_enable != 0 ? 1 : 0); + depth_format.Assign(static_cast(regs.zeta.format)); + y_negate.Assign(regs.screen_y_control.y_negate != 0 ? 1 : 0); + provoking_vertex_last.Assign(regs.provoking_vertex_last != 0 ? 1 : 0); + conservative_raster_enable.Assign(regs.conservative_raster_enable != 0 ? 1 : 0); + smooth_lines.Assign(regs.line_smooth_enable != 0 ? 1 : 0); + for (size_t i = 0; i < regs.rt.size(); ++i) { + color_formats[i] = static_cast(regs.rt[i].format); + } alpha_test_ref = Common::BitCast(regs.alpha_test_ref); point_size = Common::BitCast(regs.point_size); - if (maxwell3d.dirty.flags[Dirty::InstanceDivisors]) { - maxwell3d.dirty.flags[Dirty::InstanceDivisors] = false; - for (size_t index = 0; index < Maxwell::NumVertexArrays; ++index) { - const bool is_enabled = regs.instanced_arrays.IsInstancingEnabled(index); - binding_divisors[index] = is_enabled ? regs.vertex_array[index].divisor : 0; - } - } - if (maxwell3d.dirty.flags[Dirty::VertexAttributes]) { - maxwell3d.dirty.flags[Dirty::VertexAttributes] = false; - for (size_t index = 0; index < Maxwell::NumVertexAttributes; ++index) { - const auto& input = regs.vertex_attrib_format[index]; - auto& attribute = attributes[index]; - attribute.raw = 0; - attribute.enabled.Assign(input.IsConstant() ? 0 : 1); - attribute.buffer.Assign(input.buffer); - attribute.offset.Assign(input.offset); - attribute.type.Assign(static_cast(input.type.Value())); - attribute.size.Assign(static_cast(input.size.Value())); + if (maxwell3d.dirty.flags[Dirty::VertexInput]) { + if (has_dynamic_vertex_input) { + // Dirty flag will be reset by the command buffer update + static constexpr std::array LUT{ + 0u, // Invalid + 1u, // SignedNorm + 1u, // UnsignedNorm + 2u, // SignedInt + 3u, // UnsignedInt + 1u, // UnsignedScaled + 1u, // SignedScaled + 1u, // Float + }; + const auto& attrs = regs.vertex_attrib_format; + attribute_types = 0; + for (size_t i = 0; i < Maxwell::NumVertexAttributes; ++i) { + const u32 mask = attrs[i].constant != 0 ? 0 : 3; + const u32 type = LUT[static_cast(attrs[i].type.Value())]; + attribute_types |= static_cast(type & mask) << (i * 2); + } + } else { + maxwell3d.dirty.flags[Dirty::VertexInput] = false; + enabled_divisors = 0; + for (size_t index = 0; index < Maxwell::NumVertexArrays; ++index) { + const bool is_enabled = regs.instanced_arrays.IsInstancingEnabled(index); + binding_divisors[index] = is_enabled ? regs.vertex_array[index].divisor : 0; + enabled_divisors |= (is_enabled ? 
u64{1} : 0) << index; + } + for (size_t index = 0; index < Maxwell::NumVertexAttributes; ++index) { + const auto& input = regs.vertex_attrib_format[index]; + auto& attribute = attributes[index]; + attribute.raw = 0; + attribute.enabled.Assign(input.constant ? 0 : 1); + attribute.buffer.Assign(input.buffer); + attribute.offset.Assign(input.offset); + attribute.type.Assign(static_cast(input.type.Value())); + attribute.size.Assign(static_cast(input.size.Value())); + } } } if (maxwell3d.dirty.flags[Dirty::Blending]) { @@ -109,10 +149,12 @@ void FixedPipelineState::Refresh(Tegra::Engines::Maxwell3D& maxwell3d, return static_cast(viewport.swizzle.raw); }); } - if (!has_extended_dynamic_state) { - no_extended_dynamic_state.Assign(1); + if (!extended_dynamic_state) { dynamic_state.Refresh(regs); } + if (xfb_enabled) { + RefreshXfbState(xfb_state, regs); + } } void FixedPipelineState::BlendingAttachment::Refresh(const Maxwell& regs, size_t index) { diff --git a/src/video_core/renderer_vulkan/fixed_pipeline_state.h b/src/video_core/renderer_vulkan/fixed_pipeline_state.h index a0eb83a68..c9be37935 100755 --- a/src/video_core/renderer_vulkan/fixed_pipeline_state.h +++ b/src/video_core/renderer_vulkan/fixed_pipeline_state.h @@ -12,6 +12,7 @@ #include "video_core/engines/maxwell_3d.h" #include "video_core/surface.h" +#include "video_core/transform_feedback.h" namespace Vulkan { @@ -60,7 +61,7 @@ struct FixedPipelineState { void Refresh(const Maxwell& regs, size_t index); - constexpr std::array Mask() const noexcept { + std::array Mask() const noexcept { return {mask_r != 0, mask_g != 0, mask_b != 0, mask_a != 0}; } @@ -97,11 +98,11 @@ struct FixedPipelineState { BitField<20, 3, u32> type; BitField<23, 6, u32> size; - constexpr Maxwell::VertexAttribute::Type Type() const noexcept { + Maxwell::VertexAttribute::Type Type() const noexcept { return static_cast(type.Value()); } - constexpr Maxwell::VertexAttribute::Size Size() const noexcept { + Maxwell::VertexAttribute::Size Size() const noexcept { return static_cast(size.Value()); } }; @@ -167,37 +168,53 @@ struct FixedPipelineState { union { u32 raw1; - BitField<0, 1, u32> no_extended_dynamic_state; - BitField<2, 1, u32> primitive_restart_enable; - BitField<3, 1, u32> depth_bias_enable; - BitField<4, 1, u32> depth_clamp_disabled; - BitField<5, 1, u32> ndc_minus_one_to_one; - BitField<6, 2, u32> polygon_mode; - BitField<8, 5, u32> patch_control_points_minus_one; - BitField<13, 2, u32> tessellation_primitive; - BitField<15, 2, u32> tessellation_spacing; - BitField<17, 1, u32> tessellation_clockwise; - BitField<18, 1, u32> logic_op_enable; - BitField<19, 4, u32> logic_op; - BitField<23, 1, u32> rasterize_enable; + BitField<0, 1, u32> extended_dynamic_state; + BitField<1, 1, u32> dynamic_vertex_input; + BitField<2, 1, u32> xfb_enabled; + BitField<3, 1, u32> primitive_restart_enable; + BitField<4, 1, u32> depth_bias_enable; + BitField<5, 1, u32> depth_clamp_disabled; + BitField<6, 1, u32> ndc_minus_one_to_one; + BitField<7, 2, u32> polygon_mode; + BitField<9, 5, u32> patch_control_points_minus_one; + BitField<14, 2, u32> tessellation_primitive; + BitField<16, 2, u32> tessellation_spacing; + BitField<18, 1, u32> tessellation_clockwise; + BitField<19, 1, u32> logic_op_enable; + BitField<20, 4, u32> logic_op; BitField<24, 4, Maxwell::PrimitiveTopology> topology; BitField<28, 4, Tegra::Texture::MsaaMode> msaa_mode; }; union { u32 raw2; - BitField<0, 3, u32> alpha_test_func; - BitField<3, 1, u32> early_z; + BitField<0, 1, u32> rasterize_enable; + BitField<1, 3, u32> 
alpha_test_func; + BitField<4, 1, u32> early_z; + BitField<5, 1, u32> depth_enabled; + BitField<6, 5, u32> depth_format; + BitField<11, 1, u32> y_negate; + BitField<12, 1, u32> provoking_vertex_last; + BitField<13, 1, u32> conservative_raster_enable; + BitField<14, 1, u32> smooth_lines; }; + std::array color_formats; u32 alpha_test_ref; u32 point_size; - std::array binding_divisors; - std::array attributes; std::array attachments; std::array viewport_swizzles; - DynamicState dynamic_state; + union { + u64 attribute_types; // Used with VK_EXT_vertex_input_dynamic_state + u64 enabled_divisors; + }; + std::array attributes; + std::array binding_divisors; - void Refresh(Tegra::Engines::Maxwell3D& maxwell3d, bool has_extended_dynamic_state); + DynamicState dynamic_state; + VideoCommon::TransformFeedbackState xfb_state; + + void Refresh(Tegra::Engines::Maxwell3D& maxwell3d, bool has_extended_dynamic_state, + bool has_dynamic_vertex_input); size_t Hash() const noexcept; @@ -208,8 +225,24 @@ struct FixedPipelineState { } size_t Size() const noexcept { - const size_t total_size = sizeof *this; - return total_size - (no_extended_dynamic_state != 0 ? 0 : sizeof(DynamicState)); + if (xfb_enabled) { + // When transform feedback is enabled, use the whole struct + return sizeof(*this); + } + if (dynamic_vertex_input) { + // Exclude dynamic state and attributes + return offsetof(FixedPipelineState, attributes); + } + if (extended_dynamic_state) { + // Exclude dynamic state + return offsetof(FixedPipelineState, dynamic_state); + } + // Default + return offsetof(FixedPipelineState, xfb_state); + } + + u32 DynamicAttributeType(size_t index) const noexcept { + return (attribute_types >> (index * 2)) & 0b11; } }; static_assert(std::has_unique_object_representations_v); diff --git a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp index f088447e9..68a23b602 100755 --- a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp +++ b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp @@ -157,7 +157,7 @@ struct FormatTuple { {VK_FORMAT_R32_SFLOAT, Attachable | Storage}, // R32_FLOAT {VK_FORMAT_R16_SFLOAT, Attachable | Storage}, // R16_FLOAT {VK_FORMAT_R16_UNORM, Attachable | Storage}, // R16_UNORM - {VK_FORMAT_UNDEFINED}, // R16_SNORM + {VK_FORMAT_R16_SNORM, Attachable | Storage}, // R16_SNORM {VK_FORMAT_R16_UINT, Attachable | Storage}, // R16_UINT {VK_FORMAT_UNDEFINED}, // R16_SINT {VK_FORMAT_R16G16_UNORM, Attachable | Storage}, // R16G16_UNORM @@ -266,19 +266,20 @@ FormatInfo SurfaceFormat(const Device& device, FormatType format_type, bool with return {device.GetSupportedFormat(tuple.format, usage, format_type), attachable, storage}; } -VkShaderStageFlagBits ShaderStage(Tegra::Engines::ShaderType stage) { +VkShaderStageFlagBits ShaderStage(Shader::Stage stage) { switch (stage) { - case Tegra::Engines::ShaderType::Vertex: + case Shader::Stage::VertexA: + case Shader::Stage::VertexB: return VK_SHADER_STAGE_VERTEX_BIT; - case Tegra::Engines::ShaderType::TesselationControl: + case Shader::Stage::TessellationControl: return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; - case Tegra::Engines::ShaderType::TesselationEval: + case Shader::Stage::TessellationEval: return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; - case Tegra::Engines::ShaderType::Geometry: + case Shader::Stage::Geometry: return VK_SHADER_STAGE_GEOMETRY_BIT; - case Tegra::Engines::ShaderType::Fragment: + case Shader::Stage::Fragment: return VK_SHADER_STAGE_FRAGMENT_BIT; - case Tegra::Engines::ShaderType::Compute: + 
case Shader::Stage::Compute: return VK_SHADER_STAGE_COMPUTE_BIT; } UNIMPLEMENTED_MSG("Unimplemented shader stage={}", stage); @@ -685,6 +686,19 @@ VkCullModeFlagBits CullFace(Maxwell::CullFace cull_face) { return {}; } +VkPolygonMode PolygonMode(Maxwell::PolygonMode polygon_mode) { + switch (polygon_mode) { + case Maxwell::PolygonMode::Point: + return VK_POLYGON_MODE_POINT; + case Maxwell::PolygonMode::Line: + return VK_POLYGON_MODE_LINE; + case Maxwell::PolygonMode::Fill: + return VK_POLYGON_MODE_FILL; + } + UNIMPLEMENTED_MSG("Unimplemented polygon mode={}", polygon_mode); + return {}; +} + VkComponentSwizzle SwizzleSource(Tegra::Texture::SwizzleSource swizzle) { switch (swizzle) { case Tegra::Texture::SwizzleSource::Zero: @@ -741,4 +755,28 @@ VkSamplerReductionMode SamplerReduction(Tegra::Texture::SamplerReduction reducti return VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT; } +VkSampleCountFlagBits MsaaMode(Tegra::Texture::MsaaMode msaa_mode) { + switch (msaa_mode) { + case Tegra::Texture::MsaaMode::Msaa1x1: + return VK_SAMPLE_COUNT_1_BIT; + case Tegra::Texture::MsaaMode::Msaa2x1: + case Tegra::Texture::MsaaMode::Msaa2x1_D3D: + return VK_SAMPLE_COUNT_2_BIT; + case Tegra::Texture::MsaaMode::Msaa2x2: + case Tegra::Texture::MsaaMode::Msaa2x2_VC4: + case Tegra::Texture::MsaaMode::Msaa2x2_VC12: + return VK_SAMPLE_COUNT_4_BIT; + case Tegra::Texture::MsaaMode::Msaa4x2: + case Tegra::Texture::MsaaMode::Msaa4x2_D3D: + case Tegra::Texture::MsaaMode::Msaa4x2_VC8: + case Tegra::Texture::MsaaMode::Msaa4x2_VC24: + return VK_SAMPLE_COUNT_8_BIT; + case Tegra::Texture::MsaaMode::Msaa4x4: + return VK_SAMPLE_COUNT_16_BIT; + default: + UNREACHABLE_MSG("Invalid msaa_mode={}", static_cast(msaa_mode)); + return VK_SAMPLE_COUNT_1_BIT; + } +} + } // namespace Vulkan::MaxwellToVK diff --git a/src/video_core/renderer_vulkan/maxwell_to_vk.h b/src/video_core/renderer_vulkan/maxwell_to_vk.h index e3e06ba38..8a9616039 100755 --- a/src/video_core/renderer_vulkan/maxwell_to_vk.h +++ b/src/video_core/renderer_vulkan/maxwell_to_vk.h @@ -5,6 +5,7 @@ #pragma once #include "common/common_types.h" +#include "shader_recompiler/stage.h" #include "video_core/engines/maxwell_3d.h" #include "video_core/surface.h" #include "video_core/textures/texture.h" @@ -45,7 +46,7 @@ struct FormatInfo { [[nodiscard]] FormatInfo SurfaceFormat(const Device& device, FormatType format_type, bool with_srgb, PixelFormat pixel_format); -VkShaderStageFlagBits ShaderStage(Tegra::Engines::ShaderType stage); +VkShaderStageFlagBits ShaderStage(Shader::Stage stage); VkPrimitiveTopology PrimitiveTopology(const Device& device, Maxwell::PrimitiveTopology topology); @@ -65,10 +66,14 @@ VkFrontFace FrontFace(Maxwell::FrontFace front_face); VkCullModeFlagBits CullFace(Maxwell::CullFace cull_face); +VkPolygonMode PolygonMode(Maxwell::PolygonMode polygon_mode); + VkComponentSwizzle SwizzleSource(Tegra::Texture::SwizzleSource swizzle); VkViewportCoordinateSwizzleNV ViewportSwizzle(Maxwell::ViewportSwizzle swizzle); VkSamplerReductionMode SamplerReduction(Tegra::Texture::SamplerReduction reduction); +VkSampleCountFlagBits MsaaMode(Tegra::Texture::MsaaMode msaa_mode); + } // namespace Vulkan::MaxwellToVK diff --git a/src/video_core/renderer_vulkan/pipeline_helper.h b/src/video_core/renderer_vulkan/pipeline_helper.h new file mode 100755 index 000000000..4847db6b6 --- /dev/null +++ b/src/video_core/renderer_vulkan/pipeline_helper.h @@ -0,0 +1,154 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt 
file included. + +#pragma once + +#include <cstddef> + +#include <boost/container/small_vector.hpp> + +#include "common/assert.h" +#include "common/common_types.h" +#include "shader_recompiler/shader_info.h" +#include "video_core/renderer_vulkan/vk_texture_cache.h" +#include "video_core/renderer_vulkan/vk_update_descriptor.h" +#include "video_core/texture_cache/texture_cache.h" +#include "video_core/texture_cache/types.h" +#include "video_core/textures/texture.h" +#include "video_core/vulkan_common/vulkan_device.h" + +namespace Vulkan { + +class DescriptorLayoutBuilder { +public: + DescriptorLayoutBuilder(const Device& device_) : device{&device_} {} + + bool CanUsePushDescriptor() const noexcept { + return device->IsKhrPushDescriptorSupported() && + num_descriptors <= device->MaxPushDescriptors(); + } + + vk::DescriptorSetLayout CreateDescriptorSetLayout(bool use_push_descriptor) const { + if (bindings.empty()) { + return nullptr; + } + const VkDescriptorSetLayoutCreateFlags flags = + use_push_descriptor ? VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR : 0; + return device->GetLogical().CreateDescriptorSetLayout({ + .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + .pNext = nullptr, + .flags = flags, + .bindingCount = static_cast<u32>(bindings.size()), + .pBindings = bindings.data(), + }); + } + + vk::DescriptorUpdateTemplateKHR CreateTemplate(VkDescriptorSetLayout descriptor_set_layout, + VkPipelineLayout pipeline_layout, + bool use_push_descriptor) const { + if (entries.empty()) { + return nullptr; + } + const VkDescriptorUpdateTemplateType type = + use_push_descriptor ? VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + : VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR; + return device->GetLogical().CreateDescriptorUpdateTemplateKHR({ + .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + .pNext = nullptr, + .flags = 0, + .descriptorUpdateEntryCount = static_cast<u32>(entries.size()), + .pDescriptorUpdateEntries = entries.data(), + .templateType = type, + .descriptorSetLayout = descriptor_set_layout, + .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS, + .pipelineLayout = pipeline_layout, + .set = 0, + }); + } + + vk::PipelineLayout CreatePipelineLayout(VkDescriptorSetLayout descriptor_set_layout) const { + return device->GetLogical().CreatePipelineLayout({ + .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .setLayoutCount = descriptor_set_layout ? 1U : 0U, + .pSetLayouts = bindings.empty() ?
nullptr : &descriptor_set_layout, + .pushConstantRangeCount = 0, + .pPushConstantRanges = nullptr, + }); + } + + void Add(const Shader::Info& info, VkShaderStageFlags stage) { + Add(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, stage, info.constant_buffer_descriptors); + Add(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, stage, info.storage_buffers_descriptors); + Add(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, stage, info.texture_buffer_descriptors); + Add(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, stage, info.image_buffer_descriptors); + Add(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, stage, info.texture_descriptors); + Add(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, stage, info.image_descriptors); + } + +private: + template + void Add(VkDescriptorType type, VkShaderStageFlags stage, const Descriptors& descriptors) { + const size_t num{descriptors.size()}; + for (size_t i = 0; i < num; ++i) { + bindings.push_back({ + .binding = binding, + .descriptorType = type, + .descriptorCount = descriptors[i].count, + .stageFlags = stage, + .pImmutableSamplers = nullptr, + }); + entries.push_back({ + .dstBinding = binding, + .dstArrayElement = 0, + .descriptorCount = descriptors[i].count, + .descriptorType = type, + .offset = offset, + .stride = sizeof(DescriptorUpdateEntry), + }); + ++binding; + num_descriptors += descriptors[i].count; + offset += sizeof(DescriptorUpdateEntry); + } + } + + const Device* device{}; + boost::container::small_vector bindings; + boost::container::small_vector entries; + u32 binding{}; + u32 num_descriptors{}; + size_t offset{}; +}; + +inline void PushImageDescriptors(const Shader::Info& info, const VkSampler*& samplers, + const ImageId*& image_view_ids, TextureCache& texture_cache, + VKUpdateDescriptorQueue& update_descriptor_queue) { + for (const auto& desc : info.texture_buffer_descriptors) { + image_view_ids += desc.count; + } + for (const auto& desc : info.image_buffer_descriptors) { + image_view_ids += desc.count; + } + for (const auto& desc : info.texture_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + const VkSampler sampler{*(samplers++)}; + ImageView& image_view{texture_cache.GetImageView(*(image_view_ids++))}; + const VkImageView vk_image_view{image_view.Handle(desc.type)}; + update_descriptor_queue.AddSampledImage(vk_image_view, sampler); + } + } + for (const auto& desc : info.image_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + ImageView& image_view{texture_cache.GetImageView(*(image_view_ids++))}; + if (desc.is_written) { + texture_cache.MarkModification(image_view.image_id); + } + const VkImageView vk_image_view{image_view.StorageView(desc.type, desc.format)}; + update_descriptor_queue.AddImage(vk_image_view); + } + } +} + +} // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/renderer_vulkan.cpp b/src/video_core/renderer_vulkan/renderer_vulkan.cpp index bec3a81d9..6fda06a7e 100755 --- a/src/video_core/renderer_vulkan/renderer_vulkan.cpp +++ b/src/video_core/renderer_vulkan/renderer_vulkan.cpp @@ -97,19 +97,14 @@ RendererVulkan::RendererVulkan(Core::TelemetrySession& telemetry_session_, Core::Frontend::EmuWindow& emu_window, Core::Memory::Memory& cpu_memory_, Tegra::GPU& gpu_, std::unique_ptr context_) try - : RendererBase(emu_window, std::move(context_)), - telemetry_session(telemetry_session_), - cpu_memory(cpu_memory_), - gpu(gpu_), - library(OpenLibrary()), + : RendererBase(emu_window, std::move(context_)), telemetry_session(telemetry_session_), + cpu_memory(cpu_memory_), gpu(gpu_), library(OpenLibrary()), 
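// NOTE: PushImageDescriptors above advances its `samplers` and `image_view_ids`
// pointers in descriptor-layout order, skipping texture/image *buffer*
// descriptors first because their texel-buffer views are written by the buffer
// cache instead. A minimal caller sketch; the local array names here are
// illustrative only, not part of this patch:
//
//   const VkSampler* sampler_ptr{sampler_handles.data()};
//   const ImageId* id_ptr{view_ids.data()};
//   PushImageDescriptors(stage_info, sampler_ptr, id_ptr, texture_cache,
//                        update_descriptor_queue);
//   // Both pointers now point one past the last entry this stage consumed,
//   // ready for the next shader stage's descriptors.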
instance(CreateInstance(library, dld, VK_API_VERSION_1_1, render_window.GetWindowInfo().type, true, Settings::values.renderer_debug.GetValue())), debug_callback(Settings::values.renderer_debug ? CreateDebugCallback(instance) : nullptr), surface(CreateSurface(instance, render_window)), - device(CreateDevice(instance, dld, *surface)), - memory_allocator(device, false), - state_tracker(gpu), - scheduler(device, state_tracker), + device(CreateDevice(instance, dld, *surface)), memory_allocator(device, false), + state_tracker(gpu), scheduler(device, state_tracker), swapchain(*surface, device, scheduler, render_window.GetFramebufferLayout().width, render_window.GetFramebufferLayout().height, false), blit_screen(cpu_memory, render_window, device, memory_allocator, swapchain, scheduler, @@ -130,35 +125,45 @@ void RendererVulkan::SwapBuffers(const Tegra::FramebufferConfig* framebuffer) { if (!framebuffer) { return; } - const auto& layout = render_window.GetFramebufferLayout(); - if (layout.width > 0 && layout.height > 0 && render_window.IsShown()) { - const VAddr framebuffer_addr = framebuffer->address + framebuffer->offset; - const bool use_accelerated = - rasterizer.AccelerateDisplay(*framebuffer, framebuffer_addr, framebuffer->stride); - const bool is_srgb = use_accelerated && screen_info.is_srgb; - if (swapchain.HasFramebufferChanged(layout) || swapchain.GetSrgbState() != is_srgb) { - swapchain.Create(layout.width, layout.height, is_srgb); - blit_screen.Recreate(); - } - - scheduler.WaitWorker(); - - while (!swapchain.AcquireNextImage()) { - swapchain.Create(layout.width, layout.height, is_srgb); - blit_screen.Recreate(); - } - const VkSemaphore render_semaphore = blit_screen.Draw(*framebuffer, use_accelerated); - - scheduler.Flush(render_semaphore); - - if (swapchain.Present(render_semaphore)) { - blit_screen.Recreate(); - } - gpu.RendererFrameEndNotify(); - rasterizer.TickFrame(); + SCOPE_EXIT({ render_window.OnFrameDisplayed(); }); + if (!render_window.IsShown()) { + return; } + const VAddr framebuffer_addr = framebuffer->address + framebuffer->offset; + const bool use_accelerated = + rasterizer.AccelerateDisplay(*framebuffer, framebuffer_addr, framebuffer->stride); + const bool is_srgb = use_accelerated && screen_info.is_srgb; - render_window.OnFrameDisplayed(); + bool has_been_recreated = false; + const auto recreate_swapchain = [&] { + if (!has_been_recreated) { + has_been_recreated = true; + scheduler.WaitWorker(); + } + const Layout::FramebufferLayout layout = render_window.GetFramebufferLayout(); + swapchain.Create(layout.width, layout.height, is_srgb); + }; + if (swapchain.IsSubOptimal() || swapchain.HasColorSpaceChanged(is_srgb)) { + recreate_swapchain(); + } + bool is_outdated; + do { + swapchain.AcquireNextImage(); + is_outdated = swapchain.IsOutDated(); + if (is_outdated) { + recreate_swapchain(); + } + } while (is_outdated); + if (has_been_recreated) { + blit_screen.Recreate(); + } + const VkSemaphore render_semaphore = blit_screen.Draw(*framebuffer, use_accelerated); + scheduler.Flush(render_semaphore); + scheduler.WaitWorker(); + swapchain.Present(render_semaphore); + + gpu.RendererFrameEndNotify(); + rasterizer.TickFrame(); } void RendererVulkan::Report() const { diff --git a/src/video_core/renderer_vulkan/vk_blit_screen.cpp b/src/video_core/renderer_vulkan/vk_blit_screen.cpp index a1a32aabe..31054cf14 100755 --- a/src/video_core/renderer_vulkan/vk_blit_screen.cpp +++ b/src/video_core/renderer_vulkan/vk_blit_screen.cpp @@ -184,55 +184,51 @@ VkSemaphore VKBlitScreen::Draw(const 
Tegra::FramebufferConfig& framebuffer, bool .depth = 1, }, }; - scheduler.Record( - [buffer = *buffer, image = *raw_images[image_index], copy](vk::CommandBuffer cmdbuf) { - const VkImageMemoryBarrier base_barrier{ - .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, - .pNext = nullptr, - .srcAccessMask = 0, - .dstAccessMask = 0, - .oldLayout = VK_IMAGE_LAYOUT_GENERAL, - .newLayout = VK_IMAGE_LAYOUT_GENERAL, - .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, - .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, - .image = image, - .subresourceRange = - { - .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, - .baseMipLevel = 0, - .levelCount = 1, - .baseArrayLayer = 0, - .layerCount = 1, - }, - }; - VkImageMemoryBarrier read_barrier = base_barrier; - read_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT; - read_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; - read_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + scheduler.Record([this, copy, image_index](vk::CommandBuffer cmdbuf) { + const VkImage image = *raw_images[image_index]; + const VkImageMemoryBarrier base_barrier{ + .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + .pNext = nullptr, + .srcAccessMask = 0, + .dstAccessMask = 0, + .oldLayout = VK_IMAGE_LAYOUT_GENERAL, + .newLayout = VK_IMAGE_LAYOUT_GENERAL, + .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, + .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, + .image = image, + .subresourceRange{ + .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, + .baseMipLevel = 0, + .levelCount = 1, + .baseArrayLayer = 0, + .layerCount = 1, + }, + }; + VkImageMemoryBarrier read_barrier = base_barrier; + read_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT; + read_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + read_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; - VkImageMemoryBarrier write_barrier = base_barrier; - write_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; - write_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT; + VkImageMemoryBarrier write_barrier = base_barrier; + write_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + write_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT; - cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, - 0, read_barrier); - cmdbuf.CopyBufferToImage(buffer, image, VK_IMAGE_LAYOUT_GENERAL, copy); - cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, write_barrier); - }); + cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, + read_barrier); + cmdbuf.CopyBufferToImage(*buffer, image, VK_IMAGE_LAYOUT_GENERAL, copy); + cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, write_barrier); + }); } - scheduler.Record([renderpass = *renderpass, framebuffer = *framebuffers[image_index], - descriptor_set = descriptor_sets[image_index], buffer = *buffer, - size = swapchain.GetSize(), pipeline = *pipeline, - layout = *pipeline_layout](vk::CommandBuffer cmdbuf) { + scheduler.Record([this, image_index, size = swapchain.GetSize()](vk::CommandBuffer cmdbuf) { const VkClearValue clear_color{ .color = {.float32 = {0.0f, 0.0f, 0.0f, 0.0f}}, }; const VkRenderPassBeginInfo renderpass_bi{ .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, .pNext = nullptr, - .renderPass = renderpass, - .framebuffer = framebuffer, + .renderPass = *renderpass, + .framebuffer = *framebuffers[image_index], .renderArea = { .offset = {0, 0}, @@ -254,12 +250,13 @@ VkSemaphore VKBlitScreen::Draw(const Tegra::FramebufferConfig& framebuffer, bool 
.extent = size, }; cmdbuf.BeginRenderPass(renderpass_bi, VK_SUBPASS_CONTENTS_INLINE); - cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline); + cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline); cmdbuf.SetViewport(0, viewport); cmdbuf.SetScissor(0, scissor); - cmdbuf.BindVertexBuffer(0, buffer, offsetof(BufferData, vertices)); - cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, layout, 0, descriptor_set, {}); + cmdbuf.BindVertexBuffer(0, *buffer, offsetof(BufferData, vertices)); + cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline_layout, 0, + descriptor_sets[image_index], {}); cmdbuf.Draw(4, 1, 0, 0); cmdbuf.EndRenderPass(); }); @@ -301,8 +298,7 @@ void VKBlitScreen::CreateShaders() { void VKBlitScreen::CreateSemaphores() { semaphores.resize(image_count); - std::generate(semaphores.begin(), semaphores.end(), - [this] { return device.GetLogical().CreateSemaphore(); }); + std::ranges::generate(semaphores, [this] { return device.GetLogical().CreateSemaphore(); }); } void VKBlitScreen::CreateDescriptorPool() { @@ -630,8 +626,8 @@ void VKBlitScreen::CreateFramebuffers() { } void VKBlitScreen::ReleaseRawImages() { - for (std::size_t i = 0; i < raw_images.size(); ++i) { - scheduler.Wait(resource_ticks.at(i)); + for (const u64 tick : resource_ticks) { + scheduler.Wait(tick); } raw_images.clear(); raw_buffer_commits.clear(); diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp index 0df4e1a1c..ed57753c2 100755 --- a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp +++ b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp @@ -60,6 +60,27 @@ std::array MakeQuadIndices(u32 quad, u32 first) { } return indices; } + +vk::Buffer CreateBuffer(const Device& device, u64 size) { + VkBufferUsageFlags flags = + VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | + VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT; + if (device.IsExtTransformFeedbackSupported()) { + flags |= VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT; + } + return device.GetLogical().CreateBuffer({ + .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .size = size, + .usage = flags, + .sharingMode = VK_SHARING_MODE_EXCLUSIVE, + .queueFamilyIndexCount = 0, + .pQueueFamilyIndices = nullptr, + }); +} } // Anonymous namespace Buffer::Buffer(BufferCacheRuntime&, VideoCommon::NullBufferParams null_params) @@ -67,31 +88,46 @@ Buffer::Buffer(BufferCacheRuntime&, VideoCommon::NullBufferParams null_params) Buffer::Buffer(BufferCacheRuntime& runtime, VideoCore::RasterizerInterface& rasterizer_, VAddr cpu_addr_, u64 size_bytes_) - : VideoCommon::BufferBase(rasterizer_, cpu_addr_, size_bytes_) { - buffer = runtime.device.GetLogical().CreateBuffer(VkBufferCreateInfo{ - .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .size = SizeBytes(), - .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | - VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | - VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | - VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - .sharingMode = VK_SHARING_MODE_EXCLUSIVE, - .queueFamilyIndexCount = 0, - .pQueueFamilyIndices = nullptr, - }); + : 
VideoCommon::BufferBase(rasterizer_, cpu_addr_, size_bytes_), + device{&runtime.device}, buffer{CreateBuffer(*device, SizeBytes())}, + commit{runtime.memory_allocator.Commit(buffer, MemoryUsage::DeviceLocal)} { if (runtime.device.HasDebuggingToolAttached()) { buffer.SetObjectNameEXT(fmt::format("Buffer 0x{:x}", CpuAddr()).c_str()); } - commit = runtime.memory_allocator.Commit(buffer, MemoryUsage::DeviceLocal); +} + +VkBufferView Buffer::View(u32 offset, u32 size, VideoCore::Surface::PixelFormat format) { + if (!device) { + // Null buffer, return a null descriptor + return VK_NULL_HANDLE; + } + const auto it{std::ranges::find_if(views, [offset, size, format](const BufferView& view) { + return offset == view.offset && size == view.size && format == view.format; + })}; + if (it != views.end()) { + return *it->handle; + } + views.push_back({ + .offset = offset, + .size = size, + .format = format, + .handle = device->GetLogical().CreateBufferView({ + .sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .buffer = *buffer, + .format = MaxwellToVK::SurfaceFormat(*device, FormatType::Buffer, false, format).format, + .offset = offset, + .range = size, + }), + }); + return *views.back().handle; } BufferCacheRuntime::BufferCacheRuntime(const Device& device_, MemoryAllocator& memory_allocator_, VKScheduler& scheduler_, StagingBufferPool& staging_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_, - VKDescriptorPool& descriptor_pool) + DescriptorPool& descriptor_pool) : device{device_}, memory_allocator{memory_allocator_}, scheduler{scheduler_}, staging_pool{staging_pool_}, update_descriptor_queue{update_descriptor_queue_}, uint8_pass(device, scheduler, descriptor_pool, staging_pool, update_descriptor_queue), diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.h b/src/video_core/renderer_vulkan/vk_buffer_cache.h index 982e92191..d3fa2fba8 100755 --- a/src/video_core/renderer_vulkan/vk_buffer_cache.h +++ b/src/video_core/renderer_vulkan/vk_buffer_cache.h @@ -9,13 +9,14 @@ #include "video_core/renderer_vulkan/vk_compute_pass.h" #include "video_core/renderer_vulkan/vk_staging_buffer_pool.h" #include "video_core/renderer_vulkan/vk_update_descriptor.h" +#include "video_core/surface.h" #include "video_core/vulkan_common/vulkan_memory_allocator.h" #include "video_core/vulkan_common/vulkan_wrapper.h" namespace Vulkan { class Device; -class VKDescriptorPool; +class DescriptorPool; class VKScheduler; class BufferCacheRuntime; @@ -26,6 +27,8 @@ public: explicit Buffer(BufferCacheRuntime& runtime, VideoCore::RasterizerInterface& rasterizer_, VAddr cpu_addr_, u64 size_bytes_); + [[nodiscard]] VkBufferView View(u32 offset, u32 size, VideoCore::Surface::PixelFormat format); + [[nodiscard]] VkBuffer Handle() const noexcept { return *buffer; } @@ -35,8 +38,17 @@ public: } private: + struct BufferView { + u32 offset; + u32 size; + VideoCore::Surface::PixelFormat format; + vk::BufferView handle; + }; + + const Device* device{}; vk::Buffer buffer; MemoryCommit commit; + std::vector views; }; class BufferCacheRuntime { @@ -49,7 +61,7 @@ public: explicit BufferCacheRuntime(const Device& device_, MemoryAllocator& memory_manager_, VKScheduler& scheduler_, StagingBufferPool& staging_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_, - VKDescriptorPool& descriptor_pool); + DescriptorPool& descriptor_pool); void Finish(); @@ -85,6 +97,11 @@ public: BindBuffer(buffer, offset, size); } + void BindTextureBuffer(Buffer& buffer, u32 offset, u32 size, + 
VideoCore::Surface::PixelFormat format) { + update_descriptor_queue.AddTexelBuffer(buffer.View(offset, size, format)); + } + private: void BindBuffer(VkBuffer buffer, u32 offset, u32 size) { update_descriptor_queue.AddBuffer(buffer, offset, size); @@ -122,6 +139,7 @@ struct BufferCacheParams { static constexpr bool NEEDS_BIND_UNIFORM_INDEX = false; static constexpr bool NEEDS_BIND_STORAGE_INDEX = false; static constexpr bool USE_MEMORY_MAPS = true; + static constexpr bool SEPARATE_IMAGE_BUFFER_BINDINGS = false; }; using BufferCache = VideoCommon::BufferCache<BufferCacheParams>; diff --git a/src/video_core/renderer_vulkan/vk_compute_pass.cpp b/src/video_core/renderer_vulkan/vk_compute_pass.cpp index 205cd3b05..5ee67a87c 100755 --- a/src/video_core/renderer_vulkan/vk_compute_pass.cpp +++ b/src/video_core/renderer_vulkan/vk_compute_pass.cpp @@ -41,80 +41,92 @@ constexpr u32 ASTC_BINDING_SWIZZLE_BUFFER = 2; constexpr u32 ASTC_BINDING_OUTPUT_IMAGE = 3; constexpr size_t ASTC_NUM_BINDINGS = 4; -VkPushConstantRange BuildComputePushConstantRange(std::size_t size) { - return { - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .offset = 0, - .size = static_cast<u32>(size), - }; -} +template <size_t size> +inline constexpr VkPushConstantRange COMPUTE_PUSH_CONSTANT_RANGE{ + .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, + .offset = 0, + .size = static_cast<u32>(size), +}; -std::array<VkDescriptorSetLayoutBinding, 2> BuildInputOutputDescriptorSetBindings() { - return {{ - { - .binding = 0, - .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - .descriptorCount = 1, - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .pImmutableSamplers = nullptr, - }, - { - .binding = 1, - .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - .descriptorCount = 1, - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .pImmutableSamplers = nullptr, - }, - }}; -} - -std::array<VkDescriptorSetLayoutBinding, ASTC_NUM_BINDINGS> BuildASTCDescriptorSetBindings() { - return {{ - { - .binding = ASTC_BINDING_INPUT_BUFFER, - .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - .descriptorCount = 1, - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .pImmutableSamplers = nullptr, - }, - { - .binding = ASTC_BINDING_ENC_BUFFER, - .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - .descriptorCount = 1, - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .pImmutableSamplers = nullptr, - }, - { - .binding = ASTC_BINDING_SWIZZLE_BUFFER, - .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - .descriptorCount = 1, - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .pImmutableSamplers = nullptr, - }, - { - .binding = ASTC_BINDING_OUTPUT_IMAGE, - .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, - .descriptorCount = 1, - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .pImmutableSamplers = nullptr, - }, - }}; -} - -VkDescriptorUpdateTemplateEntryKHR BuildInputOutputDescriptorUpdateTemplate() { - return { - .dstBinding = 0, - .dstArrayElement = 0, - .descriptorCount = 2, +constexpr std::array<VkDescriptorSetLayoutBinding, 2> INPUT_OUTPUT_DESCRIPTOR_SET_BINDINGS{{ + { + .binding = 0, .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - .offset = 0, - .stride = sizeof(DescriptorUpdateEntry), - }; -} + .descriptorCount = 1, + .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, + .pImmutableSamplers = nullptr, + }, + { + .binding = 1, + .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + .descriptorCount = 1, + .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, + .pImmutableSamplers = nullptr, + }, +}}; -std::array<VkDescriptorUpdateTemplateEntryKHR, ASTC_NUM_BINDINGS> -BuildASTCPassDescriptorUpdateTemplateEntry() { - return {{ +constexpr DescriptorBankInfo INPUT_OUTPUT_BANK_INFO{ + .uniform_buffers = 0, + .storage_buffers = 2, + .texture_buffers = 0, + .image_buffers = 0, + .textures = 0, + .images = 0, + .score = 2, +}; + +constexpr std::array<VkDescriptorSetLayoutBinding, ASTC_NUM_BINDINGS> ASTC_DESCRIPTOR_SET_BINDINGS{{ + { + .binding = ASTC_BINDING_INPUT_BUFFER, + .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + .descriptorCount = 1, + .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, + .pImmutableSamplers = nullptr, + }, + { + .binding = ASTC_BINDING_ENC_BUFFER, + .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + .descriptorCount = 1, + .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, + .pImmutableSamplers = nullptr, + }, + { + .binding = ASTC_BINDING_SWIZZLE_BUFFER, + .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + .descriptorCount = 1, + .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, + .pImmutableSamplers = nullptr, + }, + { + .binding = ASTC_BINDING_OUTPUT_IMAGE, + .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + .descriptorCount = 1, + .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, + .pImmutableSamplers = nullptr, + }, +}}; + +constexpr DescriptorBankInfo ASTC_BANK_INFO{ + .uniform_buffers = 0, + .storage_buffers = 3, + .texture_buffers = 0, + .image_buffers = 0, + .textures = 0, + .images = 1, + .score = 4, +}; + +constexpr VkDescriptorUpdateTemplateEntryKHR INPUT_OUTPUT_DESCRIPTOR_UPDATE_TEMPLATE{ + .dstBinding = 0, + .dstArrayElement = 0, + .descriptorCount = 2, + .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + .offset = 0, + .stride = sizeof(DescriptorUpdateEntry), +}; + +constexpr std::array<VkDescriptorUpdateTemplateEntryKHR, ASTC_NUM_BINDINGS> + ASTC_PASS_DESCRIPTOR_UPDATE_TEMPLATE_ENTRY{{ { .dstBinding = ASTC_BINDING_INPUT_BUFFER, .dstArrayElement = 0, @@ -148,7 +160,6 @@ BuildASTCPassDescriptorUpdateTemplateEntry() { .stride = sizeof(DescriptorUpdateEntry), }, }}; -} struct AstcPushConstants { std::array<u32, 2> blocks_dims; @@ -159,14 +170,14 @@ struct AstcPushConstants { u32 block_height; u32 block_height_mask; }; - } // Anonymous namespace -VKComputePass::VKComputePass(const Device& device, VKDescriptorPool& descriptor_pool, - vk::Span<VkDescriptorSetLayoutBinding> bindings, - vk::Span<VkDescriptorUpdateTemplateEntryKHR> templates, - vk::Span<VkPushConstantRange> push_constants, - std::span<const u32> code) { +ComputePass::ComputePass(const Device& device_, DescriptorPool& descriptor_pool, + vk::Span<VkDescriptorSetLayoutBinding> bindings, + vk::Span<VkDescriptorUpdateTemplateEntryKHR> templates, + const DescriptorBankInfo& bank_info, + vk::Span<VkPushConstantRange> push_constants, std::span<const u32> code) + : device{device_} { descriptor_set_layout = device.GetLogical().CreateDescriptorSetLayout({ .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, .pNext = nullptr, @@ -196,8 +207,7 @@ VKComputePass::VKComputePass(const Device& device, VKDescriptorPool& descriptor_ .pipelineLayout = *layout, .set = 0, }); - - descriptor_allocator.emplace(descriptor_pool, *descriptor_set_layout); + descriptor_allocator = descriptor_pool.Allocator(*descriptor_set_layout, bank_info); } module = device.GetLogical().CreateShaderModule({ .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, .pNext = nullptr, @@ -206,43 +216,34 @@ VKComputePass::VKComputePass(const Device& device, VKDescriptorPool& descriptor_ .codeSize = static_cast<size_t>(code.size_bytes()), .pCode = code.data(), }); + device.SaveShader(code); pipeline = device.GetLogical().CreateComputePipeline({ .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, .pNext = nullptr, .flags = 0, - .stage = - { - .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .stage = VK_SHADER_STAGE_COMPUTE_BIT, - .module = *module, - .pName = "main", - .pSpecializationInfo = nullptr, - }, + .stage{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .stage = VK_SHADER_STAGE_COMPUTE_BIT, + .module = *module, + .pName = "main", +
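// NOTE: the update template built earlier in this constructor lets each pass
// write a whole descriptor set from one packed payload instead of issuing
// per-binding vkUpdateDescriptorSets calls. A sketch of the record-time flow a
// derived pass uses (it mirrors Uint8Pass::Assemble further below in this
// patch):
//
//   update_descriptor_queue.Acquire();
//   update_descriptor_queue.AddBuffer(src_buffer, src_offset, num_vertices);
//   const void* const data{update_descriptor_queue.UpdateData()};
//   const VkDescriptorSet set{descriptor_allocator.Commit()};
//   device.GetLogical().UpdateDescriptorSet(set, *descriptor_template, data);
//   // then bind *pipeline with *layout and dispatch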
.pSpecializationInfo = nullptr, + }, .layout = *layout, .basePipelineHandle = nullptr, .basePipelineIndex = 0, }); } -VKComputePass::~VKComputePass() = default; +ComputePass::~ComputePass() = default; -VkDescriptorSet VKComputePass::CommitDescriptorSet( - VKUpdateDescriptorQueue& update_descriptor_queue) { - if (!descriptor_template) { - return nullptr; - } - const VkDescriptorSet set = descriptor_allocator->Commit(); - update_descriptor_queue.Send(*descriptor_template, set); - return set; -} - -Uint8Pass::Uint8Pass(const Device& device, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool, StagingBufferPool& staging_buffer_pool_, +Uint8Pass::Uint8Pass(const Device& device_, VKScheduler& scheduler_, + DescriptorPool& descriptor_pool, StagingBufferPool& staging_buffer_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_) - : VKComputePass(device, descriptor_pool, BuildInputOutputDescriptorSetBindings(), - BuildInputOutputDescriptorUpdateTemplate(), {}, VULKAN_UINT8_COMP_SPV), + : ComputePass(device_, descriptor_pool, INPUT_OUTPUT_DESCRIPTOR_SET_BINDINGS, + INPUT_OUTPUT_DESCRIPTOR_UPDATE_TEMPLATE, INPUT_OUTPUT_BANK_INFO, {}, + VULKAN_UINT8_COMP_SPV), scheduler{scheduler_}, staging_buffer_pool{staging_buffer_pool_}, update_descriptor_queue{update_descriptor_queue_} {} @@ -256,11 +257,11 @@ std::pair Uint8Pass::Assemble(u32 num_vertices, VkBuffer update_descriptor_queue.Acquire(); update_descriptor_queue.AddBuffer(src_buffer, src_offset, num_vertices); update_descriptor_queue.AddBuffer(staging.buffer, staging.offset, staging_size); - const VkDescriptorSet set = CommitDescriptorSet(update_descriptor_queue); + const void* const descriptor_data{update_descriptor_queue.UpdateData()}; + const VkBuffer buffer{staging.buffer}; scheduler.RequestOutsideRenderPassOperationContext(); - scheduler.Record([layout = *layout, pipeline = *pipeline, buffer = staging.buffer, set, - num_vertices](vk::CommandBuffer cmdbuf) { + scheduler.Record([this, buffer, descriptor_data, num_vertices](vk::CommandBuffer cmdbuf) { static constexpr u32 DISPATCH_SIZE = 1024; static constexpr VkMemoryBarrier WRITE_BARRIER{ .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER, @@ -268,8 +269,10 @@ std::pair Uint8Pass::Assemble(u32 num_vertices, VkBuffer .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT, .dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, }; - cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, pipeline); - cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, layout, 0, set, {}); + const VkDescriptorSet set = descriptor_allocator.Commit(); + device.GetLogical().UpdateDescriptorSet(set, *descriptor_template, descriptor_data); + cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline); + cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, *layout, 0, set, {}); cmdbuf.Dispatch(Common::DivCeil(num_vertices, DISPATCH_SIZE), 1, 1); cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, WRITE_BARRIER); @@ -278,12 +281,12 @@ std::pair Uint8Pass::Assemble(u32 num_vertices, VkBuffer } QuadIndexedPass::QuadIndexedPass(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, + DescriptorPool& descriptor_pool_, StagingBufferPool& staging_buffer_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_) - : VKComputePass(device_, descriptor_pool_, BuildInputOutputDescriptorSetBindings(), - BuildInputOutputDescriptorUpdateTemplate(), - BuildComputePushConstantRange(sizeof(u32) * 2), VULKAN_QUAD_INDEXED_COMP_SPV), + : ComputePass(device_, 
descriptor_pool_, INPUT_OUTPUT_DESCRIPTOR_SET_BINDINGS, + INPUT_OUTPUT_DESCRIPTOR_UPDATE_TEMPLATE, INPUT_OUTPUT_BANK_INFO, + COMPUTE_PUSH_CONSTANT_RANGE, VULKAN_QUAD_INDEXED_COMP_SPV), scheduler{scheduler_}, staging_buffer_pool{staging_buffer_pool_}, update_descriptor_queue{update_descriptor_queue_} {} @@ -313,11 +316,11 @@ std::pair QuadIndexedPass::Assemble( update_descriptor_queue.Acquire(); update_descriptor_queue.AddBuffer(src_buffer, src_offset, input_size); update_descriptor_queue.AddBuffer(staging.buffer, staging.offset, staging_size); - const VkDescriptorSet set = CommitDescriptorSet(update_descriptor_queue); + const void* const descriptor_data{update_descriptor_queue.UpdateData()}; scheduler.RequestOutsideRenderPassOperationContext(); - scheduler.Record([layout = *layout, pipeline = *pipeline, buffer = staging.buffer, set, - num_tri_vertices, base_vertex, index_shift](vk::CommandBuffer cmdbuf) { + scheduler.Record([this, buffer = staging.buffer, descriptor_data, num_tri_vertices, base_vertex, + index_shift](vk::CommandBuffer cmdbuf) { static constexpr u32 DISPATCH_SIZE = 1024; static constexpr VkMemoryBarrier WRITE_BARRIER{ .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER, @@ -325,10 +328,12 @@ std::pair QuadIndexedPass::Assemble( .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT, .dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, }; - const std::array push_constants = {base_vertex, index_shift}; - cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, pipeline); - cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, layout, 0, set, {}); - cmdbuf.PushConstants(layout, VK_SHADER_STAGE_COMPUTE_BIT, 0, sizeof(push_constants), + const std::array push_constants{base_vertex, index_shift}; + const VkDescriptorSet set = descriptor_allocator.Commit(); + device.GetLogical().UpdateDescriptorSet(set, *descriptor_template, descriptor_data); + cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline); + cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, *layout, 0, set, {}); + cmdbuf.PushConstants(*layout, VK_SHADER_STAGE_COMPUTE_BIT, 0, sizeof(push_constants), &push_constants); cmdbuf.Dispatch(Common::DivCeil(num_tri_vertices, DISPATCH_SIZE), 1, 1); cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, @@ -338,15 +343,14 @@ std::pair QuadIndexedPass::Assemble( } ASTCDecoderPass::ASTCDecoderPass(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, + DescriptorPool& descriptor_pool_, StagingBufferPool& staging_buffer_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_, MemoryAllocator& memory_allocator_) - : VKComputePass(device_, descriptor_pool_, BuildASTCDescriptorSetBindings(), - BuildASTCPassDescriptorUpdateTemplateEntry(), - BuildComputePushConstantRange(sizeof(AstcPushConstants)), - ASTC_DECODER_COMP_SPV), - device{device_}, scheduler{scheduler_}, staging_buffer_pool{staging_buffer_pool_}, + : ComputePass(device_, descriptor_pool_, ASTC_DESCRIPTOR_SET_BINDINGS, + ASTC_PASS_DESCRIPTOR_UPDATE_TEMPLATE_ENTRY, ASTC_BANK_INFO, + COMPUTE_PUSH_CONSTANT_RANGE, ASTC_DECODER_COMP_SPV), + scheduler{scheduler_}, staging_buffer_pool{staging_buffer_pool_}, update_descriptor_queue{update_descriptor_queue_}, memory_allocator{memory_allocator_} {} ASTCDecoderPass::~ASTCDecoderPass() = default; @@ -443,16 +447,14 @@ void ASTCDecoderPass::Assemble(Image& image, const StagingBufferRef& map, update_descriptor_queue.AddBuffer(*data_buffer, sizeof(ASTC_ENCODINGS_VALUES), sizeof(SWIZZLE_TABLE)); 
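+    // Illustrative sketch (helper names hypothetical, not from this patch):
+    // AddBuffer/AddImage pack one DescriptorUpdateEntry per binding into a
+    // flat payload, and vkUpdateDescriptorSetWithTemplateKHR later walks it
+    // using the template entries declared above, advancing by the entry
+    // stride per array element:
+    //
+    //   const u8* payload = static_cast<const u8*>(descriptor_data);
+    //   for (const VkDescriptorUpdateTemplateEntryKHR& entry : entries) {
+    //       for (u32 i = 0; i < entry.descriptorCount; ++i) {
+    //           WriteDescriptor(set, entry.dstBinding, entry.dstArrayElement + i,
+    //                           payload + entry.offset + i * entry.stride);
+    //       }
+    //   }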
update_descriptor_queue.AddImage(image.StorageImageView(swizzle.level)); - - const VkDescriptorSet set = CommitDescriptorSet(update_descriptor_queue); - const VkPipelineLayout vk_layout = *layout; + const void* const descriptor_data{update_descriptor_queue.UpdateData()}; // To unswizzle the ASTC data const auto params = MakeBlockLinearSwizzle2DParams(swizzle, image.info); ASSERT(params.origin == (std::array{0, 0, 0})); ASSERT(params.destination == (std::array{0, 0, 0})); - scheduler.Record([vk_layout, num_dispatches_x, num_dispatches_y, num_dispatches_z, - block_dims, params, set](vk::CommandBuffer cmdbuf) { + scheduler.Record([this, num_dispatches_x, num_dispatches_y, num_dispatches_z, block_dims, + params, descriptor_data](vk::CommandBuffer cmdbuf) { const AstcPushConstants uniforms{ .blocks_dims = block_dims, .bytes_per_block_log2 = params.bytes_per_block_log2, @@ -462,8 +464,10 @@ void ASTCDecoderPass::Assemble(Image& image, const StagingBufferRef& map, .block_height = params.block_height, .block_height_mask = params.block_height_mask, }; - cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, vk_layout, 0, set, {}); - cmdbuf.PushConstants(vk_layout, VK_SHADER_STAGE_COMPUTE_BIT, uniforms); + const VkDescriptorSet set = descriptor_allocator.Commit(); + device.GetLogical().UpdateDescriptorSet(set, *descriptor_template, descriptor_data); + cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, *layout, 0, set, {}); + cmdbuf.PushConstants(*layout, VK_SHADER_STAGE_COMPUTE_BIT, uniforms); cmdbuf.Dispatch(num_dispatches_x, num_dispatches_y, num_dispatches_z); }); } diff --git a/src/video_core/renderer_vulkan/vk_compute_pass.h b/src/video_core/renderer_vulkan/vk_compute_pass.h index 5ea187c30..114aef2bd 100755 --- a/src/video_core/renderer_vulkan/vk_compute_pass.h +++ b/src/video_core/renderer_vulkan/vk_compute_pass.h @@ -4,7 +4,6 @@ #pragma once -#include #include #include @@ -27,31 +26,31 @@ class VKUpdateDescriptorQueue; class Image; struct StagingBufferRef; -class VKComputePass { +class ComputePass { public: - explicit VKComputePass(const Device& device, VKDescriptorPool& descriptor_pool, - vk::Span bindings, - vk::Span templates, - vk::Span push_constants, std::span code); - ~VKComputePass(); + explicit ComputePass(const Device& device, DescriptorPool& descriptor_pool, + vk::Span bindings, + vk::Span templates, + const DescriptorBankInfo& bank_info, + vk::Span push_constants, std::span code); + ~ComputePass(); protected: - VkDescriptorSet CommitDescriptorSet(VKUpdateDescriptorQueue& update_descriptor_queue); - + const Device& device; vk::DescriptorUpdateTemplateKHR descriptor_template; vk::PipelineLayout layout; vk::Pipeline pipeline; + vk::DescriptorSetLayout descriptor_set_layout; + DescriptorAllocator descriptor_allocator; private: - vk::DescriptorSetLayout descriptor_set_layout; - std::optional descriptor_allocator; vk::ShaderModule module; }; -class Uint8Pass final : public VKComputePass { +class Uint8Pass final : public ComputePass { public: explicit Uint8Pass(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, StagingBufferPool& staging_buffer_pool_, + DescriptorPool& descriptor_pool_, StagingBufferPool& staging_buffer_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_); ~Uint8Pass(); @@ -66,10 +65,10 @@ private: VKUpdateDescriptorQueue& update_descriptor_queue; }; -class QuadIndexedPass final : public VKComputePass { +class QuadIndexedPass final : public ComputePass { public: explicit QuadIndexedPass(const Device& device_, 
VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, + DescriptorPool& descriptor_pool_, StagingBufferPool& staging_buffer_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_); ~QuadIndexedPass(); @@ -84,10 +83,10 @@ private: VKUpdateDescriptorQueue& update_descriptor_queue; }; -class ASTCDecoderPass final : public VKComputePass { +class ASTCDecoderPass final : public ComputePass { public: explicit ASTCDecoderPass(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, + DescriptorPool& descriptor_pool_, StagingBufferPool& staging_buffer_pool_, VKUpdateDescriptorQueue& update_descriptor_queue_, MemoryAllocator& memory_allocator_); @@ -99,7 +98,6 @@ public: private: void MakeDataBuffer(); - const Device& device; VKScheduler& scheduler; StagingBufferPool& staging_buffer_pool; VKUpdateDescriptorQueue& update_descriptor_queue; diff --git a/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp b/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp index 3a48219b7..70b84c7a6 100755 --- a/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp +++ b/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp @@ -2,152 +2,198 @@ // Licensed under GPLv2 or any later version // Refer to the license.txt file included. +#include #include +#include + +#include "video_core/renderer_vulkan/pipeline_helper.h" +#include "video_core/renderer_vulkan/vk_buffer_cache.h" #include "video_core/renderer_vulkan/vk_compute_pipeline.h" #include "video_core/renderer_vulkan/vk_descriptor_pool.h" #include "video_core/renderer_vulkan/vk_pipeline_cache.h" #include "video_core/renderer_vulkan/vk_scheduler.h" -#include "video_core/renderer_vulkan/vk_shader_decompiler.h" #include "video_core/renderer_vulkan/vk_update_descriptor.h" +#include "video_core/shader_notify.h" #include "video_core/vulkan_common/vulkan_device.h" #include "video_core/vulkan_common/vulkan_wrapper.h" namespace Vulkan { -VKComputePipeline::VKComputePipeline(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, - VKUpdateDescriptorQueue& update_descriptor_queue_, - const SPIRVShader& shader_) - : device{device_}, scheduler{scheduler_}, entries{shader_.entries}, - descriptor_set_layout{CreateDescriptorSetLayout()}, - descriptor_allocator{descriptor_pool_, *descriptor_set_layout}, - update_descriptor_queue{update_descriptor_queue_}, layout{CreatePipelineLayout()}, - descriptor_template{CreateDescriptorUpdateTemplate()}, - shader_module{CreateShaderModule(shader_.code)}, pipeline{CreatePipeline()} {} +using Shader::ImageBufferDescriptor; +using Tegra::Texture::TexturePair; -VKComputePipeline::~VKComputePipeline() = default; - -VkDescriptorSet VKComputePipeline::CommitDescriptorSet() { - if (!descriptor_template) { - return {}; +ComputePipeline::ComputePipeline(const Device& device_, DescriptorPool& descriptor_pool, + VKUpdateDescriptorQueue& update_descriptor_queue_, + Common::ThreadWorker* thread_worker, + VideoCore::ShaderNotify* shader_notify, const Shader::Info& info_, + vk::ShaderModule spv_module_) + : device{device_}, update_descriptor_queue{update_descriptor_queue_}, info{info_}, + spv_module(std::move(spv_module_)) { + if (shader_notify) { + shader_notify->MarkShaderBuilding(); } - const VkDescriptorSet set = descriptor_allocator.Commit(); - update_descriptor_queue.Send(*descriptor_template, set); - return set; -} + std::copy_n(info.constant_buffer_used_sizes.begin(), uniform_buffer_sizes.size(), + uniform_buffer_sizes.begin()); -vk::DescriptorSetLayout 
VKComputePipeline::CreateDescriptorSetLayout() const { - std::vector bindings; - u32 binding = 0; - const auto add_bindings = [&](VkDescriptorType descriptor_type, std::size_t num_entries) { - // TODO(Rodrigo): Maybe make individual bindings here? - for (u32 bindpoint = 0; bindpoint < static_cast(num_entries); ++bindpoint) { - bindings.push_back({ - .binding = binding++, - .descriptorType = descriptor_type, - .descriptorCount = 1, - .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT, - .pImmutableSamplers = nullptr, - }); - } - }; - add_bindings(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, entries.const_buffers.size()); - add_bindings(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, entries.global_buffers.size()); - add_bindings(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, entries.uniform_texels.size()); - add_bindings(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, entries.samplers.size()); - add_bindings(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, entries.storage_texels.size()); - add_bindings(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, entries.images.size()); + auto func{[this, &descriptor_pool, shader_notify] { + DescriptorLayoutBuilder builder{device}; + builder.Add(info, VK_SHADER_STAGE_COMPUTE_BIT); - return device.GetLogical().CreateDescriptorSetLayout({ - .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .bindingCount = static_cast(bindings.size()), - .pBindings = bindings.data(), - }); -} - -vk::PipelineLayout VKComputePipeline::CreatePipelineLayout() const { - return device.GetLogical().CreatePipelineLayout({ - .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .setLayoutCount = 1, - .pSetLayouts = descriptor_set_layout.address(), - .pushConstantRangeCount = 0, - .pPushConstantRanges = nullptr, - }); -} - -vk::DescriptorUpdateTemplateKHR VKComputePipeline::CreateDescriptorUpdateTemplate() const { - std::vector template_entries; - u32 binding = 0; - u32 offset = 0; - FillDescriptorUpdateTemplateEntries(entries, binding, offset, template_entries); - if (template_entries.empty()) { - // If the shader doesn't use descriptor sets, skip template creation. 
- return {}; - } - - return device.GetLogical().CreateDescriptorUpdateTemplateKHR({ - .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, - .pNext = nullptr, - .flags = 0, - .descriptorUpdateEntryCount = static_cast(template_entries.size()), - .pDescriptorUpdateEntries = template_entries.data(), - .templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, - .descriptorSetLayout = *descriptor_set_layout, - .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS, - .pipelineLayout = *layout, - .set = DESCRIPTOR_SET, - }); -} - -vk::ShaderModule VKComputePipeline::CreateShaderModule(const std::vector& code) const { - device.SaveShader(code); - - return device.GetLogical().CreateShaderModule({ - .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .codeSize = code.size() * sizeof(u32), - .pCode = code.data(), - }); -} - -vk::Pipeline VKComputePipeline::CreatePipeline() const { - - VkComputePipelineCreateInfo ci{ - .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .stage = - { + descriptor_set_layout = builder.CreateDescriptorSetLayout(false); + pipeline_layout = builder.CreatePipelineLayout(*descriptor_set_layout); + descriptor_update_template = + builder.CreateTemplate(*descriptor_set_layout, *pipeline_layout, false); + descriptor_allocator = descriptor_pool.Allocator(*descriptor_set_layout, info); + const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + .pNext = nullptr, + .requiredSubgroupSize = GuestWarpSize, + }; + pipeline = device.GetLogical().CreateComputePipeline({ + .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .stage{ .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, - .pNext = nullptr, + .pNext = device.IsExtSubgroupSizeControlSupported() ? 
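+                        // Require the guest warp size (GuestWarpSize, 32) via
+                        // VK_EXT_subgroup_size_control when available, so that
+                        // subgroup ballots and shuffles in the translated
+                        // SPIR-V match the emulated GPU; otherwise keep the
+                        // driver's default subgroup size: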
&subgroup_size_ci : nullptr, .flags = 0, .stage = VK_SHADER_STAGE_COMPUTE_BIT, - .module = *shader_module, + .module = *spv_module, .pName = "main", .pSpecializationInfo = nullptr, }, - .layout = *layout, - .basePipelineHandle = nullptr, - .basePipelineIndex = 0, - }; + .layout = *pipeline_layout, + .basePipelineHandle = 0, + .basePipelineIndex = 0, + }); + std::lock_guard lock{build_mutex}; + is_built = true; + build_condvar.notify_one(); + if (shader_notify) { + shader_notify->MarkShaderComplete(); + } + }}; + if (thread_worker) { + thread_worker->QueueWork(std::move(func)); + } else { + func(); + } +} - const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci{ - .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, - .pNext = nullptr, - .requiredSubgroupSize = GuestWarpSize, - }; +void ComputePipeline::Configure(Tegra::Engines::KeplerCompute& kepler_compute, + Tegra::MemoryManager& gpu_memory, VKScheduler& scheduler, + BufferCache& buffer_cache, TextureCache& texture_cache) { + update_descriptor_queue.Acquire(); - if (entries.uses_warps && device.IsGuestWarpSizeSupported(VK_SHADER_STAGE_COMPUTE_BIT)) { - ci.stage.pNext = &subgroup_size_ci; + buffer_cache.SetComputeUniformBufferState(info.constant_buffer_mask, &uniform_buffer_sizes); + buffer_cache.UnbindComputeStorageBuffers(); + size_t ssbo_index{}; + for (const auto& desc : info.storage_buffers_descriptors) { + ASSERT(desc.count == 1); + buffer_cache.BindComputeStorageBuffer(ssbo_index, desc.cbuf_index, desc.cbuf_offset, + desc.is_written); + ++ssbo_index; } - return device.GetLogical().CreateComputePipeline(ci); + texture_cache.SynchronizeComputeDescriptors(); + + static constexpr size_t max_elements = 64; + std::array image_view_ids; + boost::container::static_vector image_view_indices; + boost::container::static_vector samplers; + + const auto& qmd{kepler_compute.launch_description}; + const auto& cbufs{qmd.const_buffer_config}; + const bool via_header_index{qmd.linked_tsc != 0}; + const auto read_handle{[&](const auto& desc, u32 index) { + ASSERT(((qmd.const_buffer_enable_mask >> desc.cbuf_index) & 1) != 0); + const u32 index_offset{index << desc.size_shift}; + const u32 offset{desc.cbuf_offset + index_offset}; + const GPUVAddr addr{cbufs[desc.cbuf_index].Address() + offset}; + if constexpr (std::is_same_v || + std::is_same_v) { + if (desc.has_secondary) { + ASSERT(((qmd.const_buffer_enable_mask >> desc.secondary_cbuf_index) & 1) != 0); + const u32 secondary_offset{desc.secondary_cbuf_offset + index_offset}; + const GPUVAddr separate_addr{cbufs[desc.secondary_cbuf_index].Address() + + secondary_offset}; + const u32 lhs_raw{gpu_memory.Read(addr)}; + const u32 rhs_raw{gpu_memory.Read(separate_addr)}; + return TexturePair(lhs_raw | rhs_raw, via_header_index); + } + } + return TexturePair(gpu_memory.Read(addr), via_header_index); + }}; + const auto add_image{[&](const auto& desc) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices.push_back(handle.first); + } + }}; + std::ranges::for_each(info.texture_buffer_descriptors, add_image); + std::ranges::for_each(info.image_buffer_descriptors, add_image); + for (const auto& desc : info.texture_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices.push_back(handle.first); + + Sampler* const sampler = texture_cache.GetComputeSampler(handle.second); + samplers.push_back(sampler->Handle()); 
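+            // handle.first is the texture (TIC) index and handle.second the
+            // sampler (TSC) index. A sketch of what TexturePair returns,
+            // assuming the usual Maxwell 20/12-bit handle packing (an
+            // assumption, not spelled out in this patch):
+            //
+            //   u32 tic = via_header_index ? raw : (raw & 0xfffff);
+            //   u32 tsc = via_header_index ? raw : (raw >> 20);
+            //   return std::pair{tic, tsc};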
+ } + } + std::ranges::for_each(info.image_descriptors, add_image); + + const std::span indices_span(image_view_indices.data(), image_view_indices.size()); + texture_cache.FillComputeImageViews(indices_span, image_view_ids); + + buffer_cache.UnbindComputeTextureBuffers(); + ImageId* texture_buffer_ids{image_view_ids.data()}; + size_t index{}; + const auto add_buffer{[&](const auto& desc) { + constexpr bool is_image = std::is_same_v; + for (u32 i = 0; i < desc.count; ++i) { + bool is_written{false}; + if constexpr (is_image) { + is_written = desc.is_written; + } + ImageView& image_view = texture_cache.GetImageView(*texture_buffer_ids); + buffer_cache.BindComputeTextureBuffer(index, image_view.GpuAddr(), + image_view.BufferSize(), image_view.format, + is_written, is_image); + ++texture_buffer_ids; + ++index; + } + }}; + std::ranges::for_each(info.texture_buffer_descriptors, add_buffer); + std::ranges::for_each(info.image_buffer_descriptors, add_buffer); + + buffer_cache.UpdateComputeBuffers(); + buffer_cache.BindHostComputeBuffers(); + + const VkSampler* samplers_it{samplers.data()}; + const ImageId* views_it{image_view_ids.data()}; + PushImageDescriptors(info, samplers_it, views_it, texture_cache, update_descriptor_queue); + + if (!is_built.load(std::memory_order::relaxed)) { + // Wait for the pipeline to be built + scheduler.Record([this](vk::CommandBuffer) { + std::unique_lock lock{build_mutex}; + build_condvar.wait(lock, [this] { return is_built.load(std::memory_order::relaxed); }); + }); + } + const void* const descriptor_data{update_descriptor_queue.UpdateData()}; + scheduler.Record([this, descriptor_data](vk::CommandBuffer cmdbuf) { + cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline); + if (!descriptor_set_layout) { + return; + } + const VkDescriptorSet descriptor_set{descriptor_allocator.Commit()}; + const vk::Device& dev{device.GetLogical()}; + dev.UpdateDescriptorSet(descriptor_set, *descriptor_update_template, descriptor_data); + cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline_layout, 0, + descriptor_set, nullptr); + }); } } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_compute_pipeline.h b/src/video_core/renderer_vulkan/vk_compute_pipeline.h index 7e16575ac..52fec04d3 100755 --- a/src/video_core/renderer_vulkan/vk_compute_pipeline.h +++ b/src/video_core/renderer_vulkan/vk_compute_pipeline.h @@ -4,61 +4,63 @@ #pragma once +#include +#include +#include + #include "common/common_types.h" +#include "common/thread_worker.h" +#include "shader_recompiler/shader_info.h" +#include "video_core/memory_manager.h" +#include "video_core/renderer_vulkan/vk_buffer_cache.h" #include "video_core/renderer_vulkan/vk_descriptor_pool.h" -#include "video_core/renderer_vulkan/vk_shader_decompiler.h" +#include "video_core/renderer_vulkan/vk_texture_cache.h" +#include "video_core/renderer_vulkan/vk_update_descriptor.h" #include "video_core/vulkan_common/vulkan_wrapper.h" +namespace VideoCore { +class ShaderNotify; +} + namespace Vulkan { class Device; class VKScheduler; -class VKUpdateDescriptorQueue; -class VKComputePipeline final { +class ComputePipeline { public: - explicit VKComputePipeline(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, - VKUpdateDescriptorQueue& update_descriptor_queue_, - const SPIRVShader& shader_); - ~VKComputePipeline(); + explicit ComputePipeline(const Device& device, DescriptorPool& descriptor_pool, + VKUpdateDescriptorQueue& update_descriptor_queue, + Common::ThreadWorker* 
thread_worker, + VideoCore::ShaderNotify* shader_notify, const Shader::Info& info, + vk::ShaderModule spv_module); - VkDescriptorSet CommitDescriptorSet(); + ComputePipeline& operator=(ComputePipeline&&) noexcept = delete; + ComputePipeline(ComputePipeline&&) noexcept = delete; - VkPipeline GetHandle() const { - return *pipeline; - } + ComputePipeline& operator=(const ComputePipeline&) = delete; + ComputePipeline(const ComputePipeline&) = delete; - VkPipelineLayout GetLayout() const { - return *layout; - } - - const ShaderEntries& GetEntries() const { - return entries; - } + void Configure(Tegra::Engines::KeplerCompute& kepler_compute, Tegra::MemoryManager& gpu_memory, + VKScheduler& scheduler, BufferCache& buffer_cache, TextureCache& texture_cache); private: - vk::DescriptorSetLayout CreateDescriptorSetLayout() const; - - vk::PipelineLayout CreatePipelineLayout() const; - - vk::DescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplate() const; - - vk::ShaderModule CreateShaderModule(const std::vector& code) const; - - vk::Pipeline CreatePipeline() const; - const Device& device; - VKScheduler& scheduler; - ShaderEntries entries; + VKUpdateDescriptorQueue& update_descriptor_queue; + Shader::Info info; + VideoCommon::ComputeUniformBufferSizes uniform_buffer_sizes{}; + + vk::ShaderModule spv_module; vk::DescriptorSetLayout descriptor_set_layout; DescriptorAllocator descriptor_allocator; - VKUpdateDescriptorQueue& update_descriptor_queue; - vk::PipelineLayout layout; - vk::DescriptorUpdateTemplateKHR descriptor_template; - vk::ShaderModule shader_module; + vk::PipelineLayout pipeline_layout; + vk::DescriptorUpdateTemplateKHR descriptor_update_template; vk::Pipeline pipeline; + + std::condition_variable build_condvar; + std::mutex build_mutex; + std::atomic_bool is_built{false}; }; } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_descriptor_pool.cpp b/src/video_core/renderer_vulkan/vk_descriptor_pool.cpp index ef9fb5910..8e77e4796 100755 --- a/src/video_core/renderer_vulkan/vk_descriptor_pool.cpp +++ b/src/video_core/renderer_vulkan/vk_descriptor_pool.cpp @@ -2,6 +2,8 @@ // Licensed under GPLv2 or any later version // Refer to the license.txt file included. +#include +#include #include #include "common/common_types.h" @@ -13,79 +15,149 @@ namespace Vulkan { -// Prefer small grow rates to avoid saturating the descriptor pool with barely used pipelines. 
-constexpr std::size_t SETS_GROW_RATE = 0x20;
+// Prefer small grow rates to avoid saturating the descriptor pool with barely used pipelines
+constexpr size_t SETS_GROW_RATE = 16;
+constexpr s32 SCORE_THRESHOLD = 3;
+constexpr u32 SETS_PER_POOL = 64;
 
-DescriptorAllocator::DescriptorAllocator(VKDescriptorPool& descriptor_pool_,
-                                         VkDescriptorSetLayout layout_)
-    : ResourcePool(descriptor_pool_.master_semaphore, SETS_GROW_RATE),
-      descriptor_pool{descriptor_pool_}, layout{layout_} {}
+struct DescriptorBank {
+    DescriptorBankInfo info;
+    std::vector<vk::DescriptorPool> pools;
+};
 
-DescriptorAllocator::~DescriptorAllocator() = default;
-
-VkDescriptorSet DescriptorAllocator::Commit() {
-    const std::size_t index = CommitResource();
-    return descriptors_allocations[index / SETS_GROW_RATE][index % SETS_GROW_RATE];
+bool DescriptorBankInfo::IsSuperset(const DescriptorBankInfo& subset) const noexcept {
+    return uniform_buffers >= subset.uniform_buffers && storage_buffers >= subset.storage_buffers &&
+           texture_buffers >= subset.texture_buffers && image_buffers >= subset.image_buffers &&
+           textures >= subset.textures && images >= subset.images;
 }
 
-void DescriptorAllocator::Allocate(std::size_t begin, std::size_t end) {
-    descriptors_allocations.push_back(descriptor_pool.AllocateDescriptors(layout, end - begin));
+template <typename Descriptors>
+static u32 Accumulate(const Descriptors& descriptors) {
+    u32 count = 0;
+    for (const auto& descriptor : descriptors) {
+        count += descriptor.count;
+    }
+    return count;
 }
 
-VKDescriptorPool::VKDescriptorPool(const Device& device_, VKScheduler& scheduler)
-    : device{device_}, master_semaphore{scheduler.GetMasterSemaphore()}, active_pool{
-                                                                             AllocateNewPool()} {}
+static DescriptorBankInfo MakeBankInfo(std::span<const Shader::Info> infos) {
+    DescriptorBankInfo bank;
+    for (const Shader::Info& info : infos) {
+        bank.uniform_buffers += Accumulate(info.constant_buffer_descriptors);
+        bank.storage_buffers += Accumulate(info.storage_buffers_descriptors);
+        bank.texture_buffers += Accumulate(info.texture_buffer_descriptors);
+        bank.image_buffers += Accumulate(info.image_buffer_descriptors);
+        bank.textures += Accumulate(info.texture_descriptors);
+        bank.images += Accumulate(info.image_descriptors);
+    }
+    bank.score = bank.uniform_buffers + bank.storage_buffers + bank.texture_buffers +
+                 bank.image_buffers + bank.textures + bank.images;
+    return bank;
+}
 
-VKDescriptorPool::~VKDescriptorPool() = default;
-
-vk::DescriptorPool* VKDescriptorPool::AllocateNewPool() {
-    static constexpr u32 num_sets = 0x20000;
-    static constexpr VkDescriptorPoolSize pool_sizes[] = {
-        {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, num_sets * 90},
-        {VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, num_sets * 60},
-        {VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, num_sets * 64},
-        {VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, num_sets * 64},
-        {VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, num_sets * 64},
-        {VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, num_sets * 40},
+static void AllocatePool(const Device& device, DescriptorBank& bank) {
+    std::array<VkDescriptorPoolSize, 6> pool_sizes;
+    size_t pool_cursor{};
+    const auto add = [&](VkDescriptorType type, u32 count) {
+        if (count > 0) {
+            pool_sizes[pool_cursor++] = {
+                .type = type,
+                .descriptorCount = count * SETS_PER_POOL,
+            };
+        }
     };
-
-    const VkDescriptorPoolCreateInfo ci{
+    // Texture buffers map to uniform texel buffers and image buffers to
+    // storage texel buffers; zero-count bindings are skipped so small banks
+    // create correspondingly small pools.
+    const auto& info{bank.info};
+    add(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, info.uniform_buffers);
+    add(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, info.storage_buffers);
+    add(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, info.texture_buffers);
+    add(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
info.image_buffers); + add(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, info.textures); + add(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, info.images); + bank.pools.push_back(device.GetLogical().CreateDescriptorPool({ .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, .pNext = nullptr, .flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, - .maxSets = num_sets, - .poolSizeCount = static_cast(std::size(pool_sizes)), + .maxSets = SETS_PER_POOL, + .poolSizeCount = static_cast(pool_cursor), .pPoolSizes = std::data(pool_sizes), - }; - return &pools.emplace_back(device.GetLogical().CreateDescriptorPool(ci)); + })); } -vk::DescriptorSets VKDescriptorPool::AllocateDescriptors(VkDescriptorSetLayout layout, - std::size_t count) { - const std::vector layout_copies(count, layout); - VkDescriptorSetAllocateInfo ai{ +DescriptorAllocator::DescriptorAllocator(const Device& device_, MasterSemaphore& master_semaphore_, + DescriptorBank& bank_, VkDescriptorSetLayout layout_) + : ResourcePool(master_semaphore_, SETS_GROW_RATE), device{&device_}, bank{&bank_}, + layout{layout_} {} + +VkDescriptorSet DescriptorAllocator::Commit() { + const size_t index = CommitResource(); + return sets[index / SETS_GROW_RATE][index % SETS_GROW_RATE]; +} + +void DescriptorAllocator::Allocate(size_t begin, size_t end) { + sets.push_back(AllocateDescriptors(end - begin)); +} + +vk::DescriptorSets DescriptorAllocator::AllocateDescriptors(size_t count) { + const std::vector layouts(count, layout); + VkDescriptorSetAllocateInfo allocate_info{ .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, .pNext = nullptr, - .descriptorPool = **active_pool, + .descriptorPool = *bank->pools.back(), .descriptorSetCount = static_cast(count), - .pSetLayouts = layout_copies.data(), + .pSetLayouts = layouts.data(), }; - - vk::DescriptorSets sets = active_pool->Allocate(ai); - if (!sets.IsOutOfPoolMemory()) { - return sets; + vk::DescriptorSets new_sets = bank->pools.back().Allocate(allocate_info); + if (!new_sets.IsOutOfPoolMemory()) { + return new_sets; } - // Our current pool is out of memory. Allocate a new one and retry - active_pool = AllocateNewPool(); - ai.descriptorPool = **active_pool; - sets = active_pool->Allocate(ai); - if (!sets.IsOutOfPoolMemory()) { - return sets; + AllocatePool(*device, *bank); + allocate_info.descriptorPool = *bank->pools.back(); + new_sets = bank->pools.back().Allocate(allocate_info); + if (!new_sets.IsOutOfPoolMemory()) { + return new_sets; } - // After allocating a new pool, we are out of memory again. We can't handle this from here. 
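+    // Both attempts failed: the bank's current pool and a freshly grown one.
+    // A request that does not fit in a brand-new SETS_PER_POOL-sized pool
+    // cannot be satisfied by growing again, so surface the error.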
throw vk::Exception(VK_ERROR_OUT_OF_POOL_MEMORY); } +DescriptorPool::DescriptorPool(const Device& device_, VKScheduler& scheduler) + : device{device_}, master_semaphore{scheduler.GetMasterSemaphore()} {} + +DescriptorPool::~DescriptorPool() = default; + +DescriptorAllocator DescriptorPool::Allocator(VkDescriptorSetLayout layout, + std::span infos) { + return Allocator(layout, MakeBankInfo(infos)); +} + +DescriptorAllocator DescriptorPool::Allocator(VkDescriptorSetLayout layout, + const Shader::Info& info) { + return Allocator(layout, MakeBankInfo(std::array{info})); +} + +DescriptorAllocator DescriptorPool::Allocator(VkDescriptorSetLayout layout, + const DescriptorBankInfo& info) { + return DescriptorAllocator(device, master_semaphore, Bank(info), layout); +} + +DescriptorBank& DescriptorPool::Bank(const DescriptorBankInfo& reqs) { + std::shared_lock read_lock{banks_mutex}; + const auto it = std::ranges::find_if(bank_infos, [&reqs](const DescriptorBankInfo& bank) { + return std::abs(bank.score - reqs.score) < SCORE_THRESHOLD && bank.IsSuperset(reqs); + }); + if (it != bank_infos.end()) { + return *banks[std::distance(bank_infos.begin(), it)].get(); + } + read_lock.unlock(); + + std::unique_lock write_lock{banks_mutex}; + bank_infos.push_back(reqs); + + auto& bank = *banks.emplace_back(std::make_unique()); + bank.info = reqs; + AllocatePool(device, bank); + return bank; +} + } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_descriptor_pool.h b/src/video_core/renderer_vulkan/vk_descriptor_pool.h index f892be7be..59466aac5 100755 --- a/src/video_core/renderer_vulkan/vk_descriptor_pool.h +++ b/src/video_core/renderer_vulkan/vk_descriptor_pool.h @@ -4,57 +4,85 @@ #pragma once +#include +#include #include +#include "shader_recompiler/shader_info.h" #include "video_core/renderer_vulkan/vk_resource_pool.h" #include "video_core/vulkan_common/vulkan_wrapper.h" namespace Vulkan { class Device; -class VKDescriptorPool; class VKScheduler; +struct DescriptorBank; + +struct DescriptorBankInfo { + [[nodiscard]] bool IsSuperset(const DescriptorBankInfo& subset) const noexcept; + + u32 uniform_buffers{}; ///< Number of uniform buffer descriptors + u32 storage_buffers{}; ///< Number of storage buffer descriptors + u32 texture_buffers{}; ///< Number of texture buffer descriptors + u32 image_buffers{}; ///< Number of image buffer descriptors + u32 textures{}; ///< Number of texture descriptors + u32 images{}; ///< Number of image descriptors + s32 score{}; ///< Number of descriptors in total +}; + class DescriptorAllocator final : public ResourcePool { + friend class DescriptorPool; + public: - explicit DescriptorAllocator(VKDescriptorPool& descriptor_pool, VkDescriptorSetLayout layout); - ~DescriptorAllocator() override; + explicit DescriptorAllocator() = default; + ~DescriptorAllocator() override = default; + + DescriptorAllocator& operator=(DescriptorAllocator&&) noexcept = default; + DescriptorAllocator(DescriptorAllocator&&) noexcept = default; DescriptorAllocator& operator=(const DescriptorAllocator&) = delete; DescriptorAllocator(const DescriptorAllocator&) = delete; VkDescriptorSet Commit(); -protected: - void Allocate(std::size_t begin, std::size_t end) override; - private: - VKDescriptorPool& descriptor_pool; - const VkDescriptorSetLayout layout; + explicit DescriptorAllocator(const Device& device_, MasterSemaphore& master_semaphore_, + DescriptorBank& bank_, VkDescriptorSetLayout layout_); - std::vector descriptors_allocations; + void Allocate(size_t begin, size_t end) override; + 
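+    // ResourcePool::CommitResource() yields a monotonically increasing index
+    // whose backing chunk the master semaphore guarantees is no longer in
+    // use; Allocate(begin, end) appends one chunk per growth, so the set for
+    // index i lives at sets[i / SETS_GROW_RATE][i % SETS_GROW_RATE].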
+ vk::DescriptorSets AllocateDescriptors(size_t count); + + const Device* device{}; + DescriptorBank* bank{}; + VkDescriptorSetLayout layout{}; + + std::vector sets; }; -class VKDescriptorPool final { - friend DescriptorAllocator; - +class DescriptorPool { public: - explicit VKDescriptorPool(const Device& device, VKScheduler& scheduler); - ~VKDescriptorPool(); + explicit DescriptorPool(const Device& device, VKScheduler& scheduler); + ~DescriptorPool(); - VKDescriptorPool(const VKDescriptorPool&) = delete; - VKDescriptorPool& operator=(const VKDescriptorPool&) = delete; + DescriptorPool& operator=(const DescriptorPool&) = delete; + DescriptorPool(const DescriptorPool&) = delete; + + DescriptorAllocator Allocator(VkDescriptorSetLayout layout, + std::span infos); + DescriptorAllocator Allocator(VkDescriptorSetLayout layout, const Shader::Info& info); + DescriptorAllocator Allocator(VkDescriptorSetLayout layout, const DescriptorBankInfo& info); private: - vk::DescriptorPool* AllocateNewPool(); - - vk::DescriptorSets AllocateDescriptors(VkDescriptorSetLayout layout, std::size_t count); + DescriptorBank& Bank(const DescriptorBankInfo& reqs); const Device& device; MasterSemaphore& master_semaphore; - std::vector pools; - vk::DescriptorPool* active_pool; + std::shared_mutex banks_mutex; + std::vector bank_infos; + std::vector> banks; }; } // namespace Vulkan \ No newline at end of file diff --git a/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp b/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp index fc6dd83eb..f0ae0b0d6 100755 --- a/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp +++ b/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp @@ -1,29 +1,58 @@ -// Copyright 2019 yuzu Emulator Project +// Copyright 2021 yuzu Emulator Project // Licensed under GPLv2 or any later version // Refer to the license.txt file included. 
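+// Graphics pipelines are built asynchronously on the given thread worker and
+// bound through a configure function that FindSpec selects at creation time
+// from the stages' feature usage. A sketch of the dispatch:
+//
+//   configure_func = ConfigureFunc(spv_modules, stage_infos);
+//   ...
+//   configure_func(this, is_indexed); // runs ConfigureImpl<MatchingSpec>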
#include -#include -#include -#include +#include -#include "common/common_types.h" -#include "common/microprofile.h" -#include "video_core/renderer_vulkan/fixed_pipeline_state.h" +#include +#include + +#include "common/bit_field.h" #include "video_core/renderer_vulkan/maxwell_to_vk.h" -#include "video_core/renderer_vulkan/vk_descriptor_pool.h" +#include "video_core/renderer_vulkan/pipeline_helper.h" +#include "video_core/renderer_vulkan/vk_buffer_cache.h" #include "video_core/renderer_vulkan/vk_graphics_pipeline.h" -#include "video_core/renderer_vulkan/vk_pipeline_cache.h" +#include "video_core/renderer_vulkan/vk_render_pass_cache.h" #include "video_core/renderer_vulkan/vk_scheduler.h" +#include "video_core/renderer_vulkan/vk_texture_cache.h" #include "video_core/renderer_vulkan/vk_update_descriptor.h" +#include "video_core/shader_notify.h" #include "video_core/vulkan_common/vulkan_device.h" -#include "video_core/vulkan_common/vulkan_wrapper.h" + +#if defined(_MSC_VER) && defined(NDEBUG) +#define LAMBDA_FORCEINLINE [[msvc::forceinline]] +#else +#define LAMBDA_FORCEINLINE +#endif namespace Vulkan { - -MICROPROFILE_DECLARE(Vulkan_PipelineCache); - namespace { +using boost::container::small_vector; +using boost::container::static_vector; +using Shader::ImageBufferDescriptor; +using Tegra::Texture::TexturePair; +using VideoCore::Surface::PixelFormat; +using VideoCore::Surface::PixelFormatFromDepthFormat; +using VideoCore::Surface::PixelFormatFromRenderTargetFormat; + +constexpr size_t NUM_STAGES = Maxwell::MaxShaderStage; +constexpr size_t MAX_IMAGE_ELEMENTS = 64; + +DescriptorLayoutBuilder MakeBuilder(const Device& device, std::span infos) { + DescriptorLayoutBuilder builder{device}; + for (size_t index = 0; index < infos.size(); ++index) { + static constexpr std::array stages{ + VK_SHADER_STAGE_VERTEX_BIT, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + VK_SHADER_STAGE_GEOMETRY_BIT, + VK_SHADER_STAGE_FRAGMENT_BIT, + }; + builder.Add(infos[index], stages.at(index)); + } + return builder; +} template VkStencilOpState GetStencilFaceState(const StencilFace& face) { @@ -39,15 +68,24 @@ VkStencilOpState GetStencilFaceState(const StencilFace& face) { } bool SupportsPrimitiveRestart(VkPrimitiveTopology topology) { - static constexpr std::array unsupported_topologies = { + static constexpr std::array unsupported_topologies{ VK_PRIMITIVE_TOPOLOGY_POINT_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, - VK_PRIMITIVE_TOPOLOGY_PATCH_LIST}; - return std::find(std::begin(unsupported_topologies), std::end(unsupported_topologies), - topology) == std::end(unsupported_topologies); + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, + // VK_PRIMITIVE_TOPOLOGY_QUAD_LIST_EXT, + }; + return std::ranges::find(unsupported_topologies, topology) == unsupported_topologies.end(); +} + +bool IsLine(VkPrimitiveTopology topology) { + static constexpr std::array line_topologies{ + VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + // VK_PRIMITIVE_TOPOLOGY_LINE_LOOP_EXT, + }; + return std::ranges::find(line_topologies, topology) == line_topologies.end(); } VkViewportSwizzleNV UnpackViewportSwizzle(u16 swizzle) { @@ -59,8 +97,7 @@ VkViewportSwizzleNV UnpackViewportSwizzle(u16 swizzle) { BitField<12, 3, Maxwell::ViewportSwizzle> w; }; const Swizzle unpacked{swizzle}; - - return { + return VkViewportSwizzleNV{ .x = MaxwellToVK::ViewportSwizzle(unpacked.x), 
.y = MaxwellToVK::ViewportSwizzle(unpacked.y), .z = MaxwellToVK::ViewportSwizzle(unpacked.z), @@ -68,193 +105,447 @@ VkViewportSwizzleNV UnpackViewportSwizzle(u16 swizzle) { }; } -VkSampleCountFlagBits ConvertMsaaMode(Tegra::Texture::MsaaMode msaa_mode) { - switch (msaa_mode) { - case Tegra::Texture::MsaaMode::Msaa1x1: - return VK_SAMPLE_COUNT_1_BIT; - case Tegra::Texture::MsaaMode::Msaa2x1: - case Tegra::Texture::MsaaMode::Msaa2x1_D3D: - return VK_SAMPLE_COUNT_2_BIT; - case Tegra::Texture::MsaaMode::Msaa2x2: - case Tegra::Texture::MsaaMode::Msaa2x2_VC4: - case Tegra::Texture::MsaaMode::Msaa2x2_VC12: - return VK_SAMPLE_COUNT_4_BIT; - case Tegra::Texture::MsaaMode::Msaa4x2: - case Tegra::Texture::MsaaMode::Msaa4x2_D3D: - case Tegra::Texture::MsaaMode::Msaa4x2_VC8: - case Tegra::Texture::MsaaMode::Msaa4x2_VC24: - return VK_SAMPLE_COUNT_8_BIT; - case Tegra::Texture::MsaaMode::Msaa4x4: - return VK_SAMPLE_COUNT_16_BIT; - default: - UNREACHABLE_MSG("Invalid msaa_mode={}", static_cast(msaa_mode)); - return VK_SAMPLE_COUNT_1_BIT; +PixelFormat DecodeFormat(u8 encoded_format) { + const auto format{static_cast(encoded_format)}; + if (format == Tegra::RenderTargetFormat::NONE) { + return PixelFormat::Invalid; } + return PixelFormatFromRenderTargetFormat(format); } +RenderPassKey MakeRenderPassKey(const FixedPipelineState& state) { + RenderPassKey key; + std::ranges::transform(state.color_formats, key.color_formats.begin(), DecodeFormat); + if (state.depth_enabled != 0) { + const auto depth_format{static_cast(state.depth_format.Value())}; + key.depth_format = PixelFormatFromDepthFormat(depth_format); + } else { + key.depth_format = PixelFormat::Invalid; + } + key.samples = MaxwellToVK::MsaaMode(state.msaa_mode); + return key; +} + +size_t NumAttachments(const FixedPipelineState& state) { + size_t num{}; + for (size_t index = 0; index < Maxwell::NumRenderTargets; ++index) { + const auto format{static_cast(state.color_formats[index])}; + if (format != Tegra::RenderTargetFormat::NONE) { + num = index + 1; + } + } + return num; +} + +template +bool Passes(const std::array& modules, + const std::array& stage_infos) { + for (size_t stage = 0; stage < NUM_STAGES; ++stage) { + if (!Spec::enabled_stages[stage] && modules[stage]) { + return false; + } + const auto& info{stage_infos[stage]}; + if constexpr (!Spec::has_storage_buffers) { + if (!info.storage_buffers_descriptors.empty()) { + return false; + } + } + if constexpr (!Spec::has_texture_buffers) { + if (!info.texture_buffer_descriptors.empty()) { + return false; + } + } + if constexpr (!Spec::has_image_buffers) { + if (!info.image_buffer_descriptors.empty()) { + return false; + } + } + if constexpr (!Spec::has_images) { + if (!info.image_descriptors.empty()) { + return false; + } + } + } + return true; +} + +using ConfigureFuncPtr = void (*)(GraphicsPipeline*, bool); + +template +ConfigureFuncPtr FindSpec(const std::array& modules, + const std::array& stage_infos) { + if constexpr (sizeof...(Specs) > 0) { + if (!Passes(modules, stage_infos)) { + return FindSpec(modules, stage_infos); + } + } + return GraphicsPipeline::MakeConfigureSpecFunc(); +} + +struct SimpleVertexFragmentSpec { + static constexpr std::array enabled_stages{true, false, false, false, true}; + static constexpr bool has_storage_buffers = false; + static constexpr bool has_texture_buffers = false; + static constexpr bool has_image_buffers = false; + static constexpr bool has_images = false; +}; + +struct SimpleVertexSpec { + static constexpr std::array enabled_stages{true, false, false, 
false, false}; + static constexpr bool has_storage_buffers = false; + static constexpr bool has_texture_buffers = false; + static constexpr bool has_image_buffers = false; + static constexpr bool has_images = false; +}; + +struct DefaultSpec { + static constexpr std::array enabled_stages{true, true, true, true, true}; + static constexpr bool has_storage_buffers = true; + static constexpr bool has_texture_buffers = true; + static constexpr bool has_image_buffers = true; + static constexpr bool has_images = true; +}; + +ConfigureFuncPtr ConfigureFunc(const std::array& modules, + const std::array& infos) { + return FindSpec(modules, infos); +} } // Anonymous namespace -VKGraphicsPipeline::VKGraphicsPipeline(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool_, - VKUpdateDescriptorQueue& update_descriptor_queue_, - const GraphicsPipelineCacheKey& key, - vk::Span bindings, - const SPIRVProgram& program, u32 num_color_buffers) - : device{device_}, scheduler{scheduler_}, cache_key{key}, hash{cache_key.Hash()}, - descriptor_set_layout{CreateDescriptorSetLayout(bindings)}, - descriptor_allocator{descriptor_pool_, *descriptor_set_layout}, - update_descriptor_queue{update_descriptor_queue_}, layout{CreatePipelineLayout()}, - descriptor_template{CreateDescriptorUpdateTemplate(program)}, - modules(CreateShaderModules(program)), - pipeline(CreatePipeline(program, cache_key.renderpass, num_color_buffers)) {} - -VKGraphicsPipeline::~VKGraphicsPipeline() = default; - -VkDescriptorSet VKGraphicsPipeline::CommitDescriptorSet() { - if (!descriptor_template) { - return {}; +GraphicsPipeline::GraphicsPipeline( + Tegra::Engines::Maxwell3D& maxwell3d_, Tegra::MemoryManager& gpu_memory_, + VKScheduler& scheduler_, BufferCache& buffer_cache_, TextureCache& texture_cache_, + VideoCore::ShaderNotify* shader_notify, const Device& device_, DescriptorPool& descriptor_pool, + VKUpdateDescriptorQueue& update_descriptor_queue_, Common::ThreadWorker* worker_thread, + RenderPassCache& render_pass_cache, const GraphicsPipelineCacheKey& key_, + std::array stages, + const std::array& infos) + : key{key_}, maxwell3d{maxwell3d_}, gpu_memory{gpu_memory_}, device{device_}, + texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, scheduler{scheduler_}, + update_descriptor_queue{update_descriptor_queue_}, spv_modules{std::move(stages)} { + if (shader_notify) { + shader_notify->MarkShaderBuilding(); } - const VkDescriptorSet set = descriptor_allocator.Commit(); - update_descriptor_queue.Send(*descriptor_template, set); - return set; -} - -vk::DescriptorSetLayout VKGraphicsPipeline::CreateDescriptorSetLayout( - vk::Span bindings) const { - const VkDescriptorSetLayoutCreateInfo ci{ - .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .bindingCount = bindings.size(), - .pBindings = bindings.data(), - }; - return device.GetLogical().CreateDescriptorSetLayout(ci); -} - -vk::PipelineLayout VKGraphicsPipeline::CreatePipelineLayout() const { - const VkPipelineLayoutCreateInfo ci{ - .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .setLayoutCount = 1, - .pSetLayouts = descriptor_set_layout.address(), - .pushConstantRangeCount = 0, - .pPushConstantRanges = nullptr, - }; - return device.GetLogical().CreatePipelineLayout(ci); -} - -vk::DescriptorUpdateTemplateKHR VKGraphicsPipeline::CreateDescriptorUpdateTemplate( - const SPIRVProgram& program) const { - std::vector template_entries; - u32 binding = 0; - u32 offset = 0; - 
for (const auto& stage : program) { - if (stage) { - FillDescriptorUpdateTemplateEntries(stage->entries, binding, offset, template_entries); - } - } - if (template_entries.empty()) { - // If the shader doesn't use descriptor sets, skip template creation. - return {}; - } - - const VkDescriptorUpdateTemplateCreateInfoKHR ci{ - .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, - .pNext = nullptr, - .flags = 0, - .descriptorUpdateEntryCount = static_cast(template_entries.size()), - .pDescriptorUpdateEntries = template_entries.data(), - .templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, - .descriptorSetLayout = *descriptor_set_layout, - .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS, - .pipelineLayout = *layout, - .set = DESCRIPTOR_SET, - }; - return device.GetLogical().CreateDescriptorUpdateTemplateKHR(ci); -} - -std::vector VKGraphicsPipeline::CreateShaderModules( - const SPIRVProgram& program) const { - VkShaderModuleCreateInfo ci{ - .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .codeSize = 0, - .pCode = nullptr, - }; - - std::vector shader_modules; - shader_modules.reserve(Maxwell::MaxShaderStage); - for (std::size_t i = 0; i < Maxwell::MaxShaderStage; ++i) { - const auto& stage = program[i]; - if (!stage) { + for (size_t stage = 0; stage < NUM_STAGES; ++stage) { + const Shader::Info* const info{infos[stage]}; + if (!info) { continue; } - - device.SaveShader(stage->code); - - ci.codeSize = stage->code.size() * sizeof(u32); - ci.pCode = stage->code.data(); - shader_modules.push_back(device.GetLogical().CreateShaderModule(ci)); + stage_infos[stage] = *info; + enabled_uniform_buffer_masks[stage] = info->constant_buffer_mask; + std::ranges::copy(info->constant_buffer_used_sizes, uniform_buffer_sizes[stage].begin()); } - return shader_modules; + auto func{[this, shader_notify, &render_pass_cache, &descriptor_pool] { + DescriptorLayoutBuilder builder{MakeBuilder(device, stage_infos)}; + uses_push_descriptor = builder.CanUsePushDescriptor(); + descriptor_set_layout = builder.CreateDescriptorSetLayout(uses_push_descriptor); + if (!uses_push_descriptor) { + descriptor_allocator = descriptor_pool.Allocator(*descriptor_set_layout, stage_infos); + } + const VkDescriptorSetLayout set_layout{*descriptor_set_layout}; + pipeline_layout = builder.CreatePipelineLayout(set_layout); + descriptor_update_template = + builder.CreateTemplate(set_layout, *pipeline_layout, uses_push_descriptor); + + const VkRenderPass render_pass{render_pass_cache.Get(MakeRenderPassKey(key.state))}; + Validate(); + MakePipeline(render_pass); + + std::lock_guard lock{build_mutex}; + is_built = true; + build_condvar.notify_one(); + if (shader_notify) { + shader_notify->MarkShaderComplete(); + } + }}; + if (worker_thread) { + worker_thread->QueueWork(std::move(func)); + } else { + func(); + } + configure_func = ConfigureFunc(spv_modules, stage_infos); } -vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, - VkRenderPass renderpass, - u32 num_color_buffers) const { - const auto& state = cache_key.fixed_state; - const auto& viewport_swizzles = state.viewport_swizzles; +void GraphicsPipeline::AddTransition(GraphicsPipeline* transition) { + transition_keys.push_back(transition->key); + transitions.push_back(transition); +} - FixedPipelineState::DynamicState dynamic; - if (device.IsExtExtendedDynamicStateSupported()) { - // Insert dummy values, as long as they are valid they don't matter as extended dynamic - // state is 
ignored - dynamic.raw1 = 0; - dynamic.raw2 = 0; - dynamic.vertex_strides.fill(0); - } else { - dynamic = state.dynamic_state; +template +void GraphicsPipeline::ConfigureImpl(bool is_indexed) { + std::array image_view_ids; + std::array image_view_indices; + std::array samplers; + size_t sampler_index{}; + size_t image_index{}; + + texture_cache.SynchronizeGraphicsDescriptors(); + + buffer_cache.SetUniformBuffersState(enabled_uniform_buffer_masks, &uniform_buffer_sizes); + + const auto& regs{maxwell3d.regs}; + const bool via_header_index{regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex}; + const auto config_stage{[&](size_t stage) LAMBDA_FORCEINLINE { + const Shader::Info& info{stage_infos[stage]}; + buffer_cache.UnbindGraphicsStorageBuffers(stage); + if constexpr (Spec::has_storage_buffers) { + size_t ssbo_index{}; + for (const auto& desc : info.storage_buffers_descriptors) { + ASSERT(desc.count == 1); + buffer_cache.BindGraphicsStorageBuffer(stage, ssbo_index, desc.cbuf_index, + desc.cbuf_offset, desc.is_written); + ++ssbo_index; + } + } + const auto& cbufs{maxwell3d.state.shader_stages[stage].const_buffers}; + const auto read_handle{[&](const auto& desc, u32 index) { + ASSERT(cbufs[desc.cbuf_index].enabled); + const u32 index_offset{index << desc.size_shift}; + const u32 offset{desc.cbuf_offset + index_offset}; + const GPUVAddr addr{cbufs[desc.cbuf_index].address + offset}; + if constexpr (std::is_same_v || + std::is_same_v) { + if (desc.has_secondary) { + ASSERT(cbufs[desc.secondary_cbuf_index].enabled); + const u32 second_offset{desc.secondary_cbuf_offset + index_offset}; + const GPUVAddr separate_addr{cbufs[desc.secondary_cbuf_index].address + + second_offset}; + const u32 lhs_raw{gpu_memory.Read(addr)}; + const u32 rhs_raw{gpu_memory.Read(separate_addr)}; + const u32 raw{lhs_raw | rhs_raw}; + return TexturePair(raw, via_header_index); + } + } + return TexturePair(gpu_memory.Read(addr), via_header_index); + }}; + const auto add_image{[&](const auto& desc) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices[image_index++] = handle.first; + } + }}; + if constexpr (Spec::has_texture_buffers) { + for (const auto& desc : info.texture_buffer_descriptors) { + add_image(desc); + } + } + if constexpr (Spec::has_image_buffers) { + for (const auto& desc : info.image_buffer_descriptors) { + add_image(desc); + } + } + for (const auto& desc : info.texture_descriptors) { + for (u32 index = 0; index < desc.count; ++index) { + const auto handle{read_handle(desc, index)}; + image_view_indices[image_index++] = handle.first; + + Sampler* const sampler{texture_cache.GetGraphicsSampler(handle.second)}; + samplers[sampler_index++] = sampler->Handle(); + } + } + if constexpr (Spec::has_images) { + for (const auto& desc : info.image_descriptors) { + add_image(desc); + } + } + }}; + if constexpr (Spec::enabled_stages[0]) { + config_stage(0); + } + if constexpr (Spec::enabled_stages[1]) { + config_stage(1); + } + if constexpr (Spec::enabled_stages[2]) { + config_stage(2); + } + if constexpr (Spec::enabled_stages[3]) { + config_stage(3); + } + if constexpr (Spec::enabled_stages[4]) { + config_stage(4); + } + const std::span indices_span(image_view_indices.data(), image_index); + texture_cache.FillGraphicsImageViews(indices_span, image_view_ids); + + ImageId* texture_buffer_index{image_view_ids.data()}; + const auto bind_stage_info{[&](size_t stage) LAMBDA_FORCEINLINE { + size_t index{}; + const auto add_buffer{[&](const auto& desc) 
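+        // add_buffer consumes image_view_ids in exactly the order
+        // config_stage pushed the handles, which is why the loops below also
+        // advance texture_buffer_index past plain texture and image
+        // descriptors: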
{ + constexpr bool is_image = std::is_same_v; + for (u32 i = 0; i < desc.count; ++i) { + bool is_written{false}; + if constexpr (is_image) { + is_written = desc.is_written; + } + ImageView& image_view{texture_cache.GetImageView(*texture_buffer_index)}; + buffer_cache.BindGraphicsTextureBuffer(stage, index, image_view.GpuAddr(), + image_view.BufferSize(), image_view.format, + is_written, is_image); + ++index; + ++texture_buffer_index; + } + }}; + buffer_cache.UnbindGraphicsTextureBuffers(stage); + + const Shader::Info& info{stage_infos[stage]}; + if constexpr (Spec::has_texture_buffers) { + for (const auto& desc : info.texture_buffer_descriptors) { + add_buffer(desc); + } + } + if constexpr (Spec::has_image_buffers) { + for (const auto& desc : info.image_buffer_descriptors) { + add_buffer(desc); + } + } + for (const auto& desc : info.texture_descriptors) { + texture_buffer_index += desc.count; + } + if constexpr (Spec::has_images) { + for (const auto& desc : info.image_descriptors) { + texture_buffer_index += desc.count; + } + } + }}; + if constexpr (Spec::enabled_stages[0]) { + bind_stage_info(0); + } + if constexpr (Spec::enabled_stages[1]) { + bind_stage_info(1); + } + if constexpr (Spec::enabled_stages[2]) { + bind_stage_info(2); + } + if constexpr (Spec::enabled_stages[3]) { + bind_stage_info(3); + } + if constexpr (Spec::enabled_stages[4]) { + bind_stage_info(4); } - std::vector vertex_bindings; - std::vector vertex_binding_divisors; - for (std::size_t index = 0; index < Maxwell::NumVertexArrays; ++index) { - const bool instanced = state.binding_divisors[index] != 0; - const auto rate = instanced ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX; - vertex_bindings.push_back({ - .binding = static_cast(index), - .stride = dynamic.vertex_strides[index], - .inputRate = rate, + buffer_cache.UpdateGraphicsBuffers(is_indexed); + buffer_cache.BindHostGeometryBuffers(is_indexed); + + update_descriptor_queue.Acquire(); + + const VkSampler* samplers_it{samplers.data()}; + const ImageId* views_it{image_view_ids.data()}; + const auto prepare_stage{[&](size_t stage) LAMBDA_FORCEINLINE { + buffer_cache.BindHostStageBuffers(stage); + PushImageDescriptors(stage_infos[stage], samplers_it, views_it, texture_cache, + update_descriptor_queue); + }}; + if constexpr (Spec::enabled_stages[0]) { + prepare_stage(0); + } + if constexpr (Spec::enabled_stages[1]) { + prepare_stage(1); + } + if constexpr (Spec::enabled_stages[2]) { + prepare_stage(2); + } + if constexpr (Spec::enabled_stages[3]) { + prepare_stage(3); + } + if constexpr (Spec::enabled_stages[4]) { + prepare_stage(4); + } + ConfigureDraw(); +} + +void GraphicsPipeline::ConfigureDraw() { + texture_cache.UpdateRenderTargets(false); + scheduler.RequestRenderpass(texture_cache.GetFramebuffer()); + + if (!is_built.load(std::memory_order::relaxed)) { + // Wait for the pipeline to be built + scheduler.Record([this](vk::CommandBuffer) { + std::unique_lock lock{build_mutex}; + build_condvar.wait(lock, [this] { return is_built.load(std::memory_order::relaxed); }); }); - if (instanced) { - vertex_binding_divisors.push_back({ + } + const bool bind_pipeline{scheduler.UpdateGraphicsPipeline(this)}; + const void* const descriptor_data{update_descriptor_queue.UpdateData()}; + scheduler.Record([this, descriptor_data, bind_pipeline](vk::CommandBuffer cmdbuf) { + if (bind_pipeline) { + cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline); + } + if (!descriptor_set_layout) { + return; + } + if (uses_push_descriptor) { + 
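+            // With VK_KHR_push_descriptor the template writes descriptors
+            // straight into the command buffer; no descriptor set is
+            // allocated, updated, or bound.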
cmdbuf.PushDescriptorSetWithTemplateKHR(*descriptor_update_template, *pipeline_layout, + 0, descriptor_data); + } else { + const VkDescriptorSet descriptor_set{descriptor_allocator.Commit()}; + const vk::Device& dev{device.GetLogical()}; + dev.UpdateDescriptorSet(descriptor_set, *descriptor_update_template, descriptor_data); + cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline_layout, 0, + descriptor_set, nullptr); + } + }); +} + +void GraphicsPipeline::MakePipeline(VkRenderPass render_pass) { + FixedPipelineState::DynamicState dynamic{}; + if (!key.state.extended_dynamic_state) { + dynamic = key.state.dynamic_state; + } + static_vector vertex_bindings; + static_vector vertex_binding_divisors; + static_vector vertex_attributes; + if (key.state.dynamic_vertex_input) { + for (size_t index = 0; index < key.state.attributes.size(); ++index) { + const u32 type = key.state.DynamicAttributeType(index); + if (!stage_infos[0].loads.Generic(index) || type == 0) { + continue; + } + vertex_attributes.push_back({ + .location = static_cast(index), + .binding = 0, + .format = type == 1 ? VK_FORMAT_R32_SFLOAT + : type == 2 ? VK_FORMAT_R32_SINT + : VK_FORMAT_R32_UINT, + .offset = 0, + }); + } + if (!vertex_attributes.empty()) { + vertex_bindings.push_back({ + .binding = 0, + .stride = 4, + .inputRate = VK_VERTEX_INPUT_RATE_VERTEX, + }); + } + } else { + for (size_t index = 0; index < Maxwell::NumVertexArrays; ++index) { + const bool instanced = key.state.binding_divisors[index] != 0; + const auto rate = + instanced ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX; + vertex_bindings.push_back({ .binding = static_cast(index), - .divisor = state.binding_divisors[index], + .stride = dynamic.vertex_strides[index], + .inputRate = rate, + }); + if (instanced) { + vertex_binding_divisors.push_back({ + .binding = static_cast(index), + .divisor = key.state.binding_divisors[index], + }); + } + } + for (size_t index = 0; index < key.state.attributes.size(); ++index) { + const auto& attribute = key.state.attributes[index]; + if (!attribute.enabled || !stage_infos[0].loads.Generic(index)) { + continue; + } + vertex_attributes.push_back({ + .location = static_cast(index), + .binding = attribute.buffer, + .format = MaxwellToVK::VertexFormat(attribute.Type(), attribute.Size()), + .offset = attribute.offset, }); } } - - std::vector vertex_attributes; - const auto& input_attributes = program[0]->entries.attributes; - for (std::size_t index = 0; index < state.attributes.size(); ++index) { - const auto& attribute = state.attributes[index]; - if (!attribute.enabled) { - continue; - } - if (!input_attributes.contains(static_cast(index))) { - // Skip attributes not used by the vertex shaders. 
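
With VK_EXT_vertex_input_dynamic_state, MakePipeline above no longer bakes real attribute formats into the pipeline: it emits one scalar placeholder per active attribute, decoded from the 2-bit code that FixedPipelineState::DynamicAttributeType() packs, and the real layout arrives later through vkCmdSetVertexInputEXT. A minimal sketch of that decode, assuming the same 0-3 encoding the branch above selects on (helper name is illustrative, not yuzu API):

    #include <cstdint>
    #include <optional>
    #include <vulkan/vulkan.h>

    // Map the packed 2-bit attribute type to the placeholder format used at
    // pipeline creation time; code 0 means the attribute is disabled.
    std::optional<VkFormat> PlaceholderFormat(std::uint32_t type) {
        switch (type) {
        case 0:
            return std::nullopt;         // disabled attribute, not emitted
        case 1:
            return VK_FORMAT_R32_SFLOAT; // float attribute
        case 2:
            return VK_FORMAT_R32_SINT;   // signed integer attribute
        default:
            return VK_FORMAT_R32_UINT;   // unsigned integer attribute
        }
    }

All placeholders share binding 0 with a 4-byte stride, which keeps the baked state valid for whatever vertex layout the dynamic state later installs.
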
- continue; - } - vertex_attributes.push_back({ - .location = static_cast(index), - .binding = attribute.buffer, - .format = MaxwellToVK::VertexFormat(attribute.Type(), attribute.Size()), - .offset = attribute.offset, - }); - } - VkPipelineVertexInputStateCreateInfo vertex_input_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, .pNext = nullptr, @@ -264,7 +555,6 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, .vertexAttributeDescriptionCount = static_cast(vertex_attributes.size()), .pVertexAttributeDescriptions = vertex_attributes.data(), }; - const VkPipelineVertexInputDivisorStateCreateInfoEXT input_divisor_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, .pNext = nullptr, @@ -274,78 +564,113 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, if (!vertex_binding_divisors.empty()) { vertex_input_ci.pNext = &input_divisor_ci; } - - const auto input_assembly_topology = MaxwellToVK::PrimitiveTopology(device, state.topology); + auto input_assembly_topology = MaxwellToVK::PrimitiveTopology(device, key.state.topology); + if (input_assembly_topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) { + if (!spv_modules[1] && !spv_modules[2]) { + LOG_WARNING(Render_Vulkan, "Patch topology used without tessellation, using points"); + input_assembly_topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST; + } + } const VkPipelineInputAssemblyStateCreateInfo input_assembly_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, .pNext = nullptr, .flags = 0, - .topology = MaxwellToVK::PrimitiveTopology(device, state.topology), - .primitiveRestartEnable = state.primitive_restart_enable != 0 && + .topology = input_assembly_topology, + .primitiveRestartEnable = key.state.primitive_restart_enable != 0 && SupportsPrimitiveRestart(input_assembly_topology), }; - const VkPipelineTessellationStateCreateInfo tessellation_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, .pNext = nullptr, .flags = 0, - .patchControlPoints = state.patch_control_points_minus_one.Value() + 1, + .patchControlPoints = key.state.patch_control_points_minus_one.Value() + 1, }; - VkPipelineViewportStateCreateInfo viewport_ci{ - .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + std::array swizzles; + std::ranges::transform(key.state.viewport_swizzles, swizzles.begin(), UnpackViewportSwizzle); + const VkPipelineViewportSwizzleStateCreateInfoNV swizzle_ci{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, .pNext = nullptr, .flags = 0, .viewportCount = Maxwell::NumViewports, + .pViewportSwizzles = swizzles.data(), + }; + const VkPipelineViewportStateCreateInfo viewport_ci{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + .pNext = device.IsNvViewportSwizzleSupported() ? 
&swizzle_ci : nullptr, + .flags = 0, + .viewportCount = Maxwell::NumViewports, .pViewports = nullptr, .scissorCount = Maxwell::NumViewports, .pScissors = nullptr, }; - std::array swizzles; - std::ranges::transform(viewport_swizzles, swizzles.begin(), UnpackViewportSwizzle); - VkPipelineViewportSwizzleStateCreateInfoNV swizzle_ci{ - .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, - .pNext = nullptr, - .flags = 0, - .viewportCount = Maxwell::NumViewports, - .pViewportSwizzles = swizzles.data(), - }; - if (device.IsNvViewportSwizzleSupported()) { - viewport_ci.pNext = &swizzle_ci; - } - - const VkPipelineRasterizationStateCreateInfo rasterization_ci{ + VkPipelineRasterizationStateCreateInfo rasterization_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, .pNext = nullptr, .flags = 0, .depthClampEnable = - static_cast(state.depth_clamp_disabled == 0 ? VK_TRUE : VK_FALSE), + static_cast(key.state.depth_clamp_disabled == 0 ? VK_TRUE : VK_FALSE), .rasterizerDiscardEnable = - static_cast(state.rasterize_enable == 0 ? VK_TRUE : VK_FALSE), - .polygonMode = VK_POLYGON_MODE_FILL, + static_cast(key.state.rasterize_enable == 0 ? VK_TRUE : VK_FALSE), + .polygonMode = + MaxwellToVK::PolygonMode(FixedPipelineState::UnpackPolygonMode(key.state.polygon_mode)), .cullMode = static_cast( dynamic.cull_enable ? MaxwellToVK::CullFace(dynamic.CullFace()) : VK_CULL_MODE_NONE), .frontFace = MaxwellToVK::FrontFace(dynamic.FrontFace()), - .depthBiasEnable = state.depth_bias_enable, + .depthBiasEnable = key.state.depth_bias_enable, .depthBiasConstantFactor = 0.0f, .depthBiasClamp = 0.0f, .depthBiasSlopeFactor = 0.0f, .lineWidth = 1.0f, }; + VkPipelineRasterizationLineStateCreateInfoEXT line_state{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + .pNext = nullptr, + .lineRasterizationMode = key.state.smooth_lines != 0 + ? VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT + : VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, + .stippledLineEnable = VK_FALSE, // TODO + .lineStippleFactor = 0, + .lineStipplePattern = 0, + }; + VkPipelineRasterizationConservativeStateCreateInfoEXT conservative_raster{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + .pNext = nullptr, + .flags = 0, + .conservativeRasterizationMode = key.state.conservative_raster_enable != 0 + ? VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT + : VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + .extraPrimitiveOverestimationSize = 0.0f, + }; + VkPipelineRasterizationProvokingVertexStateCreateInfoEXT provoking_vertex{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT, + .pNext = nullptr, + .provokingVertexMode = key.state.provoking_vertex_last != 0 + ? 
VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT + : VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT, + }; + if (IsLine(input_assembly_topology) && device.IsExtLineRasterizationSupported()) { + line_state.pNext = std::exchange(rasterization_ci.pNext, &line_state); + } + if (device.IsExtConservativeRasterizationSupported()) { + conservative_raster.pNext = std::exchange(rasterization_ci.pNext, &conservative_raster); + } + if (device.IsExtProvokingVertexSupported()) { + provoking_vertex.pNext = std::exchange(rasterization_ci.pNext, &provoking_vertex); + } const VkPipelineMultisampleStateCreateInfo multisample_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, .pNext = nullptr, .flags = 0, - .rasterizationSamples = ConvertMsaaMode(state.msaa_mode), + .rasterizationSamples = MaxwellToVK::MsaaMode(key.state.msaa_mode), .sampleShadingEnable = VK_FALSE, .minSampleShading = 0.0f, .pSampleMask = nullptr, .alphaToCoverageEnable = VK_FALSE, .alphaToOneEnable = VK_FALSE, }; - const VkPipelineDepthStencilStateCreateInfo depth_stencil_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, .pNext = nullptr, @@ -355,32 +680,32 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, .depthCompareOp = dynamic.depth_test_enable ? MaxwellToVK::ComparisonOp(dynamic.DepthTestFunc()) : VK_COMPARE_OP_ALWAYS, - .depthBoundsTestEnable = dynamic.depth_bounds_enable, + .depthBoundsTestEnable = dynamic.depth_bounds_enable && device.IsDepthBoundsSupported(), .stencilTestEnable = dynamic.stencil_enable, .front = GetStencilFaceState(dynamic.front), .back = GetStencilFaceState(dynamic.back), .minDepthBounds = 0.0f, .maxDepthBounds = 0.0f, }; - - std::array cb_attachments; - for (std::size_t index = 0; index < num_color_buffers; ++index) { - static constexpr std::array COMPONENT_TABLE{ + if (dynamic.depth_bounds_enable && !device.IsDepthBoundsSupported()) { + LOG_WARNING(Render_Vulkan, "Depth bounds is enabled but not supported"); + } + static_vector cb_attachments; + const size_t num_attachments{NumAttachments(key.state)}; + for (size_t index = 0; index < num_attachments; ++index) { + static constexpr std::array mask_table{ VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, VK_COLOR_COMPONENT_B_BIT, VK_COLOR_COMPONENT_A_BIT, }; - const auto& blend = state.attachments[index]; - - VkColorComponentFlags color_components = 0; - for (std::size_t i = 0; i < COMPONENT_TABLE.size(); ++i) { - if (blend.Mask()[i]) { - color_components |= COMPONENT_TABLE[i]; - } + const auto& blend{key.state.attachments[index]}; + const std::array mask{blend.Mask()}; + VkColorComponentFlags write_mask{}; + for (size_t i = 0; i < mask_table.size(); ++i) { + write_mask |= mask[i] ? 
mask_table[i] : 0; } - - cb_attachments[index] = { + cb_attachments.push_back({ .blendEnable = blend.enable != 0, .srcColorBlendFactor = MaxwellToVK::BlendFactor(blend.SourceRGBFactor()), .dstColorBlendFactor = MaxwellToVK::BlendFactor(blend.DestRGBFactor()), @@ -388,28 +713,27 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, .srcAlphaBlendFactor = MaxwellToVK::BlendFactor(blend.SourceAlphaFactor()), .dstAlphaBlendFactor = MaxwellToVK::BlendFactor(blend.DestAlphaFactor()), .alphaBlendOp = MaxwellToVK::BlendEquation(blend.EquationAlpha()), - .colorWriteMask = color_components, - }; + .colorWriteMask = write_mask, + }); } - const VkPipelineColorBlendStateCreateInfo color_blend_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, .pNext = nullptr, .flags = 0, .logicOpEnable = VK_FALSE, .logicOp = VK_LOGIC_OP_COPY, - .attachmentCount = num_color_buffers, + .attachmentCount = static_cast(cb_attachments.size()), .pAttachments = cb_attachments.data(), .blendConstants = {}, }; - - std::vector dynamic_states{ + static_vector dynamic_states{ VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR, VK_DYNAMIC_STATE_DEPTH_BIAS, VK_DYNAMIC_STATE_BLEND_CONSTANTS, VK_DYNAMIC_STATE_DEPTH_BOUNDS, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE, + VK_DYNAMIC_STATE_LINE_WIDTH, }; - if (device.IsExtExtendedDynamicStateSupported()) { + if (key.state.extended_dynamic_state) { static constexpr std::array extended{ VK_DYNAMIC_STATE_CULL_MODE_EXT, VK_DYNAMIC_STATE_FRONT_FACE_EXT, @@ -421,9 +745,11 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, VK_DYNAMIC_STATE_STENCIL_OP_EXT, }; + if (key.state.dynamic_vertex_input) { + dynamic_states.push_back(VK_DYNAMIC_STATE_VERTEX_INPUT_EXT); + } dynamic_states.insert(dynamic_states.end(), extended.begin(), extended.end()); } - const VkPipelineDynamicStateCreateInfo dynamic_state_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, .pNext = nullptr, @@ -431,34 +757,33 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, .dynamicStateCount = static_cast(dynamic_states.size()), .pDynamicStates = dynamic_states.data(), }; - - const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci{ + [[maybe_unused]] const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci{ .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, .pNext = nullptr, .requiredSubgroupSize = GuestWarpSize, }; - - std::vector shader_stages; - std::size_t module_index = 0; - for (std::size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) { - if (!program[stage]) { + static_vector shader_stages; + for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) { + if (!spv_modules[stage]) { continue; } - - VkPipelineShaderStageCreateInfo& stage_ci = shader_stages.emplace_back(); - stage_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - stage_ci.pNext = nullptr; - stage_ci.flags = 0; - stage_ci.stage = MaxwellToVK::ShaderStage(static_cast(stage)); - stage_ci.module = *modules[module_index++]; - stage_ci.pName = "main"; - stage_ci.pSpecializationInfo = nullptr; - + [[maybe_unused]] auto& stage_ci = + shader_stages.emplace_back(VkPipelineShaderStageCreateInfo{ + .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .stage = 
MaxwellToVK::ShaderStage(Shader::StageFromIndex(stage)), + .module = *spv_modules[stage], + .pName = "main", + .pSpecializationInfo = nullptr, + }); + /* if (program[stage]->entries.uses_warps && device.IsGuestWarpSizeSupported(stage_ci.stage)) { stage_ci.pNext = &subgroup_size_ci; } + */ } - return device.GetLogical().CreateGraphicsPipeline(VkGraphicsPipelineCreateInfo{ + pipeline = device.GetLogical().CreateGraphicsPipeline({ .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, .pNext = nullptr, .flags = 0, @@ -473,12 +798,31 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const SPIRVProgram& program, .pDepthStencilState = &depth_stencil_ci, .pColorBlendState = &color_blend_ci, .pDynamicState = &dynamic_state_ci, - .layout = *layout, - .renderPass = renderpass, + .layout = *pipeline_layout, + .renderPass = render_pass, .subpass = 0, .basePipelineHandle = nullptr, .basePipelineIndex = 0, }); } +void GraphicsPipeline::Validate() { + size_t num_images{}; + for (const auto& info : stage_infos) { + for (const auto& desc : info.texture_buffer_descriptors) { + num_images += desc.count; + } + for (const auto& desc : info.image_buffer_descriptors) { + num_images += desc.count; + } + for (const auto& desc : info.texture_descriptors) { + num_images += desc.count; + } + for (const auto& desc : info.image_descriptors) { + num_images += desc.count; + } + } + ASSERT(num_images <= MAX_IMAGE_ELEMENTS); +} + } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_graphics_pipeline.h b/src/video_core/renderer_vulkan/vk_graphics_pipeline.h index 8b6a98fe0..622267147 100755 --- a/src/video_core/renderer_vulkan/vk_graphics_pipeline.h +++ b/src/video_core/renderer_vulkan/vk_graphics_pipeline.h @@ -1,30 +1,36 @@ -// Copyright 2019 yuzu Emulator Project +// Copyright 2021 yuzu Emulator Project // Licensed under GPLv2 or any later version // Refer to the license.txt file included. 
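
The rasterization state assembled in MakePipeline above gains its optional extension structs (line rasterization, conservative rasterization, provoking vertex) through std::exchange, which prepends each struct to the pNext chain in a single statement: the extension takes over whatever the chain head pointed to, and the head then points at the extension. A self-contained sketch of the pattern with simplified stand-in structs (hypothetical types, not the Vulkan or yuzu definitions):

    #include <cassert>
    #include <utility>

    struct BaseCreateInfo { const void* pNext{nullptr}; };
    struct LineStateExt { const void* pNext{nullptr}; };
    struct ConservativeExt { const void* pNext{nullptr}; };

    int main() {
        BaseCreateInfo base{};
        LineStateExt line{};
        ConservativeExt conservative{};
        // Each std::exchange stores the extension as the new chain head and
        // hands the old head to the extension, so every struct stays
        // reachable: base -> conservative -> line -> nullptr.
        line.pNext = std::exchange(base.pNext, &line);
        conservative.pNext = std::exchange(base.pNext, &conservative);
        assert(base.pNext == &conservative);
        assert(conservative.pNext == &line);
        assert(line.pNext == nullptr);
    }

Because each link is guarded by a device-support check, the chain only ever contains structs the driver has advertised, and chain order is irrelevant to Vulkan.
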
#pragma once +#include #include -#include -#include +#include +#include +#include +#include -#include "common/common_types.h" +#include "common/thread_worker.h" +#include "shader_recompiler/shader_info.h" #include "video_core/engines/maxwell_3d.h" #include "video_core/renderer_vulkan/fixed_pipeline_state.h" +#include "video_core/renderer_vulkan/vk_buffer_cache.h" #include "video_core/renderer_vulkan/vk_descriptor_pool.h" -#include "video_core/renderer_vulkan/vk_shader_decompiler.h" +#include "video_core/renderer_vulkan/vk_texture_cache.h" #include "video_core/vulkan_common/vulkan_wrapper.h" +namespace VideoCore { +class ShaderNotify; +} + namespace Vulkan { -using Maxwell = Tegra::Engines::Maxwell3D::Regs; - struct GraphicsPipelineCacheKey { - VkRenderPass renderpass; - std::array shaders; - FixedPipelineState fixed_state; + std::array unique_hashes; + FixedPipelineState state; - std::size_t Hash() const noexcept; + size_t Hash() const noexcept; bool operator==(const GraphicsPipelineCacheKey& rhs) const noexcept; @@ -32,72 +38,117 @@ struct GraphicsPipelineCacheKey { return !operator==(rhs); } - std::size_t Size() const noexcept { - return sizeof(renderpass) + sizeof(shaders) + fixed_state.Size(); + size_t Size() const noexcept { + return sizeof(unique_hashes) + state.Size(); } }; static_assert(std::has_unique_object_representations_v); static_assert(std::is_trivially_copyable_v); static_assert(std::is_trivially_constructible_v); +} // namespace Vulkan + +namespace std { +template <> +struct hash { + size_t operator()(const Vulkan::GraphicsPipelineCacheKey& k) const noexcept { + return k.Hash(); + } +}; +} // namespace std + +namespace Vulkan { + class Device; -class VKDescriptorPool; +class RenderPassCache; class VKScheduler; class VKUpdateDescriptorQueue; -using SPIRVProgram = std::array, Maxwell::MaxShaderStage>; +class GraphicsPipeline { + static constexpr size_t NUM_STAGES = Tegra::Engines::Maxwell3D::Regs::MaxShaderStage; -class VKGraphicsPipeline final { public: - explicit VKGraphicsPipeline(const Device& device_, VKScheduler& scheduler_, - VKDescriptorPool& descriptor_pool, - VKUpdateDescriptorQueue& update_descriptor_queue_, - const GraphicsPipelineCacheKey& key, - vk::Span bindings, - const SPIRVProgram& program, u32 num_color_buffers); - ~VKGraphicsPipeline(); + explicit GraphicsPipeline( + Tegra::Engines::Maxwell3D& maxwell3d, Tegra::MemoryManager& gpu_memory, + VKScheduler& scheduler, BufferCache& buffer_cache, TextureCache& texture_cache, + VideoCore::ShaderNotify* shader_notify, const Device& device, + DescriptorPool& descriptor_pool, VKUpdateDescriptorQueue& update_descriptor_queue, + Common::ThreadWorker* worker_thread, RenderPassCache& render_pass_cache, + const GraphicsPipelineCacheKey& key, std::array stages, + const std::array& infos); - VkDescriptorSet CommitDescriptorSet(); + GraphicsPipeline& operator=(GraphicsPipeline&&) noexcept = delete; + GraphicsPipeline(GraphicsPipeline&&) noexcept = delete; - VkPipeline GetHandle() const { - return *pipeline; + GraphicsPipeline& operator=(const GraphicsPipeline&) = delete; + GraphicsPipeline(const GraphicsPipeline&) = delete; + + void AddTransition(GraphicsPipeline* transition); + + void Configure(bool is_indexed) { + configure_func(this, is_indexed); } - VkPipelineLayout GetLayout() const { - return *layout; + [[nodiscard]] GraphicsPipeline* Next(const GraphicsPipelineCacheKey& current_key) noexcept { + if (key == current_key) { + return this; + } + const auto it{std::find(transition_keys.begin(), transition_keys.end(), 
current_key)};
+        return it != transition_keys.end() ? transitions[std::distance(transition_keys.begin(), it)]
+                                           : nullptr;
     }
 
-    GraphicsPipelineCacheKey GetCacheKey() const {
-        return cache_key;
+    [[nodiscard]] bool IsBuilt() const noexcept {
+        return is_built.load(std::memory_order::relaxed);
+    }
+
+    template <typename Spec>
+    static auto MakeConfigureSpecFunc() {
+        return [](GraphicsPipeline* pipeline, bool is_indexed) {
+            pipeline->ConfigureImpl<Spec>(is_indexed);
+        };
     }
 
 private:
-    vk::DescriptorSetLayout CreateDescriptorSetLayout(
-        vk::Span<VkDescriptorSetLayoutBinding> bindings) const;
+    template <typename Spec>
+    void ConfigureImpl(bool is_indexed);
 
-    vk::PipelineLayout CreatePipelineLayout() const;
+    void ConfigureDraw();
 
-    vk::DescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplate(
-        const SPIRVProgram& program) const;
+    void MakePipeline(VkRenderPass render_pass);
 
-    std::vector<vk::ShaderModule> CreateShaderModules(const SPIRVProgram& program) const;
-
-    vk::Pipeline CreatePipeline(const SPIRVProgram& program, VkRenderPass renderpass,
-                                u32 num_color_buffers) const;
+    void Validate();
 
+    const GraphicsPipelineCacheKey key;
+    Tegra::Engines::Maxwell3D& maxwell3d;
+    Tegra::MemoryManager& gpu_memory;
     const Device& device;
+    TextureCache& texture_cache;
+    BufferCache& buffer_cache;
     VKScheduler& scheduler;
-    const GraphicsPipelineCacheKey cache_key;
-    const u64 hash;
+    VKUpdateDescriptorQueue& update_descriptor_queue;
+
+    void (*configure_func)(GraphicsPipeline*, bool){};
+
+    std::vector<GraphicsPipelineCacheKey> transition_keys;
+    std::vector<GraphicsPipeline*> transitions;
+
+    std::array<vk::ShaderModule, NUM_STAGES> spv_modules;
+
+    std::array<Shader::Info, NUM_STAGES> stage_infos;
+    std::array<u32, 5> enabled_uniform_buffer_masks{};
+    VideoCommon::UniformBufferSizes uniform_buffer_sizes{};
 
     vk::DescriptorSetLayout descriptor_set_layout;
     DescriptorAllocator descriptor_allocator;
-    VKUpdateDescriptorQueue& update_descriptor_queue;
-    vk::PipelineLayout layout;
-    vk::DescriptorUpdateTemplateKHR descriptor_template;
-    std::vector<vk::ShaderModule> modules;
-
+    vk::PipelineLayout pipeline_layout;
+    vk::DescriptorUpdateTemplateKHR descriptor_update_template;
     vk::Pipeline pipeline;
+
+    std::condition_variable build_condvar;
+    std::mutex build_mutex;
+    std::atomic_bool is_built{false};
+    bool uses_push_descriptor{false};
 };
 
 } // namespace Vulkan
diff --git a/src/video_core/renderer_vulkan/vk_master_semaphore.h b/src/video_core/renderer_vulkan/vk_master_semaphore.h
index ee3cd35d0..4f8688118 100755
--- a/src/video_core/renderer_vulkan/vk_master_semaphore.h
+++ b/src/video_core/renderer_vulkan/vk_master_semaphore.h
@@ -39,9 +39,9 @@ public:
         return KnownGpuTick() >= tick;
     }
 
-    /// Advance to the logical tick.
- void NextTick() noexcept { - ++current_tick; + /// Advance to the logical tick and return the old one + [[nodiscard]] u64 NextTick() noexcept { + return current_tick.fetch_add(1, std::memory_order::relaxed); } /// Refresh the known GPU tick diff --git a/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp b/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp index 8991505ca..a2646fc6d 100755 --- a/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp +++ b/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp @@ -4,444 +4,610 @@ #include #include +#include #include +#include #include #include "common/bit_cast.h" #include "common/cityhash.h" +#include "common/fs/fs.h" +#include "common/fs/path_util.h" #include "common/microprofile.h" +#include "common/thread_worker.h" #include "core/core.h" #include "core/memory.h" +#include "shader_recompiler/backend/spirv/emit_spirv.h" +#include "shader_recompiler/environment.h" +#include "shader_recompiler/frontend/maxwell/control_flow.h" +#include "shader_recompiler/frontend/maxwell/translate_program.h" +#include "shader_recompiler/program_header.h" +#include "video_core/dirty_flags.h" #include "video_core/engines/kepler_compute.h" #include "video_core/engines/maxwell_3d.h" #include "video_core/memory_manager.h" #include "video_core/renderer_vulkan/fixed_pipeline_state.h" #include "video_core/renderer_vulkan/maxwell_to_vk.h" +#include "video_core/renderer_vulkan/pipeline_helper.h" #include "video_core/renderer_vulkan/vk_compute_pipeline.h" #include "video_core/renderer_vulkan/vk_descriptor_pool.h" -#include "video_core/renderer_vulkan/vk_graphics_pipeline.h" #include "video_core/renderer_vulkan/vk_pipeline_cache.h" #include "video_core/renderer_vulkan/vk_rasterizer.h" #include "video_core/renderer_vulkan/vk_scheduler.h" +#include "video_core/renderer_vulkan/vk_shader_util.h" #include "video_core/renderer_vulkan/vk_update_descriptor.h" -#include "video_core/shader/compiler_settings.h" -#include "video_core/shader/memory_util.h" #include "video_core/shader_cache.h" +#include "video_core/shader_environment.h" #include "video_core/shader_notify.h" #include "video_core/vulkan_common/vulkan_device.h" #include "video_core/vulkan_common/vulkan_wrapper.h" namespace Vulkan { - MICROPROFILE_DECLARE(Vulkan_PipelineCache); -using Tegra::Engines::ShaderType; -using VideoCommon::Shader::GetShaderAddress; -using VideoCommon::Shader::GetShaderCode; -using VideoCommon::Shader::KERNEL_MAIN_OFFSET; -using VideoCommon::Shader::ProgramCode; -using VideoCommon::Shader::STAGE_MAIN_OFFSET; - namespace { +using Shader::Backend::SPIRV::EmitSPIRV; +using Shader::Maxwell::MergeDualVertexPrograms; +using Shader::Maxwell::TranslateProgram; +using VideoCommon::ComputeEnvironment; +using VideoCommon::FileEnvironment; +using VideoCommon::GenericEnvironment; +using VideoCommon::GraphicsEnvironment; -constexpr VkDescriptorType UNIFORM_BUFFER = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; -constexpr VkDescriptorType STORAGE_BUFFER = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; -constexpr VkDescriptorType UNIFORM_TEXEL_BUFFER = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER; -constexpr VkDescriptorType COMBINED_IMAGE_SAMPLER = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; -constexpr VkDescriptorType STORAGE_TEXEL_BUFFER = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER; -constexpr VkDescriptorType STORAGE_IMAGE = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE; - -constexpr VideoCommon::Shader::CompilerSettings compiler_settings{ - .depth = VideoCommon::Shader::CompileDepth::FullDecompile, - .disable_else_derivation = true, -}; - -constexpr 
std::size_t GetStageFromProgram(std::size_t program) { - return program == 0 ? 0 : program - 1; +template +auto MakeSpan(Container& container) { + return std::span(container.data(), container.size()); } -constexpr ShaderType GetStageFromProgram(Maxwell::ShaderProgram program) { - return static_cast(GetStageFromProgram(static_cast(program))); -} - -ShaderType GetShaderType(Maxwell::ShaderProgram program) { - switch (program) { - case Maxwell::ShaderProgram::VertexB: - return ShaderType::Vertex; - case Maxwell::ShaderProgram::TesselationControl: - return ShaderType::TesselationControl; - case Maxwell::ShaderProgram::TesselationEval: - return ShaderType::TesselationEval; - case Maxwell::ShaderProgram::Geometry: - return ShaderType::Geometry; - case Maxwell::ShaderProgram::Fragment: - return ShaderType::Fragment; - default: - UNIMPLEMENTED_MSG("program={}", program); - return ShaderType::Vertex; +Shader::CompareFunction MaxwellToCompareFunction(Maxwell::ComparisonOp comparison) { + switch (comparison) { + case Maxwell::ComparisonOp::Never: + case Maxwell::ComparisonOp::NeverOld: + return Shader::CompareFunction::Never; + case Maxwell::ComparisonOp::Less: + case Maxwell::ComparisonOp::LessOld: + return Shader::CompareFunction::Less; + case Maxwell::ComparisonOp::Equal: + case Maxwell::ComparisonOp::EqualOld: + return Shader::CompareFunction::Equal; + case Maxwell::ComparisonOp::LessEqual: + case Maxwell::ComparisonOp::LessEqualOld: + return Shader::CompareFunction::LessThanEqual; + case Maxwell::ComparisonOp::Greater: + case Maxwell::ComparisonOp::GreaterOld: + return Shader::CompareFunction::Greater; + case Maxwell::ComparisonOp::NotEqual: + case Maxwell::ComparisonOp::NotEqualOld: + return Shader::CompareFunction::NotEqual; + case Maxwell::ComparisonOp::GreaterEqual: + case Maxwell::ComparisonOp::GreaterEqualOld: + return Shader::CompareFunction::GreaterThanEqual; + case Maxwell::ComparisonOp::Always: + case Maxwell::ComparisonOp::AlwaysOld: + return Shader::CompareFunction::Always; } + UNIMPLEMENTED_MSG("Unimplemented comparison op={}", comparison); + return {}; } -template -void AddBindings(std::vector& bindings, u32& binding, - VkShaderStageFlags stage_flags, const Container& container) { - const u32 num_entries = static_cast(std::size(container)); - for (std::size_t i = 0; i < num_entries; ++i) { - u32 count = 1; - if constexpr (descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) { - // Combined image samplers can be arrayed. 
- count = container[i].size; +Shader::AttributeType CastAttributeType(const FixedPipelineState::VertexAttribute& attr) { + if (attr.enabled == 0) { + return Shader::AttributeType::Disabled; + } + switch (attr.Type()) { + case Maxwell::VertexAttribute::Type::SignedNorm: + case Maxwell::VertexAttribute::Type::UnsignedNorm: + case Maxwell::VertexAttribute::Type::UnsignedScaled: + case Maxwell::VertexAttribute::Type::SignedScaled: + case Maxwell::VertexAttribute::Type::Float: + return Shader::AttributeType::Float; + case Maxwell::VertexAttribute::Type::SignedInt: + return Shader::AttributeType::SignedInt; + case Maxwell::VertexAttribute::Type::UnsignedInt: + return Shader::AttributeType::UnsignedInt; + } + return Shader::AttributeType::Float; +} + +Shader::AttributeType AttributeType(const FixedPipelineState& state, size_t index) { + switch (state.DynamicAttributeType(index)) { + case 0: + return Shader::AttributeType::Disabled; + case 1: + return Shader::AttributeType::Float; + case 2: + return Shader::AttributeType::SignedInt; + case 3: + return Shader::AttributeType::UnsignedInt; + } + return Shader::AttributeType::Disabled; +} + +Shader::RuntimeInfo MakeRuntimeInfo(std::span programs, + const GraphicsPipelineCacheKey& key, + const Shader::IR::Program& program, + const Shader::IR::Program* previous_program) { + Shader::RuntimeInfo info; + if (previous_program) { + info.previous_stage_stores = previous_program->info.stores; + if (previous_program->is_geometry_passthrough) { + info.previous_stage_stores.mask |= previous_program->info.passthrough.mask; } - bindings.push_back({ - .binding = binding++, - .descriptorType = descriptor_type, - .descriptorCount = count, - .stageFlags = stage_flags, - .pImmutableSamplers = nullptr, - }); + } else { + info.previous_stage_stores.mask.set(); } + const Shader::Stage stage{program.stage}; + const bool has_geometry{key.unique_hashes[4] != 0 && !programs[4].is_geometry_passthrough}; + const bool gl_ndc{key.state.ndc_minus_one_to_one != 0}; + const float point_size{Common::BitCast(key.state.point_size)}; + switch (stage) { + case Shader::Stage::VertexB: + if (!has_geometry) { + if (key.state.topology == Maxwell::PrimitiveTopology::Points) { + info.fixed_state_point_size = point_size; + } + if (key.state.xfb_enabled) { + info.xfb_varyings = VideoCommon::MakeTransformFeedbackVaryings(key.state.xfb_state); + } + info.convert_depth_mode = gl_ndc; + } + if (key.state.dynamic_vertex_input) { + for (size_t index = 0; index < Maxwell::NumVertexAttributes; ++index) { + info.generic_input_types[index] = AttributeType(key.state, index); + } + } else { + std::ranges::transform(key.state.attributes, info.generic_input_types.begin(), + &CastAttributeType); + } + break; + case Shader::Stage::TessellationEval: + // We have to flip tessellation clockwise for some reason... 
+ info.tess_clockwise = key.state.tessellation_clockwise == 0; + info.tess_primitive = [&key] { + const u32 raw{key.state.tessellation_primitive.Value()}; + switch (static_cast(raw)) { + case Maxwell::TessellationPrimitive::Isolines: + return Shader::TessPrimitive::Isolines; + case Maxwell::TessellationPrimitive::Triangles: + return Shader::TessPrimitive::Triangles; + case Maxwell::TessellationPrimitive::Quads: + return Shader::TessPrimitive::Quads; + } + UNREACHABLE(); + return Shader::TessPrimitive::Triangles; + }(); + info.tess_spacing = [&] { + const u32 raw{key.state.tessellation_spacing}; + switch (static_cast(raw)) { + case Maxwell::TessellationSpacing::Equal: + return Shader::TessSpacing::Equal; + case Maxwell::TessellationSpacing::FractionalOdd: + return Shader::TessSpacing::FractionalOdd; + case Maxwell::TessellationSpacing::FractionalEven: + return Shader::TessSpacing::FractionalEven; + } + UNREACHABLE(); + return Shader::TessSpacing::Equal; + }(); + break; + case Shader::Stage::Geometry: + if (program.output_topology == Shader::OutputTopology::PointList) { + info.fixed_state_point_size = point_size; + } + if (key.state.xfb_enabled != 0) { + info.xfb_varyings = VideoCommon::MakeTransformFeedbackVaryings(key.state.xfb_state); + } + info.convert_depth_mode = gl_ndc; + break; + case Shader::Stage::Fragment: + info.alpha_test_func = MaxwellToCompareFunction( + key.state.UnpackComparisonOp(key.state.alpha_test_func.Value())); + info.alpha_test_reference = Common::BitCast(key.state.alpha_test_ref); + break; + default: + break; + } + switch (key.state.topology) { + case Maxwell::PrimitiveTopology::Points: + info.input_topology = Shader::InputTopology::Points; + break; + case Maxwell::PrimitiveTopology::Lines: + case Maxwell::PrimitiveTopology::LineLoop: + case Maxwell::PrimitiveTopology::LineStrip: + info.input_topology = Shader::InputTopology::Lines; + break; + case Maxwell::PrimitiveTopology::Triangles: + case Maxwell::PrimitiveTopology::TriangleStrip: + case Maxwell::PrimitiveTopology::TriangleFan: + case Maxwell::PrimitiveTopology::Quads: + case Maxwell::PrimitiveTopology::QuadStrip: + case Maxwell::PrimitiveTopology::Polygon: + case Maxwell::PrimitiveTopology::Patches: + info.input_topology = Shader::InputTopology::Triangles; + break; + case Maxwell::PrimitiveTopology::LinesAdjacency: + case Maxwell::PrimitiveTopology::LineStripAdjacency: + info.input_topology = Shader::InputTopology::LinesAdjacency; + break; + case Maxwell::PrimitiveTopology::TrianglesAdjacency: + case Maxwell::PrimitiveTopology::TriangleStripAdjacency: + info.input_topology = Shader::InputTopology::TrianglesAdjacency; + break; + } + info.force_early_z = key.state.early_z != 0; + info.y_negate = key.state.y_negate != 0; + return info; } - -u32 FillDescriptorLayout(const ShaderEntries& entries, - std::vector& bindings, - Maxwell::ShaderProgram program_type, u32 base_binding) { - const ShaderType stage = GetStageFromProgram(program_type); - const VkShaderStageFlags flags = MaxwellToVK::ShaderStage(stage); - - u32 binding = base_binding; - AddBindings(bindings, binding, flags, entries.const_buffers); - AddBindings(bindings, binding, flags, entries.global_buffers); - AddBindings(bindings, binding, flags, entries.uniform_texels); - AddBindings(bindings, binding, flags, entries.samplers); - AddBindings(bindings, binding, flags, entries.storage_texels); - AddBindings(bindings, binding, flags, entries.images); - return binding; -} - } // Anonymous namespace -std::size_t GraphicsPipelineCacheKey::Hash() const noexcept { - 
const u64 hash = Common::CityHash64(reinterpret_cast(this), Size()); - return static_cast(hash); -} - -bool GraphicsPipelineCacheKey::operator==(const GraphicsPipelineCacheKey& rhs) const noexcept { - return std::memcmp(&rhs, this, Size()) == 0; -} - -std::size_t ComputePipelineCacheKey::Hash() const noexcept { +size_t ComputePipelineCacheKey::Hash() const noexcept { const u64 hash = Common::CityHash64(reinterpret_cast(this), sizeof *this); - return static_cast(hash); + return static_cast(hash); } bool ComputePipelineCacheKey::operator==(const ComputePipelineCacheKey& rhs) const noexcept { return std::memcmp(&rhs, this, sizeof *this) == 0; } -Shader::Shader(Tegra::Engines::ConstBufferEngineInterface& engine_, ShaderType stage_, - GPUVAddr gpu_addr_, VAddr cpu_addr_, ProgramCode program_code_, u32 main_offset_) - : gpu_addr(gpu_addr_), program_code(std::move(program_code_)), registry(stage_, engine_), - shader_ir(program_code, main_offset_, compiler_settings, registry), - entries(GenerateShaderEntries(shader_ir)) {} - -Shader::~Shader() = default; - -VKPipelineCache::VKPipelineCache(RasterizerVulkan& rasterizer_, Tegra::GPU& gpu_, - Tegra::Engines::Maxwell3D& maxwell3d_, - Tegra::Engines::KeplerCompute& kepler_compute_, - Tegra::MemoryManager& gpu_memory_, const Device& device_, - VKScheduler& scheduler_, VKDescriptorPool& descriptor_pool_, - VKUpdateDescriptorQueue& update_descriptor_queue_) - : VideoCommon::ShaderCache{rasterizer_}, gpu{gpu_}, maxwell3d{maxwell3d_}, - kepler_compute{kepler_compute_}, gpu_memory{gpu_memory_}, device{device_}, - scheduler{scheduler_}, descriptor_pool{descriptor_pool_}, update_descriptor_queue{ - update_descriptor_queue_} {} - -VKPipelineCache::~VKPipelineCache() = default; - -std::array VKPipelineCache::GetShaders() { - std::array shaders{}; - - for (std::size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) { - const auto program{static_cast(index)}; - - // Skip stages that are not enabled - if (!maxwell3d.regs.IsShaderConfigEnabled(index)) { - continue; - } - - const GPUVAddr gpu_addr{GetShaderAddress(maxwell3d, program)}; - const std::optional cpu_addr = gpu_memory.GpuToCpuAddress(gpu_addr); - ASSERT(cpu_addr); - - Shader* result = cpu_addr ? TryGet(*cpu_addr) : null_shader.get(); - if (!result) { - const u8* const host_ptr{gpu_memory.GetPointer(gpu_addr)}; - - // No shader found - create a new one - static constexpr u32 stage_offset = STAGE_MAIN_OFFSET; - const auto stage = static_cast(index == 0 ? 
0 : index - 1); - ProgramCode code = GetShaderCode(gpu_memory, gpu_addr, host_ptr, false); - const std::size_t size_in_bytes = code.size() * sizeof(u64); - - auto shader = std::make_unique(maxwell3d, stage, gpu_addr, *cpu_addr, - std::move(code), stage_offset); - result = shader.get(); - - if (cpu_addr) { - Register(std::move(shader), *cpu_addr, size_in_bytes); - } else { - null_shader = std::move(shader); - } - } - shaders[index] = result; - } - return last_shaders = shaders; +size_t GraphicsPipelineCacheKey::Hash() const noexcept { + const u64 hash = Common::CityHash64(reinterpret_cast(this), Size()); + return static_cast(hash); } -VKGraphicsPipeline* VKPipelineCache::GetGraphicsPipeline( - const GraphicsPipelineCacheKey& key, u32 num_color_buffers, - VideoCommon::Shader::AsyncShaders& async_shaders) { - MICROPROFILE_SCOPE(Vulkan_PipelineCache); - - if (last_graphics_pipeline && last_graphics_key == key) { - return last_graphics_pipeline; - } - last_graphics_key = key; - - if (device.UseAsynchronousShaders() && async_shaders.IsShaderAsync(gpu)) { - std::unique_lock lock{pipeline_cache}; - const auto [pair, is_cache_miss] = graphics_cache.try_emplace(key); - if (is_cache_miss) { - gpu.ShaderNotify().MarkSharderBuilding(); - LOG_INFO(Render_Vulkan, "Compile 0x{:016X}", key.Hash()); - const auto [program, bindings] = DecompileShaders(key.fixed_state); - async_shaders.QueueVulkanShader(this, device, scheduler, descriptor_pool, - update_descriptor_queue, bindings, program, key, - num_color_buffers); - } - last_graphics_pipeline = pair->second.get(); - return last_graphics_pipeline; - } - - const auto [pair, is_cache_miss] = graphics_cache.try_emplace(key); - auto& entry = pair->second; - if (is_cache_miss) { - gpu.ShaderNotify().MarkSharderBuilding(); - LOG_INFO(Render_Vulkan, "Compile 0x{:016X}", key.Hash()); - const auto [program, bindings] = DecompileShaders(key.fixed_state); - entry = std::make_unique(device, scheduler, descriptor_pool, - update_descriptor_queue, key, bindings, - program, num_color_buffers); - gpu.ShaderNotify().MarkShaderComplete(); - } - last_graphics_pipeline = entry.get(); - return last_graphics_pipeline; +bool GraphicsPipelineCacheKey::operator==(const GraphicsPipelineCacheKey& rhs) const noexcept { + return std::memcmp(&rhs, this, Size()) == 0; } -VKComputePipeline& VKPipelineCache::GetComputePipeline(const ComputePipelineCacheKey& key) { +PipelineCache::PipelineCache(RasterizerVulkan& rasterizer_, Tegra::Engines::Maxwell3D& maxwell3d_, + Tegra::Engines::KeplerCompute& kepler_compute_, + Tegra::MemoryManager& gpu_memory_, const Device& device_, + VKScheduler& scheduler_, DescriptorPool& descriptor_pool_, + VKUpdateDescriptorQueue& update_descriptor_queue_, + RenderPassCache& render_pass_cache_, BufferCache& buffer_cache_, + TextureCache& texture_cache_, VideoCore::ShaderNotify& shader_notify_) + : VideoCommon::ShaderCache{rasterizer_, gpu_memory_, maxwell3d_, kepler_compute_}, + device{device_}, scheduler{scheduler_}, descriptor_pool{descriptor_pool_}, + update_descriptor_queue{update_descriptor_queue_}, render_pass_cache{render_pass_cache_}, + buffer_cache{buffer_cache_}, texture_cache{texture_cache_}, shader_notify{shader_notify_}, + use_asynchronous_shaders{Settings::values.use_asynchronous_shaders.GetValue()}, + workers(std::max(std::thread::hardware_concurrency(), 2U) - 1, "yuzu:PipelineBuilder"), + serialization_thread(1, "yuzu:PipelineSerialization") { + const auto& float_control{device.FloatControlProperties()}; + const VkDriverIdKHR 
driver_id{device.GetDriverID()}; + profile = Shader::Profile{ + .supported_spirv = device.IsKhrSpirv1_4Supported() ? 0x00010400U : 0x00010000U, + .unified_descriptor_binding = true, + .support_descriptor_aliasing = true, + .support_int8 = true, + .support_int16 = device.IsShaderInt16Supported(), + .support_int64 = device.IsShaderInt64Supported(), + .support_vertex_instance_id = false, + .support_float_controls = true, + .support_separate_denorm_behavior = float_control.denormBehaviorIndependence == + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR, + .support_separate_rounding_mode = + float_control.roundingModeIndependence == VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR, + .support_fp16_denorm_preserve = float_control.shaderDenormPreserveFloat16 != VK_FALSE, + .support_fp32_denorm_preserve = float_control.shaderDenormPreserveFloat32 != VK_FALSE, + .support_fp16_denorm_flush = float_control.shaderDenormFlushToZeroFloat16 != VK_FALSE, + .support_fp32_denorm_flush = float_control.shaderDenormFlushToZeroFloat32 != VK_FALSE, + .support_fp16_signed_zero_nan_preserve = + float_control.shaderSignedZeroInfNanPreserveFloat16 != VK_FALSE, + .support_fp32_signed_zero_nan_preserve = + float_control.shaderSignedZeroInfNanPreserveFloat32 != VK_FALSE, + .support_fp64_signed_zero_nan_preserve = + float_control.shaderSignedZeroInfNanPreserveFloat64 != VK_FALSE, + .support_explicit_workgroup_layout = device.IsKhrWorkgroupMemoryExplicitLayoutSupported(), + .support_vote = true, + .support_viewport_index_layer_non_geometry = + device.IsExtShaderViewportIndexLayerSupported(), + .support_viewport_mask = device.IsNvViewportArray2Supported(), + .support_typeless_image_loads = device.IsFormatlessImageLoadSupported(), + .support_demote_to_helper_invocation = true, + .support_int64_atomics = device.IsExtShaderAtomicInt64Supported(), + .support_derivative_control = true, + .support_geometry_shader_passthrough = device.IsNvGeometryShaderPassthroughSupported(), + + .warp_size_potentially_larger_than_guest = device.IsWarpSizePotentiallyBiggerThanGuest(), + + .lower_left_origin_mode = false, + .need_declared_frag_colors = false, + + .has_broken_spirv_clamp = driver_id == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR, + .has_broken_unsigned_image_offsets = false, + .has_broken_signed_operations = false, + .has_broken_fp16_float_controls = driver_id == VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, + .ignore_nan_fp_comparisons = false, + }; + host_info = Shader::HostTranslateInfo{ + .support_float16 = device.IsFloat16Supported(), + .support_int64 = device.IsShaderInt64Supported(), + }; +} + +PipelineCache::~PipelineCache() = default; + +GraphicsPipeline* PipelineCache::CurrentGraphicsPipeline() { MICROPROFILE_SCOPE(Vulkan_PipelineCache); - const auto [pair, is_cache_miss] = compute_cache.try_emplace(key); - auto& entry = pair->second; - if (!is_cache_miss) { - return *entry; + if (!RefreshStages(graphics_key.unique_hashes)) { + current_pipeline = nullptr; + return nullptr; } - LOG_INFO(Render_Vulkan, "Compile 0x{:016X}", key.Hash()); + graphics_key.state.Refresh(maxwell3d, device.IsExtExtendedDynamicStateSupported(), + device.IsExtVertexInputDynamicStateSupported()); - const GPUVAddr gpu_addr = key.shader; + if (current_pipeline) { + GraphicsPipeline* const next{current_pipeline->Next(graphics_key)}; + if (next) { + current_pipeline = next; + return BuiltPipeline(current_pipeline); + } + } + return CurrentGraphicsPipelineSlowPath(); +} - const std::optional cpu_addr = gpu_memory.GpuToCpuAddress(gpu_addr); - ASSERT(cpu_addr); 
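
CurrentGraphicsPipeline resolves the common case without touching the hash map: each GraphicsPipeline remembers the keys it has transitioned to and the pipelines they resolved to in two parallel vectors, so a repeating draw sequence is served by a short linear scan. A minimal sketch of that lookup, with simplified key and pipeline types standing in for the real ones:

    #include <algorithm>
    #include <cstdint>
    #include <iterator>
    #include <vector>

    struct Key { // stand-in for GraphicsPipelineCacheKey
        std::uint64_t hash;
        bool operator==(const Key&) const = default;
    };

    struct Pipeline {
        Key key;
        std::vector<Key> transition_keys;   // keys observed after this pipeline
        std::vector<Pipeline*> transitions; // pipelines those keys resolved to

        // Mirrors GraphicsPipeline::Next: self-match first, then a linear
        // scan of recorded transitions; nullptr sends the caller to the slow
        // path and the global cache.
        Pipeline* Next(const Key& current) {
            if (key == current) {
                return this;
            }
            const auto it = std::find(transition_keys.begin(), transition_keys.end(), current);
            return it != transition_keys.end()
                       ? transitions[std::distance(transition_keys.begin(), it)]
                       : nullptr;
        }
    };

The scan stays cheap because a pipeline only records transitions that actually occurred, and consecutive draws tend to cycle through a handful of keys.
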
+ComputePipeline* PipelineCache::CurrentComputePipeline() { + MICROPROFILE_SCOPE(Vulkan_PipelineCache); - Shader* shader = cpu_addr ? TryGet(*cpu_addr) : null_kernel.get(); + const ShaderInfo* const shader{ComputeShader()}; if (!shader) { - // No shader found - create a new one - const auto host_ptr = gpu_memory.GetPointer(gpu_addr); - - ProgramCode code = GetShaderCode(gpu_memory, gpu_addr, host_ptr, true); - const std::size_t size_in_bytes = code.size() * sizeof(u64); - - auto shader_info = std::make_unique(kepler_compute, ShaderType::Compute, gpu_addr, - *cpu_addr, std::move(code), KERNEL_MAIN_OFFSET); - shader = shader_info.get(); - - if (cpu_addr) { - Register(std::move(shader_info), *cpu_addr, size_in_bytes); - } else { - null_kernel = std::move(shader_info); - } + return nullptr; } - - const Specialization specialization{ - .base_binding = 0, - .workgroup_size = key.workgroup_size, - .shared_memory_size = key.shared_memory_size, - .point_size = std::nullopt, - .enabled_attributes = {}, - .attribute_types = {}, - .ndc_minus_one_to_one = false, + const auto& qmd{kepler_compute.launch_description}; + const ComputePipelineCacheKey key{ + .unique_hash = shader->unique_hash, + .shared_memory_size = qmd.shared_alloc, + .workgroup_size{qmd.block_dim_x, qmd.block_dim_y, qmd.block_dim_z}, }; - const SPIRVShader spirv_shader{Decompile(device, shader->GetIR(), ShaderType::Compute, - shader->GetRegistry(), specialization), - shader->GetEntries()}; - entry = std::make_unique(device, scheduler, descriptor_pool, - update_descriptor_queue, spirv_shader); - return *entry; + const auto [pair, is_new]{compute_cache.try_emplace(key)}; + auto& pipeline{pair->second}; + if (!is_new) { + return pipeline.get(); + } + pipeline = CreateComputePipeline(key, shader); + return pipeline.get(); } -void VKPipelineCache::EmplacePipeline(std::unique_ptr pipeline) { - gpu.ShaderNotify().MarkShaderComplete(); - std::unique_lock lock{pipeline_cache}; - graphics_cache.at(pipeline->GetCacheKey()) = std::move(pipeline); -} - -void VKPipelineCache::OnShaderRemoval(Shader* shader) { - bool finished = false; - const auto Finish = [&] { - // TODO(Rodrigo): Instead of finishing here, wait for the fences that use this pipeline and - // flush. 
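
Both pipeline caches use the same memoization shape: try_emplace inserts a default (null) entry and reports whether the key was new, so the map is probed exactly once per request and a miss leaves a slot ready to fill in place. A small sketch of the idiom, with a hypothetical Make() standing in for the CreateComputePipeline call:

    #include <cstdint>
    #include <memory>
    #include <unordered_map>

    struct Pipeline {};

    std::unique_ptr<Pipeline> Make() { // stand-in for pipeline construction
        return std::make_unique<Pipeline>();
    }

    Pipeline* GetOrCreate(std::unordered_map<std::uint64_t, std::unique_ptr<Pipeline>>& cache,
                          std::uint64_t key) {
        const auto [pair, is_new] = cache.try_emplace(key);
        auto& pipeline = pair->second;
        if (is_new) {
            pipeline = Make(); // fill the slot created by the miss
        }
        return pipeline.get();
    }

Note that a failed build can leave a null entry cached; the graphics slow path below checks for a null pipeline after construction for exactly that reason.
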
- if (finished) { - return; - } - finished = true; - scheduler.Finish(); - }; - - const GPUVAddr invalidated_addr = shader->GetGpuAddr(); - for (auto it = graphics_cache.begin(); it != graphics_cache.end();) { - auto& entry = it->first; - if (std::find(entry.shaders.begin(), entry.shaders.end(), invalidated_addr) == - entry.shaders.end()) { - ++it; - continue; - } - Finish(); - it = graphics_cache.erase(it); - } - for (auto it = compute_cache.begin(); it != compute_cache.end();) { - auto& entry = it->first; - if (entry.shader != invalidated_addr) { - ++it; - continue; - } - Finish(); - it = compute_cache.erase(it); - } -} - -std::pair> -VKPipelineCache::DecompileShaders(const FixedPipelineState& fixed_state) { - Specialization specialization; - if (fixed_state.topology == Maxwell::PrimitiveTopology::Points) { - float point_size; - std::memcpy(&point_size, &fixed_state.point_size, sizeof(float)); - specialization.point_size = point_size; - ASSERT(point_size != 0.0f); - } - for (std::size_t i = 0; i < Maxwell::NumVertexAttributes; ++i) { - const auto& attribute = fixed_state.attributes[i]; - specialization.enabled_attributes[i] = attribute.enabled.Value() != 0; - specialization.attribute_types[i] = attribute.Type(); - } - specialization.ndc_minus_one_to_one = fixed_state.ndc_minus_one_to_one; - specialization.early_fragment_tests = fixed_state.early_z; - - // Alpha test - specialization.alpha_test_func = - FixedPipelineState::UnpackComparisonOp(fixed_state.alpha_test_func.Value()); - specialization.alpha_test_ref = Common::BitCast(fixed_state.alpha_test_ref); - - SPIRVProgram program; - std::vector bindings; - - for (std::size_t index = 1; index < Maxwell::MaxShaderProgram; ++index) { - const auto program_enum = static_cast(index); - // Skip stages that are not enabled - if (!maxwell3d.regs.IsShaderConfigEnabled(index)) { - continue; - } - const GPUVAddr gpu_addr = GetShaderAddress(maxwell3d, program_enum); - const std::optional cpu_addr = gpu_memory.GpuToCpuAddress(gpu_addr); - Shader* const shader = cpu_addr ? TryGet(*cpu_addr) : null_shader.get(); - - const std::size_t stage = index == 0 ? 
0 : index - 1; // Stage indices are 0 - 5 - const ShaderType program_type = GetShaderType(program_enum); - const auto& entries = shader->GetEntries(); - program[stage] = { - Decompile(device, shader->GetIR(), program_type, shader->GetRegistry(), specialization), - entries, - }; - - const u32 old_binding = specialization.base_binding; - specialization.base_binding = - FillDescriptorLayout(entries, bindings, program_enum, specialization.base_binding); - ASSERT(old_binding + entries.NumBindings() == specialization.base_binding); - } - return {std::move(program), std::move(bindings)}; -} - -template -void AddEntry(std::vector& template_entries, u32& binding, - u32& offset, const Container& container) { - static constexpr u32 entry_size = static_cast(sizeof(DescriptorUpdateEntry)); - const u32 count = static_cast(std::size(container)); - - if constexpr (descriptor_type == COMBINED_IMAGE_SAMPLER) { - for (u32 i = 0; i < count; ++i) { - const u32 num_samplers = container[i].size; - template_entries.push_back({ - .dstBinding = binding, - .dstArrayElement = 0, - .descriptorCount = num_samplers, - .descriptorType = descriptor_type, - .offset = offset, - .stride = entry_size, - }); - - ++binding; - offset += num_samplers * entry_size; - } +void PipelineCache::LoadDiskResources(u64 title_id, std::stop_token stop_loading, + const VideoCore::DiskResourceLoadCallback& callback) { + if (title_id == 0) { return; } - - if constexpr (descriptor_type == UNIFORM_TEXEL_BUFFER || - descriptor_type == STORAGE_TEXEL_BUFFER) { - // Nvidia has a bug where updating multiple texels at once causes the driver to crash. - // Note: Fixed in driver Windows 443.24, Linux 440.66.15 - for (u32 i = 0; i < count; ++i) { - template_entries.push_back({ - .dstBinding = binding + i, - .dstArrayElement = 0, - .descriptorCount = 1, - .descriptorType = descriptor_type, - .offset = static_cast(offset + i * entry_size), - .stride = entry_size, - }); - } - } else if (count > 0) { - template_entries.push_back({ - .dstBinding = binding, - .dstArrayElement = 0, - .descriptorCount = count, - .descriptorType = descriptor_type, - .offset = offset, - .stride = entry_size, - }); + const auto shader_dir{Common::FS::GetYuzuPath(Common::FS::YuzuPath::ShaderDir)}; + const auto base_dir{shader_dir / fmt::format("{:016x}", title_id)}; + if (!Common::FS::CreateDir(shader_dir) || !Common::FS::CreateDir(base_dir)) { + LOG_ERROR(Common_Filesystem, "Failed to create pipeline cache directories"); + return; } - offset += count * entry_size; - binding += count; + pipeline_cache_filename = base_dir / "vulkan.bin"; + + struct { + std::mutex mutex; + size_t total{}; + size_t built{}; + bool has_loaded{}; + } state; + + const auto load_compute{[&](std::ifstream& file, FileEnvironment env) { + ComputePipelineCacheKey key; + file.read(reinterpret_cast(&key), sizeof(key)); + + workers.QueueWork([this, key, env = std::move(env), &state, &callback]() mutable { + ShaderPools pools; + auto pipeline{CreateComputePipeline(pools, key, env, false)}; + std::lock_guard lock{state.mutex}; + if (pipeline) { + compute_cache.emplace(key, std::move(pipeline)); + } + ++state.built; + if (state.has_loaded) { + callback(VideoCore::LoadCallbackStage::Build, state.built, state.total); + } + }); + ++state.total; + }}; + const bool extended_dynamic_state = device.IsExtExtendedDynamicStateSupported(); + const bool dynamic_vertex_input = device.IsExtVertexInputDynamicStateSupported(); + const auto load_graphics{[&](std::ifstream& file, std::vector envs) { + GraphicsPipelineCacheKey key; + 
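
LoadDiskResources below queues one translation job per cached entry onto the worker pool and funnels completion through a small mutex-guarded counter; the progress callback is only armed (has_loaded) once enumeration has finished, so total is stable by the time builds report in. A self-contained sketch of the pattern, using plain std::thread in place of Common::ThreadWorker:

    #include <cstddef>
    #include <cstdio>
    #include <mutex>
    #include <thread>
    #include <vector>

    int main() {
        struct {
            std::mutex mutex;
            std::size_t total{};
            std::size_t built{};
            bool has_loaded{}; // armed once every job has been counted
        } state;

        std::vector<std::thread> workers;
        for (int job = 0; job < 8; ++job) {
            workers.emplace_back([&state] {
                // ... build one pipeline ...
                std::lock_guard lock{state.mutex};
                ++state.built;
                if (state.has_loaded) {
                    std::printf("built %zu/%zu\n", state.built, state.total);
                }
            });
            std::lock_guard lock{state.mutex};
            ++state.total; // guarded here for simplicity; the patch counts
                           // on the enqueueing thread before arming instead
        }
        {
            std::lock_guard lock{state.mutex};
            state.has_loaded = true;
        }
        for (auto& worker : workers) {
            worker.join();
        }
    }
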
file.read(reinterpret_cast(&key), sizeof(key)); + + if ((key.state.extended_dynamic_state != 0) != extended_dynamic_state || + (key.state.dynamic_vertex_input != 0) != dynamic_vertex_input) { + return; + } + workers.QueueWork([this, key, envs = std::move(envs), &state, &callback]() mutable { + ShaderPools pools; + boost::container::static_vector env_ptrs; + for (auto& env : envs) { + env_ptrs.push_back(&env); + } + auto pipeline{CreateGraphicsPipeline(pools, key, MakeSpan(env_ptrs), false)}; + + std::lock_guard lock{state.mutex}; + graphics_cache.emplace(key, std::move(pipeline)); + ++state.built; + if (state.has_loaded) { + callback(VideoCore::LoadCallbackStage::Build, state.built, state.total); + } + }); + ++state.total; + }}; + VideoCommon::LoadPipelines(stop_loading, pipeline_cache_filename, load_compute, load_graphics); + + std::unique_lock lock{state.mutex}; + callback(VideoCore::LoadCallbackStage::Build, 0, state.total); + state.has_loaded = true; + lock.unlock(); + + workers.WaitForRequests(); } -void FillDescriptorUpdateTemplateEntries( - const ShaderEntries& entries, u32& binding, u32& offset, - std::vector& template_entries) { - AddEntry(template_entries, offset, binding, entries.const_buffers); - AddEntry(template_entries, offset, binding, entries.global_buffers); - AddEntry(template_entries, offset, binding, entries.uniform_texels); - AddEntry(template_entries, offset, binding, entries.samplers); - AddEntry(template_entries, offset, binding, entries.storage_texels); - AddEntry(template_entries, offset, binding, entries.images); +GraphicsPipeline* PipelineCache::CurrentGraphicsPipelineSlowPath() { + const auto [pair, is_new]{graphics_cache.try_emplace(graphics_key)}; + auto& pipeline{pair->second}; + if (is_new) { + pipeline = CreateGraphicsPipeline(); + } + if (!pipeline) { + return nullptr; + } + if (current_pipeline) { + current_pipeline->AddTransition(pipeline.get()); + } + current_pipeline = pipeline.get(); + return BuiltPipeline(current_pipeline); +} + +GraphicsPipeline* PipelineCache::BuiltPipeline(GraphicsPipeline* pipeline) const noexcept { + if (pipeline->IsBuilt()) { + return pipeline; + } + if (!use_asynchronous_shaders) { + return pipeline; + } + // If something is using depth, we can assume that games are not rendering anything which + // will be used one time. + if (maxwell3d.regs.zeta_enable) { + return nullptr; + } + // If games are using a small index count, we can assume these are full screen quads. 
+ // Usually these shaders are only used once for building textures so we can assume they + // can't be built async + if (maxwell3d.regs.index_array.count <= 6 || maxwell3d.regs.vertex_buffer.count <= 6) { + return pipeline; + } + return nullptr; +} + +std::unique_ptr PipelineCache::CreateGraphicsPipeline( + ShaderPools& pools, const GraphicsPipelineCacheKey& key, + std::span envs, bool build_in_parallel) try { + LOG_INFO(Render_Vulkan, "0x{:016x}", key.Hash()); + size_t env_index{0}; + std::array programs; + const bool uses_vertex_a{key.unique_hashes[0] != 0}; + const bool uses_vertex_b{key.unique_hashes[1] != 0}; + for (size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) { + if (key.unique_hashes[index] == 0) { + continue; + } + Shader::Environment& env{*envs[env_index]}; + ++env_index; + + const u32 cfg_offset{static_cast(env.StartAddress() + sizeof(Shader::ProgramHeader))}; + Shader::Maxwell::Flow::CFG cfg(env, pools.flow_block, cfg_offset, index == 0); + if (!uses_vertex_a || index != 1) { + // Normal path + programs[index] = TranslateProgram(pools.inst, pools.block, env, cfg, host_info); + } else { + // VertexB path when VertexA is present. + auto& program_va{programs[0]}; + auto program_vb{TranslateProgram(pools.inst, pools.block, env, cfg, host_info)}; + programs[index] = MergeDualVertexPrograms(program_va, program_vb, env); + } + } + std::array infos{}; + std::array modules; + + const Shader::IR::Program* previous_stage{}; + Shader::Backend::Bindings binding; + for (size_t index = uses_vertex_a && uses_vertex_b ? 1 : 0; index < Maxwell::MaxShaderProgram; + ++index) { + if (key.unique_hashes[index] == 0) { + continue; + } + UNIMPLEMENTED_IF(index == 0); + + Shader::IR::Program& program{programs[index]}; + const size_t stage_index{index - 1}; + infos[stage_index] = &program.info; + + const auto runtime_info{MakeRuntimeInfo(programs, key, program, previous_stage)}; + const std::vector code{EmitSPIRV(profile, runtime_info, program, binding)}; + device.SaveShader(code); + modules[stage_index] = BuildShader(device, code); + if (device.HasDebuggingToolAttached()) { + const std::string name{fmt::format("Shader {:016x}", key.unique_hashes[index])}; + modules[stage_index].SetObjectNameEXT(name.c_str()); + } + previous_stage = &program; + } + Common::ThreadWorker* const thread_worker{build_in_parallel ? 
&workers : nullptr}; + return std::make_unique<GraphicsPipeline>( + maxwell3d, gpu_memory, scheduler, buffer_cache, texture_cache, &shader_notify, device, + descriptor_pool, update_descriptor_queue, thread_worker, render_pass_cache, key, + std::move(modules), infos); + +} catch (const Shader::Exception& exception) { + LOG_ERROR(Render_Vulkan, "{}", exception.what()); + return nullptr; +} + +std::unique_ptr<GraphicsPipeline> PipelineCache::CreateGraphicsPipeline() { + GraphicsEnvironments environments; + GetGraphicsEnvironments(environments, graphics_key.unique_hashes); + + main_pools.ReleaseContents(); + auto pipeline{CreateGraphicsPipeline(main_pools, graphics_key, environments.Span(), true)}; + if (!pipeline || pipeline_cache_filename.empty()) { + return pipeline; + } + serialization_thread.QueueWork([this, key = graphics_key, envs = std::move(environments.envs)] { + boost::container::static_vector<const GenericEnvironment*, Maxwell::MaxShaderProgram> + env_ptrs; + for (size_t index = 0; index < Maxwell::MaxShaderProgram; ++index) { + if (key.unique_hashes[index] != 0) { + env_ptrs.push_back(&envs[index]); + } + } + SerializePipeline(key, env_ptrs, pipeline_cache_filename); + }); + return pipeline; +} + +std::unique_ptr<ComputePipeline> PipelineCache::CreateComputePipeline( + const ComputePipelineCacheKey& key, const ShaderInfo* shader) { + const GPUVAddr program_base{kepler_compute.regs.code_loc.Address()}; + const auto& qmd{kepler_compute.launch_description}; + ComputeEnvironment env{kepler_compute, gpu_memory, program_base, qmd.program_start}; + env.SetCachedSize(shader->size_bytes); + + main_pools.ReleaseContents(); + auto pipeline{CreateComputePipeline(main_pools, key, env, true)}; + if (!pipeline || pipeline_cache_filename.empty()) { + return pipeline; + } + serialization_thread.QueueWork([this, key, env = std::move(env)] { + SerializePipeline(key, std::array<const GenericEnvironment*, 1>{&env}, + pipeline_cache_filename); + }); + return pipeline; +} + +std::unique_ptr<ComputePipeline> PipelineCache::CreateComputePipeline( + ShaderPools& pools, const ComputePipelineCacheKey& key, Shader::Environment& env, + bool build_in_parallel) try { + LOG_INFO(Render_Vulkan, "0x{:016x}", key.Hash()); + + Shader::Maxwell::Flow::CFG cfg{env, pools.flow_block, env.StartAddress()}; + auto program{TranslateProgram(pools.inst, pools.block, env, cfg, host_info)}; + const std::vector<u32> code{EmitSPIRV(profile, program)}; + device.SaveShader(code); + vk::ShaderModule spv_module{BuildShader(device, code)}; + if (device.HasDebuggingToolAttached()) { + const auto name{fmt::format("Shader {:016x}", key.unique_hash)}; + spv_module.SetObjectNameEXT(name.c_str()); + } + Common::ThreadWorker* const thread_worker{build_in_parallel ?
&workers : nullptr}; + return std::make_unique<ComputePipeline>(device, descriptor_pool, update_descriptor_queue, + thread_worker, &shader_notify, program.info, + std::move(spv_module)); + +} catch (const Shader::Exception& exception) { + LOG_ERROR(Render_Vulkan, "{}", exception.what()); + return nullptr; } } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_pipeline_cache.h b/src/video_core/renderer_vulkan/vk_pipeline_cache.h index 89d635a3d..efe5a7ed8 100755 --- a/src/video_core/renderer_vulkan/vk_pipeline_cache.h +++ b/src/video_core/renderer_vulkan/vk_pipeline_cache.h @@ -6,24 +6,28 @@ #include #include +#include +#include #include #include #include #include #include -#include - #include "common/common_types.h" -#include "video_core/engines/const_buffer_engine_interface.h" +#include "common/thread_worker.h" +#include "shader_recompiler/frontend/ir/basic_block.h" +#include "shader_recompiler/frontend/ir/value.h" +#include "shader_recompiler/frontend/maxwell/control_flow.h" +#include "shader_recompiler/host_translate_info.h" +#include "shader_recompiler/object_pool.h" +#include "shader_recompiler/profile.h" #include "video_core/engines/maxwell_3d.h" #include "video_core/renderer_vulkan/fixed_pipeline_state.h" +#include "video_core/renderer_vulkan/vk_buffer_cache.h" +#include "video_core/renderer_vulkan/vk_compute_pipeline.h" #include "video_core/renderer_vulkan/vk_graphics_pipeline.h" -#include "video_core/renderer_vulkan/vk_shader_decompiler.h" -#include "video_core/shader/async_shaders.h" -#include "video_core/shader/memory_util.h" -#include "video_core/shader/registry.h" -#include "video_core/shader/shader_ir.h" +#include "video_core/renderer_vulkan/vk_texture_cache.h" #include "video_core/shader_cache.h" #include "video_core/vulkan_common/vulkan_wrapper.h" @@ -31,23 +35,24 @@ namespace Core { class System; } -namespace Vulkan { +namespace Shader::IR { +struct Program; +} -class Device; -class RasterizerVulkan; -class VKComputePipeline; -class VKDescriptorPool; -class VKScheduler; -class VKUpdateDescriptorQueue; +namespace VideoCore { +class ShaderNotify; +} + +namespace Vulkan { using Maxwell = Tegra::Engines::Maxwell3D::Regs; struct ComputePipelineCacheKey { - GPUVAddr shader; + u64 unique_hash; u32 shared_memory_size; std::array<u32, 3> workgroup_size; - std::size_t Hash() const noexcept; + size_t Hash() const noexcept; bool operator==(const ComputePipelineCacheKey& rhs) const noexcept; @@ -63,16 +68,9 @@ static_assert(std::is_trivially_constructible_v); namespace std { -template <> -struct hash<Vulkan::GraphicsPipelineCacheKey> { - std::size_t operator()(const Vulkan::GraphicsPipelineCacheKey& k) const noexcept { - return k.Hash(); - } -}; - template <> struct hash<Vulkan::ComputePipelineCacheKey> { - std::size_t operator()(const Vulkan::ComputePipelineCacheKey& k) const noexcept { + size_t operator()(const Vulkan::ComputePipelineCacheKey& k) const noexcept { return k.Hash(); } }; @@ -81,94 +79,90 @@ struct hash { namespace Vulkan { -class Shader { -public: - explicit Shader(Tegra::Engines::ConstBufferEngineInterface& engine_, - Tegra::Engines::ShaderType stage_, GPUVAddr gpu_addr, VAddr cpu_addr_, - VideoCommon::Shader::ProgramCode program_code, u32 main_offset_); - ~Shader(); +class ComputePipeline; +class Device; +class DescriptorPool; +class RasterizerVulkan; +class RenderPassCache; +class VKScheduler; +class VKUpdateDescriptorQueue; - GPUVAddr GetGpuAddr() const { - return gpu_addr; +using VideoCommon::ShaderInfo; + +struct ShaderPools { + void ReleaseContents() { + flow_block.ReleaseContents(); + block.ReleaseContents(); + inst.ReleaseContents(); + }
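// Illustrative sketch (not from this patch): the ShaderPools struct above bundles three
// arena-style object pools that the pipeline cache resets between shader translations.
// A minimal pool with the same Create()/ReleaseContents() contract could look like the
// code below; Shader::ObjectPool in shader_recompiler is chunked and more elaborate, so
// treat the names and layout here as assumptions.
#include <deque>
#include <utility>

template <typename T>
class SimpleObjectPool {
public:
    template <typename... Args>
    T* Create(Args&&... args) {
        // std::deque never relocates existing elements, so returned pointers
        // stay valid until ReleaseContents() is called.
        return &storage.emplace_back(std::forward<Args>(args)...);
    }

    void ReleaseContents() {
        // Drop every object at once; per-object lifetimes are never tracked,
        // which is what makes the pool cheap for one-shot IR construction.
        storage.clear();
    }

private:
    std::deque<T> storage;
};

// Usage mirroring main_pools.ReleaseContents() before each pipeline build:
//   SimpleObjectPool<int> pool;
//   int* value = pool.Create(42);
//   pool.ReleaseContents(); // 'value' now dangles, as intended between builds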
- VideoCommon::Shader::ShaderIR& GetIR() { - return shader_ir; - } - - const VideoCommon::Shader::ShaderIR& GetIR() const { - return shader_ir; - } - - const VideoCommon::Shader::Registry& GetRegistry() const { - return registry; - } - - const ShaderEntries& GetEntries() const { - return entries; - } - -private: - GPUVAddr gpu_addr{}; - VideoCommon::Shader::ProgramCode program_code; - VideoCommon::Shader::Registry registry; - VideoCommon::Shader::ShaderIR shader_ir; - ShaderEntries entries; + Shader::ObjectPool<Shader::IR::Inst> inst; + Shader::ObjectPool<Shader::IR::Block> block; + Shader::ObjectPool<Shader::Maxwell::Flow::Block> flow_block; }; -class VKPipelineCache final : public VideoCommon::ShaderCache<Shader> { +class PipelineCache : public VideoCommon::ShaderCache { public: - explicit VKPipelineCache(RasterizerVulkan& rasterizer, Tegra::GPU& gpu, - Tegra::Engines::Maxwell3D& maxwell3d, - Tegra::Engines::KeplerCompute& kepler_compute, - Tegra::MemoryManager& gpu_memory, const Device& device, - VKScheduler& scheduler, VKDescriptorPool& descriptor_pool, - VKUpdateDescriptorQueue& update_descriptor_queue); - ~VKPipelineCache() override; + explicit PipelineCache(RasterizerVulkan& rasterizer, Tegra::Engines::Maxwell3D& maxwell3d, + Tegra::Engines::KeplerCompute& kepler_compute, + Tegra::MemoryManager& gpu_memory, const Device& device, + VKScheduler& scheduler, DescriptorPool& descriptor_pool, + VKUpdateDescriptorQueue& update_descriptor_queue, + RenderPassCache& render_pass_cache, BufferCache& buffer_cache, + TextureCache& texture_cache, VideoCore::ShaderNotify& shader_notify_); + ~PipelineCache(); - std::array<Shader*, Maxwell::MaxShaderProgram> GetShaders(); + [[nodiscard]] GraphicsPipeline* CurrentGraphicsPipeline(); - VKGraphicsPipeline* GetGraphicsPipeline(const GraphicsPipelineCacheKey& key, - u32 num_color_buffers, - VideoCommon::Shader::AsyncShaders& async_shaders); + [[nodiscard]] ComputePipeline* CurrentComputePipeline(); - VKComputePipeline& GetComputePipeline(const ComputePipelineCacheKey& key); - - void EmplacePipeline(std::unique_ptr<VKGraphicsPipeline> pipeline); - -protected: - void OnShaderRemoval(Shader* shader) final; + void LoadDiskResources(u64 title_id, std::stop_token stop_loading, + const VideoCore::DiskResourceLoadCallback& callback); private: - std::pair<SPIRVProgram, std::vector<VkDescriptorSetLayoutBinding>> DecompileShaders( - const FixedPipelineState& fixed_state); + [[nodiscard]] GraphicsPipeline* CurrentGraphicsPipelineSlowPath(); - Tegra::GPU& gpu; - Tegra::Engines::Maxwell3D& maxwell3d; - Tegra::Engines::KeplerCompute& kepler_compute; - Tegra::MemoryManager& gpu_memory; + [[nodiscard]] GraphicsPipeline* BuiltPipeline(GraphicsPipeline* pipeline) const noexcept; + + std::unique_ptr<GraphicsPipeline> CreateGraphicsPipeline(); + + std::unique_ptr<GraphicsPipeline> CreateGraphicsPipeline( + ShaderPools& pools, const GraphicsPipelineCacheKey& key, + std::span<Shader::Environment* const> envs, bool build_in_parallel); + + std::unique_ptr<ComputePipeline> CreateComputePipeline(const ComputePipelineCacheKey& key, + const ShaderInfo* shader); + + std::unique_ptr<ComputePipeline> CreateComputePipeline(ShaderPools& pools, + const ComputePipelineCacheKey& key, + Shader::Environment& env, + bool build_in_parallel); const Device& device; VKScheduler& scheduler; - VKDescriptorPool& descriptor_pool; + DescriptorPool& descriptor_pool; VKUpdateDescriptorQueue& update_descriptor_queue; + RenderPassCache& render_pass_cache; + BufferCache& buffer_cache; + TextureCache& texture_cache; + VideoCore::ShaderNotify& shader_notify; + bool use_asynchronous_shaders{}; - std::unique_ptr<Shader> null_shader; - std::unique_ptr<Shader> null_kernel; + GraphicsPipelineCacheKey graphics_key{}; + GraphicsPipeline* current_pipeline{}; - std::array<Shader*, Maxwell::MaxShaderProgram> last_shaders{}; + std::unordered_map<ComputePipelineCacheKey, std::unique_ptr<ComputePipeline>>
compute_cache; + std::unordered_map<GraphicsPipelineCacheKey, std::unique_ptr<GraphicsPipeline>> graphics_cache; - GraphicsPipelineCacheKey last_graphics_key; - VKGraphicsPipeline* last_graphics_pipeline = nullptr; + ShaderPools main_pools; - std::mutex pipeline_cache; - std::unordered_map<GraphicsPipelineCacheKey, std::unique_ptr<VKGraphicsPipeline>> - graphics_cache; - std::unordered_map<ComputePipelineCacheKey, std::unique_ptr<VKComputePipeline>> compute_cache; + Shader::Profile profile; + Shader::HostTranslateInfo host_info; + + std::filesystem::path pipeline_cache_filename; + + Common::ThreadWorker workers; + Common::ThreadWorker serialization_thread; }; -void FillDescriptorUpdateTemplateEntries( - const ShaderEntries& entries, u32& binding, u32& offset, - std::vector<VkDescriptorUpdateTemplateEntryKHR>& template_entries); - } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_query_cache.cpp b/src/video_core/renderer_vulkan/vk_query_cache.cpp index 7cadd5147..c9cb32d71 100755 --- a/src/video_core/renderer_vulkan/vk_query_cache.cpp +++ b/src/video_core/renderer_vulkan/vk_query_cache.cpp @@ -114,14 +114,10 @@ void HostCounter::EndQuery() { } u64 HostCounter::BlockingQuery() const { - if (tick >= cache.GetScheduler().CurrentTick()) { - cache.GetScheduler().Flush(); - } - + cache.GetScheduler().Wait(tick); u64 data; const VkResult query_result = cache.GetDevice().GetLogical().GetQueryResults( - query.first, query.second, 1, sizeof(data), &data, sizeof(data), - VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT); + query.first, query.second, 1, sizeof(data), &data, sizeof(data), VK_QUERY_RESULT_64_BIT); switch (query_result) { case VK_SUCCESS: diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.cpp b/src/video_core/renderer_vulkan/vk_rasterizer.cpp index e49b84ad1..0cb733df6 100755 --- a/src/video_core/renderer_vulkan/vk_rasterizer.cpp +++ b/src/video_core/renderer_vulkan/vk_rasterizer.cpp @@ -24,7 +24,6 @@ #include "video_core/renderer_vulkan/vk_buffer_cache.h" #include "video_core/renderer_vulkan/vk_compute_pipeline.h" #include "video_core/renderer_vulkan/vk_descriptor_pool.h" -#include "video_core/renderer_vulkan/vk_graphics_pipeline.h" #include "video_core/renderer_vulkan/vk_pipeline_cache.h" #include "video_core/renderer_vulkan/vk_rasterizer.h" #include "video_core/renderer_vulkan/vk_scheduler.h" @@ -55,11 +54,10 @@ struct DrawParams { u32 num_instances; u32 base_vertex; u32 num_vertices; + u32 first_index; bool is_indexed; }; -constexpr auto COMPUTE_SHADER_INDEX = static_cast<std::size_t>(Tegra::Engines::ShaderType::Compute); - VkViewport GetViewportState(const Device& device, const Maxwell& regs, size_t index) { const auto& src = regs.viewport_transform[index]; const float width = src.scale_x * 2.0f; @@ -97,118 +95,6 @@ VkRect2D GetScissorState(const Maxwell& regs, size_t index) { return scissor; } -std::array<GPUVAddr, Maxwell::MaxShaderProgram> GetShaderAddresses( - const std::array<Shader*, Maxwell::MaxShaderProgram>& shaders) { - std::array<GPUVAddr, Maxwell::MaxShaderProgram> addresses; - for (size_t i = 0; i < std::size(addresses); ++i) { - addresses[i] = shaders[i] ? shaders[i]->GetGpuAddr() : 0; - } - return addresses; -} - -struct TextureHandle { - constexpr TextureHandle(u32 data, bool via_header_index) { - const Tegra::Texture::TextureHandle handle{data}; - image = handle.tic_id; - sampler = via_header_index ?
image : handle.tsc_id.Value(); - } - - u32 image; - u32 sampler; -}; - -template -TextureHandle GetTextureInfo(const Engine& engine, bool via_header_index, const Entry& entry, - size_t stage, size_t index = 0) { - const auto shader_type = static_cast(stage); - if constexpr (std::is_same_v) { - if (entry.is_separated) { - const u32 buffer_1 = entry.buffer; - const u32 buffer_2 = entry.secondary_buffer; - const u32 offset_1 = entry.offset; - const u32 offset_2 = entry.secondary_offset; - const u32 handle_1 = engine.AccessConstBuffer32(shader_type, buffer_1, offset_1); - const u32 handle_2 = engine.AccessConstBuffer32(shader_type, buffer_2, offset_2); - return TextureHandle(handle_1 | handle_2, via_header_index); - } - } - if (entry.is_bindless) { - const u32 raw = engine.AccessConstBuffer32(shader_type, entry.buffer, entry.offset); - return TextureHandle(raw, via_header_index); - } - const u32 buffer = engine.GetBoundBuffer(); - const u64 offset = (entry.offset + index) * sizeof(u32); - return TextureHandle(engine.AccessConstBuffer32(shader_type, buffer, offset), via_header_index); -} - -ImageViewType ImageViewTypeFromEntry(const SamplerEntry& entry) { - if (entry.is_buffer) { - return ImageViewType::e2D; - } - switch (entry.type) { - case Tegra::Shader::TextureType::Texture1D: - return entry.is_array ? ImageViewType::e1DArray : ImageViewType::e1D; - case Tegra::Shader::TextureType::Texture2D: - return entry.is_array ? ImageViewType::e2DArray : ImageViewType::e2D; - case Tegra::Shader::TextureType::Texture3D: - return ImageViewType::e3D; - case Tegra::Shader::TextureType::TextureCube: - return entry.is_array ? ImageViewType::CubeArray : ImageViewType::Cube; - } - UNREACHABLE(); - return ImageViewType::e2D; -} - -ImageViewType ImageViewTypeFromEntry(const ImageEntry& entry) { - switch (entry.type) { - case Tegra::Shader::ImageType::Texture1D: - return ImageViewType::e1D; - case Tegra::Shader::ImageType::Texture1DArray: - return ImageViewType::e1DArray; - case Tegra::Shader::ImageType::Texture2D: - return ImageViewType::e2D; - case Tegra::Shader::ImageType::Texture2DArray: - return ImageViewType::e2DArray; - case Tegra::Shader::ImageType::Texture3D: - return ImageViewType::e3D; - case Tegra::Shader::ImageType::TextureBuffer: - return ImageViewType::Buffer; - } - UNREACHABLE(); - return ImageViewType::e2D; -} - -void PushImageDescriptors(const ShaderEntries& entries, TextureCache& texture_cache, - VKUpdateDescriptorQueue& update_descriptor_queue, - ImageViewId*& image_view_id_ptr, VkSampler*& sampler_ptr) { - for ([[maybe_unused]] const auto& entry : entries.uniform_texels) { - const ImageViewId image_view_id = *image_view_id_ptr++; - const ImageView& image_view = texture_cache.GetImageView(image_view_id); - update_descriptor_queue.AddTexelBuffer(image_view.BufferView()); - } - for (const auto& entry : entries.samplers) { - for (size_t i = 0; i < entry.size; ++i) { - const VkSampler sampler = *sampler_ptr++; - const ImageViewId image_view_id = *image_view_id_ptr++; - const ImageView& image_view = texture_cache.GetImageView(image_view_id); - const VkImageView handle = image_view.Handle(ImageViewTypeFromEntry(entry)); - update_descriptor_queue.AddSampledImage(handle, sampler); - } - } - for ([[maybe_unused]] const auto& entry : entries.storage_texels) { - const ImageViewId image_view_id = *image_view_id_ptr++; - const ImageView& image_view = texture_cache.GetImageView(image_view_id); - update_descriptor_queue.AddTexelBuffer(image_view.BufferView()); - } - for (const auto& entry : entries.images) 
{ - // TODO: Mark as modified - const ImageViewId image_view_id = *image_view_id_ptr++; - const ImageView& image_view = texture_cache.GetImageView(image_view_id); - const VkImageView handle = image_view.Handle(ImageViewTypeFromEntry(entry)); - update_descriptor_queue.AddImage(handle); - } -} - DrawParams MakeDrawParams(const Maxwell& regs, u32 num_instances, bool is_instanced, bool is_indexed) { DrawParams params{ @@ -216,6 +102,7 @@ DrawParams MakeDrawParams(const Maxwell& regs, u32 num_instances, bool is_instan .num_instances = is_instanced ? num_instances : 1, .base_vertex = is_indexed ? regs.vb_element_base : regs.vertex_buffer.first, .num_vertices = is_indexed ? regs.index_array.count : regs.vertex_buffer.count, + .first_index = is_indexed ? regs.index_array.first : 0, .is_indexed = is_indexed, }; if (regs.draw.topology == Maxwell::PrimitiveTopology::Quads) { @@ -243,21 +130,21 @@ RasterizerVulkan::RasterizerVulkan(Core::Frontend::EmuWindow& emu_window_, Tegra blit_image(device, scheduler, state_tracker, descriptor_pool), astc_decoder_pass(device, scheduler, descriptor_pool, staging_pool, update_descriptor_queue, memory_allocator), - texture_cache_runtime{device, scheduler, memory_allocator, - staging_pool, blit_image, astc_decoder_pass}, + render_pass_cache(device), texture_cache_runtime{device, scheduler, + memory_allocator, staging_pool, + blit_image, astc_decoder_pass, + render_pass_cache}, texture_cache(texture_cache_runtime, *this, maxwell3d, kepler_compute, gpu_memory), buffer_cache_runtime(device, memory_allocator, scheduler, staging_pool, update_descriptor_queue, descriptor_pool), buffer_cache(*this, maxwell3d, kepler_compute, gpu_memory, cpu_memory_, buffer_cache_runtime), - pipeline_cache(*this, gpu, maxwell3d, kepler_compute, gpu_memory, device, scheduler, - descriptor_pool, update_descriptor_queue), + pipeline_cache(*this, maxwell3d, kepler_compute, gpu_memory, device, scheduler, + descriptor_pool, update_descriptor_queue, render_pass_cache, buffer_cache, + texture_cache, gpu.ShaderNotify()), query_cache{*this, maxwell3d, gpu_memory, device, scheduler}, fence_manager(*this, gpu, texture_cache, buffer_cache, query_cache, device, scheduler), - wfi_event(device.GetLogical().CreateEvent()), async_shaders(emu_window_) { + wfi_event(device.GetLogical().CreateEvent()) { scheduler.SetQueryCache(query_cache); - if (device.UseAsynchronousShaders()) { - async_shaders.AllocateWorkers(); - } } RasterizerVulkan::~RasterizerVulkan() = default; @@ -270,53 +157,30 @@ void RasterizerVulkan::Draw(bool is_indexed, bool is_instanced) { query_cache.UpdateCounters(); - graphics_key.fixed_state.Refresh(maxwell3d, device.IsExtExtendedDynamicStateSupported()); - - std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex}; - - texture_cache.SynchronizeGraphicsDescriptors(); - texture_cache.UpdateRenderTargets(false); - - const auto shaders = pipeline_cache.GetShaders(); - graphics_key.shaders = GetShaderAddresses(shaders); - - SetupShaderDescriptors(shaders, is_indexed); - - const Framebuffer* const framebuffer = texture_cache.GetFramebuffer(); - graphics_key.renderpass = framebuffer->RenderPass(); - - VKGraphicsPipeline* const pipeline = pipeline_cache.GetGraphicsPipeline( - graphics_key, framebuffer->NumColorBuffers(), async_shaders); - if (pipeline == nullptr || pipeline->GetHandle() == VK_NULL_HANDLE) { - // Async graphics pipeline was not ready. 
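// Illustrative sketch (not from this patch): contract of the rewritten Draw() path that
// follows. CurrentGraphicsPipeline() returns nullptr while an asynchronous shader build is
// still running (see BuiltPipeline() earlier in vk_pipeline_cache.cpp), and the rasterizer
// simply skips the draw instead of stalling. The types below are simplified assumptions.
#include <cstdio>

struct PipelineLike {
    bool built{};
    void Configure(bool is_indexed) {
        std::printf("draw configured, indexed=%d\n", is_indexed);
    }
};

struct PipelineCacheLike {
    PipelineLike* pipeline{};
    // Returns nullptr while the pipeline is still compiling on a worker thread.
    PipelineLike* CurrentGraphicsPipeline() {
        return (pipeline && pipeline->built) ? pipeline : nullptr;
    }
};

void DrawSketch(PipelineCacheLike& cache, bool is_indexed) {
    PipelineLike* const pipeline = cache.CurrentGraphicsPipeline();
    if (!pipeline) {
        // Not ready yet: drop this draw, matching the early return in the new code.
        return;
    }
    pipeline->Configure(is_indexed);
    // ... record the actual draw call ...
}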
+ GraphicsPipeline* const pipeline{pipeline_cache.CurrentGraphicsPipeline()}; + if (!pipeline) { return; } + std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex}; + pipeline->Configure(is_indexed); BeginTransformFeedback(); - scheduler.RequestRenderpass(framebuffer); - scheduler.BindGraphicsPipeline(pipeline->GetHandle()); UpdateDynamicStates(); - const auto& regs = maxwell3d.regs; - const u32 num_instances = maxwell3d.mme_draw.instance_count; - const DrawParams draw_params = MakeDrawParams(regs, num_instances, is_instanced, is_indexed); - const VkPipelineLayout pipeline_layout = pipeline->GetLayout(); - const VkDescriptorSet descriptor_set = pipeline->CommitDescriptorSet(); - scheduler.Record([pipeline_layout, descriptor_set, draw_params](vk::CommandBuffer cmdbuf) { - if (descriptor_set) { - cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout, - DESCRIPTOR_SET, descriptor_set, nullptr); - } + const auto& regs{maxwell3d.regs}; + const u32 num_instances{maxwell3d.mme_draw.instance_count}; + const DrawParams draw_params{MakeDrawParams(regs, num_instances, is_instanced, is_indexed)}; + scheduler.Record([draw_params](vk::CommandBuffer cmdbuf) { if (draw_params.is_indexed) { - cmdbuf.DrawIndexed(draw_params.num_vertices, draw_params.num_instances, 0, - draw_params.base_vertex, draw_params.base_instance); + cmdbuf.DrawIndexed(draw_params.num_vertices, draw_params.num_instances, + draw_params.first_index, draw_params.base_vertex, + draw_params.base_instance); } else { cmdbuf.Draw(draw_params.num_vertices, draw_params.num_instances, draw_params.base_vertex, draw_params.base_instance); } }); - EndTransformFeedback(); } @@ -326,6 +190,7 @@ void RasterizerVulkan::Clear() { if (!maxwell3d.ShouldExecute()) { return; } + FlushWork(); query_cache.UpdateCounters(); @@ -393,73 +258,20 @@ void RasterizerVulkan::Clear() { }); } -void RasterizerVulkan::DispatchCompute(GPUVAddr code_addr) { - MICROPROFILE_SCOPE(Vulkan_Compute); +void RasterizerVulkan::DispatchCompute() { + FlushWork(); - query_cache.UpdateCounters(); - - const auto& launch_desc = kepler_compute.launch_description; - auto& pipeline = pipeline_cache.GetComputePipeline({ - .shader = code_addr, - .shared_memory_size = launch_desc.shared_alloc, - .workgroup_size{ - launch_desc.block_dim_x, - launch_desc.block_dim_y, - launch_desc.block_dim_z, - }, - }); - - // Compute dispatches can't be executed inside a renderpass - scheduler.RequestOutsideRenderPassOperationContext(); - - image_view_indices.clear(); - sampler_handles.clear(); - - std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex}; - - const auto& entries = pipeline.GetEntries(); - buffer_cache.SetEnabledComputeUniformBuffers(entries.enabled_uniform_buffers); - buffer_cache.UnbindComputeStorageBuffers(); - u32 ssbo_index = 0; - for (const auto& buffer : entries.global_buffers) { - buffer_cache.BindComputeStorageBuffer(ssbo_index, buffer.cbuf_index, buffer.cbuf_offset, - buffer.is_written); - ++ssbo_index; + ComputePipeline* const pipeline{pipeline_cache.CurrentComputePipeline()}; + if (!pipeline) { + return; } - buffer_cache.UpdateComputeBuffers(); + std::scoped_lock lock{texture_cache.mutex, buffer_cache.mutex}; + pipeline->Configure(kepler_compute, gpu_memory, scheduler, buffer_cache, texture_cache); - texture_cache.SynchronizeComputeDescriptors(); - - SetupComputeUniformTexels(entries); - SetupComputeTextures(entries); - SetupComputeStorageTexels(entries); - SetupComputeImages(entries); - - const std::span indices_span(image_view_indices.data(), 
image_view_indices.size()); - texture_cache.FillComputeImageViews(indices_span, image_view_ids); - - update_descriptor_queue.Acquire(); - - buffer_cache.BindHostComputeBuffers(); - - ImageViewId* image_view_id_ptr = image_view_ids.data(); - VkSampler* sampler_ptr = sampler_handles.data(); - PushImageDescriptors(entries, texture_cache, update_descriptor_queue, image_view_id_ptr, - sampler_ptr); - - const VkPipeline pipeline_handle = pipeline.GetHandle(); - const VkPipelineLayout pipeline_layout = pipeline.GetLayout(); - const VkDescriptorSet descriptor_set = pipeline.CommitDescriptorSet(); - scheduler.Record([grid_x = launch_desc.grid_dim_x, grid_y = launch_desc.grid_dim_y, - grid_z = launch_desc.grid_dim_z, pipeline_handle, pipeline_layout, - descriptor_set](vk::CommandBuffer cmdbuf) { - cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_handle); - if (descriptor_set) { - cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout, - DESCRIPTOR_SET, descriptor_set, nullptr); - } - cmdbuf.Dispatch(grid_x, grid_y, grid_z); - }); + const auto& qmd{kepler_compute.launch_description}; + const std::array dim{qmd.grid_dim_x, qmd.grid_dim_y, qmd.grid_dim_z}; + scheduler.RequestOutsideRenderPassOperationContext(); + scheduler.Record([dim](vk::CommandBuffer cmdbuf) { cmdbuf.Dispatch(dim[0], dim[1], dim[2]); }); } void RasterizerVulkan::ResetCounter(VideoCore::QueryType type) { @@ -644,6 +456,7 @@ void RasterizerVulkan::WaitForIdle() { void RasterizerVulkan::FragmentBarrier() { // We already put barriers when a render pass finishes + scheduler.RequestOutsideRenderPassOperationContext(); } void RasterizerVulkan::TiledCacheBarrier() { @@ -651,10 +464,11 @@ void RasterizerVulkan::TiledCacheBarrier() { } void RasterizerVulkan::FlushCommands() { - if (draw_counter > 0) { - draw_counter = 0; - scheduler.Flush(); + if (draw_counter == 0) { + return; } + draw_counter = 0; + scheduler.Flush(); } void RasterizerVulkan::TickFrame() { @@ -690,13 +504,18 @@ bool RasterizerVulkan::AccelerateDisplay(const Tegra::FramebufferConfig& config, if (!image_view) { return false; } - screen_info.image_view = image_view->Handle(VideoCommon::ImageViewType::e2D); + screen_info.image_view = image_view->Handle(Shader::TextureType::Color2D); screen_info.width = image_view->size.width; screen_info.height = image_view->size.height; screen_info.is_srgb = VideoCore::Surface::IsPixelFormatSRGB(image_view->format); return true; } +void RasterizerVulkan::LoadDiskResources(u64 title_id, std::stop_token stop_loading, + const VideoCore::DiskResourceLoadCallback& callback) { + pipeline_cache.LoadDiskResources(title_id, stop_loading, callback); +} + void RasterizerVulkan::FlushWork() { static constexpr u32 DRAWS_TO_DISPATCH = 4096; @@ -705,65 +524,17 @@ void RasterizerVulkan::FlushWork() { if ((++draw_counter & 7) != 7) { return; } - if (draw_counter < DRAWS_TO_DISPATCH) { // Send recorded tasks to the worker thread scheduler.DispatchWork(); return; } - // Otherwise (every certain number of draws) flush execution. // This submits commands to the Vulkan driver. 
scheduler.Flush(); draw_counter = 0; } -void RasterizerVulkan::SetupShaderDescriptors( - const std::array& shaders, bool is_indexed) { - image_view_indices.clear(); - sampler_handles.clear(); - for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) { - Shader* const shader = shaders[stage + 1]; - if (!shader) { - continue; - } - const ShaderEntries& entries = shader->GetEntries(); - SetupGraphicsUniformTexels(entries, stage); - SetupGraphicsTextures(entries, stage); - SetupGraphicsStorageTexels(entries, stage); - SetupGraphicsImages(entries, stage); - - buffer_cache.SetEnabledUniformBuffers(stage, entries.enabled_uniform_buffers); - buffer_cache.UnbindGraphicsStorageBuffers(stage); - u32 ssbo_index = 0; - for (const auto& buffer : entries.global_buffers) { - buffer_cache.BindGraphicsStorageBuffer(stage, ssbo_index, buffer.cbuf_index, - buffer.cbuf_offset, buffer.is_written); - ++ssbo_index; - } - } - const std::span indices_span(image_view_indices.data(), image_view_indices.size()); - buffer_cache.UpdateGraphicsBuffers(is_indexed); - texture_cache.FillGraphicsImageViews(indices_span, image_view_ids); - - buffer_cache.BindHostGeometryBuffers(is_indexed); - - update_descriptor_queue.Acquire(); - - ImageViewId* image_view_id_ptr = image_view_ids.data(); - VkSampler* sampler_ptr = sampler_handles.data(); - for (size_t stage = 0; stage < Maxwell::MaxShaderStage; ++stage) { - // Skip VertexA stage - Shader* const shader = shaders[stage + 1]; - if (!shader) { - continue; - } - buffer_cache.BindHostStageBuffers(stage); - PushImageDescriptors(shader->GetEntries(), texture_cache, update_descriptor_queue, - image_view_id_ptr, sampler_ptr); - } -} - void RasterizerVulkan::UpdateDynamicStates() { auto& regs = maxwell3d.regs; UpdateViewportsState(regs); @@ -772,6 +543,7 @@ void RasterizerVulkan::UpdateDynamicStates() { UpdateBlendConstants(regs); UpdateDepthBounds(regs); UpdateStencilFaces(regs); + UpdateLineWidth(regs); if (device.IsExtExtendedDynamicStateSupported()) { UpdateCullMode(regs); UpdateDepthBoundsTestEnable(regs); @@ -781,6 +553,9 @@ void RasterizerVulkan::UpdateDynamicStates() { UpdateFrontFace(regs); UpdateStencilOp(regs); UpdateStencilTestEnable(regs); + if (device.IsExtVertexInputDynamicStateSupported()) { + UpdateVertexInput(regs); + } } } @@ -812,89 +587,6 @@ void RasterizerVulkan::EndTransformFeedback() { [](vk::CommandBuffer cmdbuf) { cmdbuf.EndTransformFeedbackEXT(0, 0, nullptr, nullptr); }); } -void RasterizerVulkan::SetupGraphicsUniformTexels(const ShaderEntries& entries, size_t stage) { - const auto& regs = maxwell3d.regs; - const bool via_header_index = regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex; - for (const auto& entry : entries.uniform_texels) { - const TextureHandle handle = GetTextureInfo(maxwell3d, via_header_index, entry, stage); - image_view_indices.push_back(handle.image); - } -} - -void RasterizerVulkan::SetupGraphicsTextures(const ShaderEntries& entries, size_t stage) { - const auto& regs = maxwell3d.regs; - const bool via_header_index = regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex; - for (const auto& entry : entries.samplers) { - for (size_t index = 0; index < entry.size; ++index) { - const TextureHandle handle = - GetTextureInfo(maxwell3d, via_header_index, entry, stage, index); - image_view_indices.push_back(handle.image); - - Sampler* const sampler = texture_cache.GetGraphicsSampler(handle.sampler); - sampler_handles.push_back(sampler->Handle()); - } - } -} - -void RasterizerVulkan::SetupGraphicsStorageTexels(const 
ShaderEntries& entries, size_t stage) { - const auto& regs = maxwell3d.regs; - const bool via_header_index = regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex; - for (const auto& entry : entries.storage_texels) { - const TextureHandle handle = GetTextureInfo(maxwell3d, via_header_index, entry, stage); - image_view_indices.push_back(handle.image); - } -} - -void RasterizerVulkan::SetupGraphicsImages(const ShaderEntries& entries, size_t stage) { - const auto& regs = maxwell3d.regs; - const bool via_header_index = regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex; - for (const auto& entry : entries.images) { - const TextureHandle handle = GetTextureInfo(maxwell3d, via_header_index, entry, stage); - image_view_indices.push_back(handle.image); - } -} - -void RasterizerVulkan::SetupComputeUniformTexels(const ShaderEntries& entries) { - const bool via_header_index = kepler_compute.launch_description.linked_tsc; - for (const auto& entry : entries.uniform_texels) { - const TextureHandle handle = - GetTextureInfo(kepler_compute, via_header_index, entry, COMPUTE_SHADER_INDEX); - image_view_indices.push_back(handle.image); - } -} - -void RasterizerVulkan::SetupComputeTextures(const ShaderEntries& entries) { - const bool via_header_index = kepler_compute.launch_description.linked_tsc; - for (const auto& entry : entries.samplers) { - for (size_t index = 0; index < entry.size; ++index) { - const TextureHandle handle = GetTextureInfo(kepler_compute, via_header_index, entry, - COMPUTE_SHADER_INDEX, index); - image_view_indices.push_back(handle.image); - - Sampler* const sampler = texture_cache.GetComputeSampler(handle.sampler); - sampler_handles.push_back(sampler->Handle()); - } - } -} - -void RasterizerVulkan::SetupComputeStorageTexels(const ShaderEntries& entries) { - const bool via_header_index = kepler_compute.launch_description.linked_tsc; - for (const auto& entry : entries.storage_texels) { - const TextureHandle handle = - GetTextureInfo(kepler_compute, via_header_index, entry, COMPUTE_SHADER_INDEX); - image_view_indices.push_back(handle.image); - } -} - -void RasterizerVulkan::SetupComputeImages(const ShaderEntries& entries) { - const bool via_header_index = kepler_compute.launch_description.linked_tsc; - for (const auto& entry : entries.images) { - const TextureHandle handle = - GetTextureInfo(kepler_compute, via_header_index, entry, COMPUTE_SHADER_INDEX); - image_view_indices.push_back(handle.image); - } -} - void RasterizerVulkan::UpdateViewportsState(Tegra::Engines::Maxwell3D::Regs& regs) { if (!state_tracker.TouchViewports()) { return; @@ -987,6 +679,14 @@ void RasterizerVulkan::UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs) } } +void RasterizerVulkan::UpdateLineWidth(Tegra::Engines::Maxwell3D::Regs& regs) { + if (!state_tracker.TouchLineWidth()) { + return; + } + const float width = regs.line_smooth_enable ? 
regs.line_width_smooth : regs.line_width_aliased; + scheduler.Record([width](vk::CommandBuffer cmdbuf) { cmdbuf.SetLineWidth(width); }); +} + void RasterizerVulkan::UpdateCullMode(Tegra::Engines::Maxwell3D::Regs& regs) { if (!state_tracker.TouchCullMode()) { return; } @@ -1001,6 +701,11 @@ void RasterizerVulkan::UpdateDepthBoundsTestEnable(Tegra::Engines::Maxwell3D::Re if (!state_tracker.TouchDepthBoundsTestEnable()) { return; } + bool enabled = regs.depth_bounds_enable; + if (enabled && !device.IsDepthBoundsSupported()) { + LOG_WARNING(Render_Vulkan, "Depth bounds is enabled but not supported"); + enabled = false; + } scheduler.Record([enable = enabled](vk::CommandBuffer cmdbuf) { cmdbuf.SetDepthBoundsTestEnableEXT(enable); }); @@ -1088,4 +793,62 @@ void RasterizerVulkan::UpdateStencilTestEnable(Tegra::Engines::Maxwell3D::Regs& }); } +void RasterizerVulkan::UpdateVertexInput(Tegra::Engines::Maxwell3D::Regs& regs) { + auto& dirty{maxwell3d.dirty.flags}; + if (!dirty[Dirty::VertexInput]) { + return; + } + dirty[Dirty::VertexInput] = false; + + boost::container::static_vector<VkVertexInputBindingDescription2EXT, 32> bindings; + boost::container::static_vector<VkVertexInputAttributeDescription2EXT, 32> attributes; + + // There seems to be a bug on Nvidia's driver where updating only higher attributes ends up + // generating dirty state. Track the highest dirty attribute and update all attributes until + // that one. + size_t highest_dirty_attr{}; + for (size_t index = 0; index < Maxwell::NumVertexAttributes; ++index) { + if (dirty[Dirty::VertexAttribute0 + index]) { + highest_dirty_attr = index; + } + } + for (size_t index = 0; index < highest_dirty_attr; ++index) { + const Maxwell::VertexAttribute attribute{regs.vertex_attrib_format[index]}; + const u32 binding{attribute.buffer}; + dirty[Dirty::VertexAttribute0 + index] = false; + dirty[Dirty::VertexBinding0 + static_cast<size_t>(binding)] = true; + if (!attribute.constant) { + attributes.push_back({ + .sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, + .pNext = nullptr, + .location = static_cast<u32>(index), + .binding = binding, + .format = MaxwellToVK::VertexFormat(attribute.type, attribute.size), + .offset = attribute.offset, + }); + } + } + for (size_t index = 0; index < Maxwell::NumVertexAttributes; ++index) { + if (!dirty[Dirty::VertexBinding0 + index]) { + continue; + } + dirty[Dirty::VertexBinding0 + index] = false; + + const u32 binding{static_cast<u32>(index)}; + const auto& input_binding{regs.vertex_array[binding]}; + const bool is_instanced{regs.instanced_arrays.IsInstancingEnabled(binding)}; + bindings.push_back({ + .sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, + .pNext = nullptr, + .binding = binding, + .stride = input_binding.stride, + .inputRate = is_instanced ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX, + .divisor = is_instanced ?
input_binding.divisor : 1, + }); + } + scheduler.Record([bindings, attributes](vk::CommandBuffer cmdbuf) { + cmdbuf.SetVertexInputEXT(bindings, attributes); + }); +} + } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.h b/src/video_core/renderer_vulkan/vk_rasterizer.h index d15c36ddc..47408dbab 100755 --- a/src/video_core/renderer_vulkan/vk_rasterizer.h +++ b/src/video_core/renderer_vulkan/vk_rasterizer.h @@ -20,14 +20,13 @@ #include "video_core/renderer_vulkan/vk_buffer_cache.h" #include "video_core/renderer_vulkan/vk_descriptor_pool.h" #include "video_core/renderer_vulkan/vk_fence_manager.h" -#include "video_core/renderer_vulkan/vk_graphics_pipeline.h" #include "video_core/renderer_vulkan/vk_pipeline_cache.h" #include "video_core/renderer_vulkan/vk_query_cache.h" +#include "video_core/renderer_vulkan/vk_render_pass_cache.h" #include "video_core/renderer_vulkan/vk_scheduler.h" #include "video_core/renderer_vulkan/vk_staging_buffer_pool.h" #include "video_core/renderer_vulkan/vk_texture_cache.h" #include "video_core/renderer_vulkan/vk_update_descriptor.h" -#include "video_core/shader/async_shaders.h" #include "video_core/vulkan_common/vulkan_memory_allocator.h" #include "video_core/vulkan_common/vulkan_wrapper.h" @@ -60,7 +59,7 @@ public: void Draw(bool is_indexed, bool is_instanced) override; void Clear() override; - void DispatchCompute(GPUVAddr code_addr) override; + void DispatchCompute() override; void ResetCounter(VideoCore::QueryType type) override; void Query(GPUVAddr gpu_addr, VideoCore::QueryType type, std::optional timestamp) override; void BindGraphicsUniformBuffer(size_t stage, u32 index, GPUVAddr gpu_addr, u32 size) override; @@ -90,19 +89,8 @@ public: const Tegra::Engines::Fermi2D::Config& copy_config) override; bool AccelerateDisplay(const Tegra::FramebufferConfig& config, VAddr framebuffer_addr, u32 pixel_stride) override; - - VideoCommon::Shader::AsyncShaders& GetAsyncShaders() { - return async_shaders; - } - - const VideoCommon::Shader::AsyncShaders& GetAsyncShaders() const { - return async_shaders; - } - - /// Maximum supported size that a constbuffer can have in bytes. - static constexpr size_t MaxConstbufferSize = 0x10000; - static_assert(MaxConstbufferSize % (4 * sizeof(float)) == 0, - "The maximum size of a constbuffer must be a multiple of the size of GLvec4"); + void LoadDiskResources(u64 title_id, std::stop_token stop_loading, + const VideoCore::DiskResourceLoadCallback& callback) override; private: static constexpr size_t MAX_TEXTURES = 192; @@ -113,46 +101,19 @@ private: void FlushWork(); - /// Setup descriptors in the graphics pipeline. - void SetupShaderDescriptors(const std::array& shaders, - bool is_indexed); - void UpdateDynamicStates(); void BeginTransformFeedback(); void EndTransformFeedback(); - /// Setup uniform texels in the graphics pipeline. - void SetupGraphicsUniformTexels(const ShaderEntries& entries, std::size_t stage); - - /// Setup textures in the graphics pipeline. - void SetupGraphicsTextures(const ShaderEntries& entries, std::size_t stage); - - /// Setup storage texels in the graphics pipeline. - void SetupGraphicsStorageTexels(const ShaderEntries& entries, std::size_t stage); - - /// Setup images in the graphics pipeline. - void SetupGraphicsImages(const ShaderEntries& entries, std::size_t stage); - - /// Setup texel buffers in the compute pipeline. - void SetupComputeUniformTexels(const ShaderEntries& entries); - - /// Setup textures in the compute pipeline. 
- void SetupComputeTextures(const ShaderEntries& entries); - - /// Setup storage texels in the compute pipeline. - void SetupComputeStorageTexels(const ShaderEntries& entries); - - /// Setup images in the compute pipeline. - void SetupComputeImages(const ShaderEntries& entries); - void UpdateViewportsState(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateScissorsState(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateDepthBias(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateBlendConstants(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateDepthBounds(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs); + void UpdateLineWidth(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateCullMode(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateDepthBoundsTestEnable(Tegra::Engines::Maxwell3D::Regs& regs); @@ -163,6 +124,8 @@ private: void UpdateStencilOp(Tegra::Engines::Maxwell3D::Regs& regs); void UpdateStencilTestEnable(Tegra::Engines::Maxwell3D::Regs& regs); + void UpdateVertexInput(Tegra::Engines::Maxwell3D::Regs& regs); + Tegra::GPU& gpu; Tegra::MemoryManager& gpu_memory; Tegra::Engines::Maxwell3D& maxwell3d; @@ -175,23 +138,21 @@ private: VKScheduler& scheduler; StagingBufferPool staging_pool; - VKDescriptorPool descriptor_pool; + DescriptorPool descriptor_pool; VKUpdateDescriptorQueue update_descriptor_queue; BlitImageHelper blit_image; ASTCDecoderPass astc_decoder_pass; - - GraphicsPipelineCacheKey graphics_key; + RenderPassCache render_pass_cache; TextureCacheRuntime texture_cache_runtime; TextureCache texture_cache; BufferCacheRuntime buffer_cache_runtime; BufferCache buffer_cache; - VKPipelineCache pipeline_cache; + PipelineCache pipeline_cache; VKQueryCache query_cache; VKFenceManager fence_manager; vk::Event wfi_event; - VideoCommon::Shader::AsyncShaders async_shaders; boost::container::static_vector image_view_indices; std::array image_view_ids; diff --git a/src/video_core/renderer_vulkan/vk_render_pass_cache.cpp b/src/video_core/renderer_vulkan/vk_render_pass_cache.cpp new file mode 100755 index 000000000..451ffe019 --- /dev/null +++ b/src/video_core/renderer_vulkan/vk_render_pass_cache.cpp @@ -0,0 +1,96 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include + +#include + +#include "video_core/renderer_vulkan/maxwell_to_vk.h" +#include "video_core/renderer_vulkan/vk_render_pass_cache.h" +#include "video_core/surface.h" +#include "video_core/vulkan_common/vulkan_device.h" +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Vulkan { +namespace { +using VideoCore::Surface::PixelFormat; + +VkAttachmentDescription AttachmentDescription(const Device& device, PixelFormat format, + VkSampleCountFlagBits samples) { + using MaxwellToVK::SurfaceFormat; + return { + .flags = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT, + .format = SurfaceFormat(device, FormatType::Optimal, true, format).format, + .samples = samples, + .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD, + .storeOp = VK_ATTACHMENT_STORE_OP_STORE, + .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD, + .stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE, + .initialLayout = VK_IMAGE_LAYOUT_GENERAL, + .finalLayout = VK_IMAGE_LAYOUT_GENERAL, + }; +} +} // Anonymous namespace + +RenderPassCache::RenderPassCache(const Device& device_) : device{&device_} {} + +VkRenderPass RenderPassCache::Get(const RenderPassKey& key) { + std::lock_guard lock{mutex}; + const auto [pair, is_new] = cache.try_emplace(key); + if (!is_new) { + return *pair->second; + } + boost::container::static_vector<VkAttachmentDescription, 9> descriptions; + std::array<VkAttachmentReference, 8> references{}; + u32 num_attachments{}; + u32 num_colors{}; + for (size_t index = 0; index < key.color_formats.size(); ++index) { + const PixelFormat format{key.color_formats[index]}; + const bool is_valid{format != PixelFormat::Invalid}; + references[index] = VkAttachmentReference{ + .attachment = is_valid ? num_colors : VK_ATTACHMENT_UNUSED, + .layout = VK_IMAGE_LAYOUT_GENERAL, + }; + if (is_valid) { + descriptions.push_back(AttachmentDescription(*device, format, key.samples)); + num_attachments = static_cast<u32>(index + 1); + ++num_colors; + } + } + const bool has_depth{key.depth_format != PixelFormat::Invalid}; + VkAttachmentReference depth_reference{}; + if (key.depth_format != PixelFormat::Invalid) { + depth_reference = VkAttachmentReference{ + .attachment = num_colors, + .layout = VK_IMAGE_LAYOUT_GENERAL, + }; + descriptions.push_back(AttachmentDescription(*device, key.depth_format, key.samples)); + } + const VkSubpassDescription subpass{ + .flags = 0, + .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS, + .inputAttachmentCount = 0, + .pInputAttachments = nullptr, + .colorAttachmentCount = num_attachments, + .pColorAttachments = references.data(), + .pResolveAttachments = nullptr, + .pDepthStencilAttachment = has_depth ? &depth_reference : nullptr, + .preserveAttachmentCount = 0, + .pPreserveAttachments = nullptr, + }; + pair->second = device->GetLogical().CreateRenderPass({ + .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .attachmentCount = static_cast<u32>(descriptions.size()), + .pAttachments = descriptions.empty() ? nullptr : descriptions.data(), + .subpassCount = 1, + .pSubpasses = &subpass, + .dependencyCount = 0, + .pDependencies = nullptr, + }); + return *pair->second; +} + +} // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_render_pass_cache.h b/src/video_core/renderer_vulkan/vk_render_pass_cache.h new file mode 100755 index 000000000..eaa0ed775 --- /dev/null +++ b/src/video_core/renderer_vulkan/vk_render_pass_cache.h @@ -0,0 +1,55 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included.
+ +#pragma once + +#include +#include + +#include "video_core/surface.h" +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Vulkan { + +struct RenderPassKey { + auto operator<=>(const RenderPassKey&) const noexcept = default; + + std::array<VideoCore::Surface::PixelFormat, 8> color_formats; + VideoCore::Surface::PixelFormat depth_format; + VkSampleCountFlagBits samples; +}; + +} // namespace Vulkan + +namespace std { +template <> +struct hash<Vulkan::RenderPassKey> { + [[nodiscard]] size_t operator()(const Vulkan::RenderPassKey& key) const noexcept { + size_t value = static_cast<size_t>(key.depth_format) << 48; + value ^= static_cast<size_t>(key.samples) << 52; + for (size_t i = 0; i < key.color_formats.size(); ++i) { + value ^= static_cast<size_t>(key.color_formats[i]) << (i * 6); + } + return value; + } +}; +} // namespace std + +namespace Vulkan { + +class Device; + +class RenderPassCache { +public: + explicit RenderPassCache(const Device& device_); + + VkRenderPass Get(const RenderPassKey& key); + +private: + const Device* device{}; + std::unordered_map<RenderPassKey, vk::RenderPass> cache; + std::mutex mutex; +}; + +} // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_resource_pool.cpp b/src/video_core/renderer_vulkan/vk_resource_pool.cpp index a8bf7bda8..2dd514968 100755 --- a/src/video_core/renderer_vulkan/vk_resource_pool.cpp +++ b/src/video_core/renderer_vulkan/vk_resource_pool.cpp @@ -10,18 +10,16 @@ namespace Vulkan { ResourcePool::ResourcePool(MasterSemaphore& master_semaphore_, size_t grow_step_) - : master_semaphore{master_semaphore_}, grow_step{grow_step_} {} - -ResourcePool::~ResourcePool() = default; + : master_semaphore{&master_semaphore_}, grow_step{grow_step_} {} size_t ResourcePool::CommitResource() { // Refresh semaphore to query updated results - master_semaphore.Refresh(); - const u64 gpu_tick = master_semaphore.KnownGpuTick(); + master_semaphore->Refresh(); + const u64 gpu_tick = master_semaphore->KnownGpuTick(); const auto search = [this, gpu_tick](size_t begin, size_t end) -> std::optional<size_t> { for (size_t iterator = begin; iterator < end; ++iterator) { if (gpu_tick >= ticks[iterator]) { - ticks[iterator] = master_semaphore.CurrentTick(); + ticks[iterator] = master_semaphore->CurrentTick(); return iterator; } } @@ -36,7 +34,7 @@ size_t ResourcePool::CommitResource() { // Both searches failed, the pool is full; handle it. const size_t free_resource = ManageOverflow(); - ticks[free_resource] = master_semaphore.CurrentTick(); + ticks[free_resource] = master_semaphore->CurrentTick(); found = free_resource; } } diff --git a/src/video_core/renderer_vulkan/vk_resource_pool.h b/src/video_core/renderer_vulkan/vk_resource_pool.h index 9d0bb3b4d..f0b80ad59 100755 --- a/src/video_core/renderer_vulkan/vk_resource_pool.h +++ b/src/video_core/renderer_vulkan/vk_resource_pool.h @@ -18,8 +18,16 @@ class MasterSemaphore; */ class ResourcePool { public: + explicit ResourcePool() = default; explicit ResourcePool(MasterSemaphore& master_semaphore, size_t grow_step); - virtual ~ResourcePool(); + + virtual ~ResourcePool() = default; + + ResourcePool& operator=(ResourcePool&&) noexcept = default; + ResourcePool(ResourcePool&&) noexcept = default; + + ResourcePool& operator=(const ResourcePool&) = default; + ResourcePool(const ResourcePool&) = default; protected: size_t CommitResource(); @@ -34,7 +42,7 @@ private: /// Allocates a new page of resources.
void Grow(); - MasterSemaphore& master_semaphore; + MasterSemaphore* master_semaphore{}; size_t grow_step = 0; ///< Number of new resources created after an overflow size_t hint_iterator = 0; ///< Hint to where the next free resources is likely to be found std::vector<u64> ticks; ///< Ticks for each resource diff --git a/src/video_core/renderer_vulkan/vk_scheduler.cpp b/src/video_core/renderer_vulkan/vk_scheduler.cpp index f35c120b0..4840962de 100755 --- a/src/video_core/renderer_vulkan/vk_scheduler.cpp +++ b/src/video_core/renderer_vulkan/vk_scheduler.cpp @@ -31,7 +31,7 @@ void VKScheduler::CommandChunk::ExecuteAll(vk::CommandBuffer cmdbuf) { command->~Command(); command = next; } - + submit = false; command_offset = 0; first = nullptr; last = nullptr; @@ -42,13 +42,16 @@ VKScheduler::VKScheduler(const Device& device_, StateTracker& state_tracker_) master_semaphore{std::make_unique<MasterSemaphore>(device)}, command_pool{std::make_unique<CommandPool>(*master_semaphore, device)} { AcquireNewChunk(); - AllocateNewContext(); + AllocateWorkerCommandBuffer(); worker_thread = std::thread(&VKScheduler::WorkerThread, this); } VKScheduler::~VKScheduler() { - quit = true; - cv.notify_all(); + { + std::lock_guard lock{work_mutex}; + quit = true; + } + work_cv.notify_all(); worker_thread.join(); } @@ -60,6 +63,7 @@ void VKScheduler::Flush(VkSemaphore semaphore) { } void VKScheduler::Finish(VkSemaphore semaphore) { const u64 presubmit_tick = CurrentTick(); SubmitExecution(semaphore); + WaitWorker(); Wait(presubmit_tick); AllocateNewContext(); } @@ -68,20 +72,19 @@ void VKScheduler::WaitWorker() { MICROPROFILE_SCOPE(Vulkan_WaitForWorker); DispatchWork(); - bool finished = false; - do { - cv.notify_all(); - std::unique_lock lock{mutex}; - finished = chunk_queue.Empty(); - } while (!finished); + std::unique_lock lock{work_mutex}; + wait_cv.wait(lock, [this] { return work_queue.empty(); }); } void VKScheduler::DispatchWork() { if (chunk->Empty()) { return; } - chunk_queue.Push(std::move(chunk)); - cv.notify_all(); + { + std::lock_guard lock{work_mutex}; + work_queue.push(std::move(chunk)); + } + work_cv.notify_one(); AcquireNewChunk(); } @@ -124,85 +127,41 @@ void VKScheduler::RequestOutsideRenderPassOperationContext() { EndRenderPass(); } -void VKScheduler::BindGraphicsPipeline(VkPipeline pipeline) { +bool VKScheduler::UpdateGraphicsPipeline(GraphicsPipeline* pipeline) { if (state.graphics_pipeline == pipeline) { - return; + return false; } state.graphics_pipeline = pipeline; - Record([pipeline](vk::CommandBuffer cmdbuf) { - cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline); - }); + return true; } void VKScheduler::WorkerThread() { - Common::SetCurrentThreadPriority(Common::ThreadPriority::High); - std::unique_lock lock{mutex}; + Common::SetCurrentThreadName("yuzu:VulkanWorker"); do { - cv.wait(lock, [this] { return !chunk_queue.Empty() || quit; }); - if (quit) { - continue; + if (work_queue.empty()) { + wait_cv.notify_all(); } - auto extracted_chunk = std::move(chunk_queue.Front()); - chunk_queue.Pop(); - extracted_chunk->ExecuteAll(current_cmdbuf); - chunk_reserve.Push(std::move(extracted_chunk)); + std::unique_ptr<CommandChunk> work; + { + std::unique_lock lock{work_mutex}; + work_cv.wait(lock, [this] { return !work_queue.empty() || quit; }); + if (quit) { + continue; + } + work = std::move(work_queue.front()); + work_queue.pop(); + } + const bool has_submit = work->HasSubmit(); + work->ExecuteAll(current_cmdbuf); + if (has_submit) { + AllocateWorkerCommandBuffer(); + } + std::lock_guard reserve_lock{reserve_mutex}; +
chunk_reserve.push_back(std::move(work)); } while (!quit); } -void VKScheduler::SubmitExecution(VkSemaphore semaphore) { - EndPendingOperations(); - InvalidateState(); - WaitWorker(); - - std::unique_lock lock{mutex}; - - current_cmdbuf.End(); - - const VkSemaphore timeline_semaphore = master_semaphore->Handle(); - const u32 num_signal_semaphores = semaphore ? 2U : 1U; - - const u64 signal_value = master_semaphore->CurrentTick(); - const u64 wait_value = signal_value - 1; - const VkPipelineStageFlags wait_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; - - master_semaphore->NextTick(); - - const std::array signal_values{signal_value, u64(0)}; - const std::array signal_semaphores{timeline_semaphore, semaphore}; - - const VkTimelineSemaphoreSubmitInfoKHR timeline_si{ - .sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, - .pNext = nullptr, - .waitSemaphoreValueCount = 1, - .pWaitSemaphoreValues = &wait_value, - .signalSemaphoreValueCount = num_signal_semaphores, - .pSignalSemaphoreValues = signal_values.data(), - }; - const VkSubmitInfo submit_info{ - .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO, - .pNext = &timeline_si, - .waitSemaphoreCount = 1, - .pWaitSemaphores = &timeline_semaphore, - .pWaitDstStageMask = &wait_stage_mask, - .commandBufferCount = 1, - .pCommandBuffers = current_cmdbuf.address(), - .signalSemaphoreCount = num_signal_semaphores, - .pSignalSemaphores = signal_semaphores.data(), - }; - switch (const VkResult result = device.GetGraphicsQueue().Submit(submit_info)) { - case VK_SUCCESS: - break; - case VK_ERROR_DEVICE_LOST: - device.ReportLoss(); - [[fallthrough]]; - default: - vk::Check(result); - } -} - -void VKScheduler::AllocateNewContext() { - std::unique_lock lock{mutex}; - +void VKScheduler::AllocateWorkerCommandBuffer() { current_cmdbuf = vk::CommandBuffer(command_pool->Commit(), device.GetDispatchLoader()); current_cmdbuf.Begin({ .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, @@ -210,7 +169,59 @@ void VKScheduler::AllocateNewContext() { .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, .pInheritanceInfo = nullptr, }); +} +void VKScheduler::SubmitExecution(VkSemaphore semaphore) { + EndPendingOperations(); + InvalidateState(); + + const u64 signal_value = master_semaphore->NextTick(); + Record([semaphore, signal_value, this](vk::CommandBuffer cmdbuf) { + cmdbuf.End(); + + const u32 num_signal_semaphores = semaphore ? 
2U : 1U; + + const u64 wait_value = signal_value - 1; + const VkPipelineStageFlags wait_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; + + const VkSemaphore timeline_semaphore = master_semaphore->Handle(); + const std::array signal_values{signal_value, u64(0)}; + const std::array signal_semaphores{timeline_semaphore, semaphore}; + + const VkTimelineSemaphoreSubmitInfoKHR timeline_si{ + .sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, + .pNext = nullptr, + .waitSemaphoreValueCount = 1, + .pWaitSemaphoreValues = &wait_value, + .signalSemaphoreValueCount = num_signal_semaphores, + .pSignalSemaphoreValues = signal_values.data(), + }; + const VkSubmitInfo submit_info{ + .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO, + .pNext = &timeline_si, + .waitSemaphoreCount = 1, + .pWaitSemaphores = &timeline_semaphore, + .pWaitDstStageMask = &wait_stage_mask, + .commandBufferCount = 1, + .pCommandBuffers = cmdbuf.address(), + .signalSemaphoreCount = num_signal_semaphores, + .pSignalSemaphores = signal_semaphores.data(), + }; + switch (const VkResult result = device.GetGraphicsQueue().Submit(submit_info)) { + case VK_SUCCESS: + break; + case VK_ERROR_DEVICE_LOST: + device.ReportLoss(); + [[fallthrough]]; + default: + vk::Check(result); + } + }); + chunk->MarkSubmit(); + DispatchWork(); +} + +void VKScheduler::AllocateNewContext() { // Enable counters once again. These are disabled when a command buffer is finished. if (query_cache) { query_cache->UpdateCounters(); @@ -265,12 +276,13 @@ void VKScheduler::EndRenderPass() { } void VKScheduler::AcquireNewChunk() { - if (chunk_reserve.Empty()) { + std::lock_guard lock{reserve_mutex}; + if (chunk_reserve.empty()) { chunk = std::make_unique(); return; } - chunk = std::move(chunk_reserve.Front()); - chunk_reserve.Pop(); + chunk = std::move(chunk_reserve.back()); + chunk_reserve.pop_back(); } } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_scheduler.h b/src/video_core/renderer_vulkan/vk_scheduler.h index 3ce48e9d2..cf39a2363 100755 --- a/src/video_core/renderer_vulkan/vk_scheduler.h +++ b/src/video_core/renderer_vulkan/vk_scheduler.h @@ -8,12 +8,12 @@ #include #include #include -#include #include #include +#include + #include "common/alignment.h" #include "common/common_types.h" -#include "common/threadsafe_queue.h" #include "video_core/renderer_vulkan/vk_master_semaphore.h" #include "video_core/vulkan_common/vulkan_wrapper.h" @@ -22,6 +22,7 @@ namespace Vulkan { class CommandPool; class Device; class Framebuffer; +class GraphicsPipeline; class StateTracker; class VKQueryCache; @@ -52,8 +53,8 @@ public: /// of a renderpass. void RequestOutsideRenderPassOperationContext(); - /// Binds a pipeline to the current execution context. - void BindGraphicsPipeline(VkPipeline pipeline); + /// Update the pipeline to the current execution context. + bool UpdateGraphicsPipeline(GraphicsPipeline* pipeline); /// Invalidates current command buffer state except for render passes void InvalidateState(); @@ -85,6 +86,10 @@ public: /// Waits for the given tick to trigger on the GPU. 
void Wait(u64 tick) { + if (tick >= master_semaphore->CurrentTick()) { + // Make sure we are not waiting for the current tick without signalling + Flush(); + } master_semaphore->Wait(tick); } @@ -154,15 +159,24 @@ private: return true; } + void MarkSubmit() { + submit = true; + } + bool Empty() const { return command_offset == 0; } + bool HasSubmit() const { + return submit; + } + private: Command* first = nullptr; Command* last = nullptr; size_t command_offset = 0; + bool submit = false; alignas(std::max_align_t) std::array data{}; }; @@ -170,11 +184,13 @@ private: VkRenderPass renderpass = nullptr; VkFramebuffer framebuffer = nullptr; VkExtent2D render_area = {0, 0}; - VkPipeline graphics_pipeline = nullptr; + GraphicsPipeline* graphics_pipeline = nullptr; }; void WorkerThread(); + void AllocateWorkerCommandBuffer(); + void SubmitExecution(VkSemaphore semaphore); void AllocateNewContext(); @@ -204,11 +220,13 @@ private: std::array renderpass_images{}; std::array renderpass_image_ranges{}; - Common::SPSCQueue> chunk_queue; - Common::SPSCQueue> chunk_reserve; - std::mutex mutex; - std::condition_variable cv; - bool quit = false; + std::queue> work_queue; + std::vector> chunk_reserve; + std::mutex reserve_mutex; + std::mutex work_mutex; + std::condition_variable work_cv; + std::condition_variable wait_cv; + std::atomic_bool quit{}; }; } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp b/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp index 0412b5234..555b12ed7 100755 --- a/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp +++ b/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp @@ -91,7 +91,7 @@ StagingBufferPool::StagingBufferPool(const Device& device_, MemoryAllocator& mem .flags = 0, .size = STREAM_BUFFER_SIZE, .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | - VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, .sharingMode = VK_SHARING_MODE_EXCLUSIVE, .queueFamilyIndexCount = 0, .pQueueFamilyIndices = nullptr, diff --git a/src/video_core/renderer_vulkan/vk_state_tracker.cpp b/src/video_core/renderer_vulkan/vk_state_tracker.cpp index 956f86845..e3b7dd61c 100755 --- a/src/video_core/renderer_vulkan/vk_state_tracker.cpp +++ b/src/video_core/renderer_vulkan/vk_state_tracker.cpp @@ -29,9 +29,10 @@ using Flags = Maxwell3D::DirtyState::Flags; Flags MakeInvalidationFlags() { static constexpr int INVALIDATION_FLAGS[]{ - Viewports, Scissors, DepthBias, BlendConstants, DepthBounds, - StencilProperties, CullMode, DepthBoundsEnable, DepthTestEnable, DepthWriteEnable, - DepthCompareOp, FrontFace, StencilOp, StencilTestEnable, VertexBuffers, + Viewports, Scissors, DepthBias, BlendConstants, DepthBounds, + StencilProperties, LineWidth, CullMode, DepthBoundsEnable, DepthTestEnable, + DepthWriteEnable, DepthCompareOp, FrontFace, StencilOp, StencilTestEnable, + VertexBuffers, VertexInput, }; Flags flags{}; for (const int flag : INVALIDATION_FLAGS) { @@ -40,6 +41,12 @@ Flags MakeInvalidationFlags() { for (int index = VertexBuffer0; index <= VertexBuffer31; ++index) { flags[index] = true; } + for (int index = VertexAttribute0; index <= VertexAttribute31; ++index) { + flags[index] = true; + } + for (int index = VertexBinding0; index <= VertexBinding31; ++index) { + flags[index] = true; + } return flags; } @@ -79,6 +86,11 @@ void SetupDirtyStencilProperties(Tables& tables) { table[OFF(stencil_back_func_mask)] = StencilProperties; } +void 
SetupDirtyLineWidth(Tables& tables) { + tables[0][OFF(line_width_smooth)] = LineWidth; + tables[0][OFF(line_width_aliased)] = LineWidth; +} + void SetupDirtyCullMode(Tables& tables) { auto& table = tables[0]; table[OFF(cull_face)] = CullMode; @@ -134,19 +146,6 @@ void SetupDirtyBlending(Tables& tables) { FillBlock(tables[0], OFF(independent_blend), NUM(independent_blend), Blending); } -void SetupDirtyInstanceDivisors(Tables& tables) { - static constexpr size_t divisor_offset = 3; - for (size_t index = 0; index < Regs::NumVertexArrays; ++index) { - tables[0][OFF(instanced_arrays) + index] = InstanceDivisors; - tables[0][OFF(vertex_array) + index * NUM(vertex_array[0]) + divisor_offset] = - InstanceDivisors; - } -} - -void SetupDirtyVertexAttributes(Tables& tables) { - FillBlock(tables[0], OFF(vertex_attrib_format), NUM(vertex_attrib_format), VertexAttributes); -} - void SetupDirtyViewportSwizzles(Tables& tables) { static constexpr size_t swizzle_offset = 6; for (size_t index = 0; index < Regs::NumViewports; ++index) { @@ -154,11 +153,31 @@ void SetupDirtyViewportSwizzles(Tables& tables) { ViewportSwizzles; } } + +void SetupDirtyVertexAttributes(Tables& tables) { + for (size_t i = 0; i < Regs::NumVertexAttributes; ++i) { + const size_t offset = OFF(vertex_attrib_format) + i * NUM(vertex_attrib_format[0]); + FillBlock(tables[0], offset, NUM(vertex_attrib_format[0]), VertexAttribute0 + i); + } + FillBlock(tables[1], OFF(vertex_attrib_format), Regs::NumVertexAttributes, VertexInput); +} + +void SetupDirtyVertexBindings(Tables& tables) { + // Do NOT include stride here, it's implicit in VertexBuffer + static constexpr size_t divisor_offset = 3; + for (size_t i = 0; i < Regs::NumVertexArrays; ++i) { + const u8 flag = static_cast(VertexBinding0 + i); + tables[0][OFF(instanced_arrays) + i] = VertexInput; + tables[1][OFF(instanced_arrays) + i] = flag; + tables[0][OFF(vertex_array) + i * NUM(vertex_array[0]) + divisor_offset] = VertexInput; + tables[1][OFF(vertex_array) + i * NUM(vertex_array[0]) + divisor_offset] = flag; + } +} } // Anonymous namespace StateTracker::StateTracker(Tegra::GPU& gpu) : flags{gpu.Maxwell3D().dirty.flags}, invalidation_flags{MakeInvalidationFlags()} { - auto& tables = gpu.Maxwell3D().dirty.tables; + auto& tables{gpu.Maxwell3D().dirty.tables}; SetupDirtyFlags(tables); SetupDirtyViewports(tables); SetupDirtyScissors(tables); @@ -166,6 +185,7 @@ StateTracker::StateTracker(Tegra::GPU& gpu) SetupDirtyBlendConstants(tables); SetupDirtyDepthBounds(tables); SetupDirtyStencilProperties(tables); + SetupDirtyLineWidth(tables); SetupDirtyCullMode(tables); SetupDirtyDepthBoundsEnable(tables); SetupDirtyDepthTestEnable(tables); @@ -175,9 +195,9 @@ StateTracker::StateTracker(Tegra::GPU& gpu) SetupDirtyStencilOp(tables); SetupDirtyStencilTestEnable(tables); SetupDirtyBlending(tables); - SetupDirtyInstanceDivisors(tables); - SetupDirtyVertexAttributes(tables); SetupDirtyViewportSwizzles(tables); + SetupDirtyVertexAttributes(tables); + SetupDirtyVertexBindings(tables); } } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_state_tracker.h b/src/video_core/renderer_vulkan/vk_state_tracker.h index 84e918a71..5f78f6950 100755 --- a/src/video_core/renderer_vulkan/vk_state_tracker.h +++ b/src/video_core/renderer_vulkan/vk_state_tracker.h @@ -19,12 +19,19 @@ namespace Dirty { enum : u8 { First = VideoCommon::Dirty::LastCommonEntry, + VertexInput, + VertexAttribute0, + VertexAttribute31 = VertexAttribute0 + 31, + VertexBinding0, + VertexBinding31 = VertexBinding0 + 31, + Viewports, 
Scissors, DepthBias, BlendConstants, DepthBounds, StencilProperties, + LineWidth, CullMode, DepthBoundsEnable, @@ -36,11 +43,9 @@ enum : u8 { StencilTestEnable, Blending, - InstanceDivisors, - VertexAttributes, ViewportSwizzles, - Last + Last, }; static_assert(Last <= std::numeric_limits::max()); @@ -89,6 +94,10 @@ public: return Exchange(Dirty::StencilProperties, false); } + bool TouchLineWidth() const { + return Exchange(Dirty::LineWidth, false); + } + bool TouchCullMode() { return Exchange(Dirty::CullMode, false); } diff --git a/src/video_core/renderer_vulkan/vk_swapchain.cpp b/src/video_core/renderer_vulkan/vk_swapchain.cpp index dfd5c65ba..d990eefba 100755 --- a/src/video_core/renderer_vulkan/vk_swapchain.cpp +++ b/src/video_core/renderer_vulkan/vk_swapchain.cpp @@ -65,6 +65,9 @@ VKSwapchain::VKSwapchain(VkSurfaceKHR surface_, const Device& device_, VKSchedul VKSwapchain::~VKSwapchain() = default; void VKSwapchain::Create(u32 width, u32 height, bool srgb) { + is_outdated = false; + is_suboptimal = false; + const auto physical_device = device.GetPhysical(); const auto capabilities{physical_device.GetSurfaceCapabilitiesKHR(surface)}; if (capabilities.maxImageExtent.width == 0 || capabilities.maxImageExtent.height == 0) { @@ -82,21 +85,31 @@ void VKSwapchain::Create(u32 width, u32 height, bool srgb) { resource_ticks.resize(image_count); } -bool VKSwapchain::AcquireNextImage() { - const VkResult result = - device.GetLogical().AcquireNextImageKHR(*swapchain, std::numeric_limits::max(), - *present_semaphores[frame_index], {}, &image_index); - +void VKSwapchain::AcquireNextImage() { + const VkResult result = device.GetLogical().AcquireNextImageKHR( + *swapchain, std::numeric_limits::max(), *present_semaphores[frame_index], + VK_NULL_HANDLE, &image_index); + switch (result) { + case VK_SUCCESS: + break; + case VK_SUBOPTIMAL_KHR: + is_suboptimal = true; + break; + case VK_ERROR_OUT_OF_DATE_KHR: + is_outdated = true; + break; + default: + LOG_ERROR(Render_Vulkan, "vkAcquireNextImageKHR returned {}", vk::ToString(result)); + break; + } scheduler.Wait(resource_ticks[image_index]); - return result == VK_SUCCESS || result == VK_SUBOPTIMAL_KHR; + resource_ticks[image_index] = scheduler.CurrentTick(); } -bool VKSwapchain::Present(VkSemaphore render_semaphore) { +void VKSwapchain::Present(VkSemaphore render_semaphore) { const VkSemaphore present_semaphore{*present_semaphores[frame_index]}; const std::array semaphores{present_semaphore, render_semaphore}; const auto present_queue{device.GetPresentQueue()}; - bool recreated = false; - const VkPresentInfoKHR present_info{ .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, .pNext = nullptr, @@ -107,7 +120,6 @@ bool VKSwapchain::Present(VkSemaphore render_semaphore) { .pImageIndices = &image_index, .pResults = nullptr, }; - switch (const VkResult result = present_queue.Present(present_info)) { case VK_SUCCESS: break; @@ -115,24 +127,16 @@ bool VKSwapchain::Present(VkSemaphore render_semaphore) { LOG_DEBUG(Render_Vulkan, "Suboptimal swapchain"); break; case VK_ERROR_OUT_OF_DATE_KHR: - if (current_width > 0 && current_height > 0) { - Create(current_width, current_height, current_srgb); - recreated = true; - } + is_outdated = true; break; default: LOG_CRITICAL(Render_Vulkan, "Failed to present with error {}", vk::ToString(result)); break; } - - resource_ticks[image_index] = scheduler.CurrentTick(); - frame_index = (frame_index + 1) % static_cast(image_count); - return recreated; -} - -bool VKSwapchain::HasFramebufferChanged(const Layout::FramebufferLayout& 
framebuffer) const {
-    // TODO(Rodrigo): Handle framebuffer pixel format changes
-    return framebuffer.width != current_width || framebuffer.height != current_height;
+    ++frame_index;
+    if (frame_index >= image_count) {
+        frame_index = 0;
+    }
 }
 
 void VKSwapchain::CreateSwapchain(const VkSurfaceCapabilitiesKHR& capabilities, u32 width,
@@ -148,7 +152,6 @@ void VKSwapchain::CreateSwapchain(const VkSurfaceCapabilitiesKHR& capabilities,
     if (capabilities.maxImageCount > 0 && requested_image_count > capabilities.maxImageCount) {
         requested_image_count = capabilities.maxImageCount;
    }
-
     VkSwapchainCreateInfoKHR swapchain_ci{
         .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
         .pNext = nullptr,
@@ -169,7 +172,6 @@ void VKSwapchain::CreateSwapchain(const VkSurfaceCapabilitiesKHR& capabilities,
         .clipped = VK_FALSE,
         .oldSwapchain = nullptr,
     };
-
     const u32 graphics_family{device.GetGraphicsFamily()};
     const u32 present_family{device.GetPresentFamily()};
     const std::array queue_indices{graphics_family, present_family};
@@ -178,7 +180,6 @@ void VKSwapchain::CreateSwapchain(const VkSurfaceCapabilitiesKHR& capabilities,
         swapchain_ci.queueFamilyIndexCount = static_cast<u32>(queue_indices.size());
         swapchain_ci.pQueueFamilyIndices = queue_indices.data();
     }
-
     // Request the size again to reduce the possibility of a TOCTOU race condition.
     const auto updated_capabilities = physical_device.GetSurfaceCapabilitiesKHR(surface);
     swapchain_ci.imageExtent = ChooseSwapExtent(updated_capabilities, width, height);
@@ -186,8 +187,6 @@
     swapchain = device.GetLogical().CreateSwapchainKHR(swapchain_ci);
 
     extent = swapchain_ci.imageExtent;
-    current_width = extent.width;
-    current_height = extent.height;
     current_srgb = srgb;
 
     images = swapchain.GetImages();
@@ -197,8 +196,8 @@
 
 void VKSwapchain::CreateSemaphores() {
     present_semaphores.resize(image_count);
-    std::generate(present_semaphores.begin(), present_semaphores.end(),
-                  [this] { return device.GetLogical().CreateSemaphore(); });
+    std::ranges::generate(present_semaphores,
+                          [this] { return device.GetLogical().CreateSemaphore(); });
 }
 
 void VKSwapchain::CreateImageViews() {
diff --git a/src/video_core/renderer_vulkan/vk_swapchain.h b/src/video_core/renderer_vulkan/vk_swapchain.h
index adc8d27cf..35c2cdc14 100755
--- a/src/video_core/renderer_vulkan/vk_swapchain.h
+++ b/src/video_core/renderer_vulkan/vk_swapchain.h
@@ -28,14 +28,25 @@ public:
     void Create(u32 width, u32 height, bool srgb);
 
     /// Acquires the next image in the swapchain, waits as needed.
-    bool AcquireNextImage();
+    void AcquireNextImage();
 
-    /// Presents the rendered image to the swapchain. Returns true when the swapchains had to be
-    /// recreated. Takes responsability for the ownership of fence.
-    bool Present(VkSemaphore render_semaphore);
+    /// Presents the rendered image to the swapchain.
+    void Present(VkSemaphore render_semaphore);
 
-    /// Returns true when the framebuffer layout has changed.
-    bool HasFramebufferChanged(const Layout::FramebufferLayout& framebuffer) const;
+    /// Returns true when the color space has changed.
+    bool HasColorSpaceChanged(bool is_srgb) const {
+        return current_srgb != is_srgb;
+    }
+
+    /// Returns true when the swapchain is outdated.
+    bool IsOutDated() const {
+        return is_outdated;
+    }
+
+    /// Returns true when the swapchain is suboptimal.
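Together with IsOutDated() above and IsSubOptimal() just below, presentation now reports swapchain health through flags instead of return values. A hypothetical caller-side frame step under that model (the helper and its parameters are illustrative, not part of this patch):

// Illustrative use of the flag-based API; a production loop would also
// re-check the flags after AcquireNextImage(), which can set is_outdated.
void PresentFrame(VKSwapchain& swapchain, VkSemaphore render_semaphore,
                  u32 width, u32 height, bool srgb) {
    if (swapchain.IsOutDated() || swapchain.HasColorSpaceChanged(srgb)) {
        swapchain.Create(width, height, srgb); // Create() clears both flags
    }
    swapchain.AcquireNextImage();
    swapchain.Present(render_semaphore);
}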
+ bool IsSubOptimal() const { + return is_suboptimal; + } VkExtent2D GetSize() const { return extent; @@ -61,10 +72,6 @@ public: return image_format; } - bool GetSrgbState() const { - return current_srgb; - } - private: void CreateSwapchain(const VkSurfaceCapabilitiesKHR& capabilities, u32 width, u32 height, bool srgb); @@ -92,9 +99,9 @@ private: VkFormat image_format{}; VkExtent2D extent{}; - u32 current_width{}; - u32 current_height{}; bool current_srgb{}; + bool is_outdated{}; + bool is_suboptimal{}; }; } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_texture_cache.cpp b/src/video_core/renderer_vulkan/vk_texture_cache.cpp index a2ab4d1ee..17af62559 100755 --- a/src/video_core/renderer_vulkan/vk_texture_cache.cpp +++ b/src/video_core/renderer_vulkan/vk_texture_cache.cpp @@ -15,6 +15,7 @@ #include "video_core/renderer_vulkan/maxwell_to_vk.h" #include "video_core/renderer_vulkan/vk_compute_pass.h" #include "video_core/renderer_vulkan/vk_rasterizer.h" +#include "video_core/renderer_vulkan/vk_render_pass_cache.h" #include "video_core/renderer_vulkan/vk_scheduler.h" #include "video_core/renderer_vulkan/vk_staging_buffer_pool.h" #include "video_core/renderer_vulkan/vk_texture_cache.h" @@ -34,19 +35,6 @@ using VideoCommon::SubresourceRange; using VideoCore::Surface::IsPixelFormatASTC; namespace { - -constexpr std::array ATTACHMENT_REFERENCES{ - VkAttachmentReference{0, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{1, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{2, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{3, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{4, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{5, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{6, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{7, VK_IMAGE_LAYOUT_GENERAL}, - VkAttachmentReference{8, VK_IMAGE_LAYOUT_GENERAL}, -}; - constexpr VkBorderColor ConvertBorderColor(const std::array& color) { if (color == std::array{0, 0, 0, 0}) { return VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; @@ -174,25 +162,6 @@ constexpr VkBorderColor ConvertBorderColor(const std::array& color) { return device.GetLogical().CreateImage(MakeImageCreateInfo(device, info)); } -[[nodiscard]] vk::Buffer MakeBuffer(const Device& device, const ImageInfo& info) { - if (info.type != ImageType::Buffer) { - return vk::Buffer{}; - } - const size_t bytes_per_block = VideoCore::Surface::BytesPerBlock(info.format); - return device.GetLogical().CreateBuffer(VkBufferCreateInfo{ - .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .size = info.size.width * bytes_per_block, - .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | - VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | - VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, - .sharingMode = VK_SHARING_MODE_EXCLUSIVE, - .queueFamilyIndexCount = 0, - .pQueueFamilyIndices = nullptr, - }); -} - [[nodiscard]] VkImageAspectFlags ImageAspectMask(PixelFormat format) { switch (VideoCore::Surface::GetFormatType(format)) { case VideoCore::Surface::SurfaceType::ColorTexture: @@ -226,23 +195,6 @@ constexpr VkBorderColor ConvertBorderColor(const std::array& color) { } } -[[nodiscard]] VkAttachmentDescription AttachmentDescription(const Device& device, - const ImageView* image_view) { - using MaxwellToVK::SurfaceFormat; - const PixelFormat pixel_format = image_view->format; - return VkAttachmentDescription{ - .flags = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT, - .format = SurfaceFormat(device, FormatType::Optimal, true, pixel_format).format, - 
.samples = image_view->Samples(), - .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD, - .storeOp = VK_ATTACHMENT_STORE_OP_STORE, - .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD, - .stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE, - .initialLayout = VK_IMAGE_LAYOUT_GENERAL, - .finalLayout = VK_IMAGE_LAYOUT_GENERAL, - }; -} - [[nodiscard]] VkComponentSwizzle ComponentSwizzle(SwizzleSource swizzle) { switch (swizzle) { case SwizzleSource::Zero: @@ -263,6 +215,30 @@ constexpr VkBorderColor ConvertBorderColor(const std::array& color) { return VK_COMPONENT_SWIZZLE_ZERO; } +[[nodiscard]] VkImageViewType ImageViewType(Shader::TextureType type) { + switch (type) { + case Shader::TextureType::Color1D: + return VK_IMAGE_VIEW_TYPE_1D; + case Shader::TextureType::Color2D: + return VK_IMAGE_VIEW_TYPE_2D; + case Shader::TextureType::ColorCube: + return VK_IMAGE_VIEW_TYPE_CUBE; + case Shader::TextureType::Color3D: + return VK_IMAGE_VIEW_TYPE_3D; + case Shader::TextureType::ColorArray1D: + return VK_IMAGE_VIEW_TYPE_1D_ARRAY; + case Shader::TextureType::ColorArray2D: + return VK_IMAGE_VIEW_TYPE_2D_ARRAY; + case Shader::TextureType::ColorArrayCube: + return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY; + case Shader::TextureType::Buffer: + UNREACHABLE_MSG("Texture buffers can't be image views"); + return VK_IMAGE_VIEW_TYPE_1D; + } + UNREACHABLE_MSG("Invalid image view type={}", type); + return VK_IMAGE_VIEW_TYPE_2D; +} + [[nodiscard]] VkImageViewType ImageViewType(VideoCommon::ImageViewType type) { switch (type) { case VideoCommon::ImageViewType::e1D: @@ -280,7 +256,7 @@ constexpr VkBorderColor ConvertBorderColor(const std::array& color) { case VideoCommon::ImageViewType::CubeArray: return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY; case VideoCommon::ImageViewType::Rect: - LOG_WARNING(Render_Vulkan, "Unnormalized image view type not supported"); + UNIMPLEMENTED_MSG("Rect image view"); return VK_IMAGE_VIEW_TYPE_2D; case VideoCommon::ImageViewType::Buffer: UNREACHABLE_MSG("Texture buffers can't be image views"); @@ -327,7 +303,7 @@ constexpr VkBorderColor ConvertBorderColor(const std::array& color) { }; } -[[nodiscard]] std::vector TransformBufferCopies( +[[maybe_unused]] [[nodiscard]] std::vector TransformBufferCopies( std::span copies, size_t buffer_offset) { std::vector result(copies.size()); std::ranges::transform( @@ -587,6 +563,28 @@ struct RangedBarrierRange { } }; +[[nodiscard]] VkFormat Format(Shader::ImageFormat format) { + switch (format) { + case Shader::ImageFormat::Typeless: + break; + case Shader::ImageFormat::R8_SINT: + return VK_FORMAT_R8_SINT; + case Shader::ImageFormat::R8_UINT: + return VK_FORMAT_R8_UINT; + case Shader::ImageFormat::R16_UINT: + return VK_FORMAT_R16_UINT; + case Shader::ImageFormat::R16_SINT: + return VK_FORMAT_R16_SINT; + case Shader::ImageFormat::R32_UINT: + return VK_FORMAT_R32_UINT; + case Shader::ImageFormat::R32G32_UINT: + return VK_FORMAT_R32G32_UINT; + case Shader::ImageFormat::R32G32B32A32_UINT: + return VK_FORMAT_R32G32B32A32_UINT; + } + UNREACHABLE_MSG("Invalid image format={}", format); + return VK_FORMAT_R32_UINT; +} } // Anonymous namespace void TextureCacheRuntime::Finish() { @@ -622,7 +620,7 @@ void TextureCacheRuntime::BlitImage(Framebuffer* dst_framebuffer, ImageView& dst return; } } - ASSERT(src.ImageFormat() == dst.ImageFormat()); + ASSERT(src.format == dst.format); ASSERT(!(is_dst_msaa && !is_src_msaa)); ASSERT(operation == Fermi2D::Operation::SrcCopy); @@ -825,13 +823,9 @@ u64 TextureCacheRuntime::GetDeviceLocalMemory() const { Image::Image(TextureCacheRuntime& runtime, const ImageInfo& info_, 
GPUVAddr gpu_addr_, VAddr cpu_addr_) : VideoCommon::ImageBase(info_, gpu_addr_, cpu_addr_), scheduler{&runtime.scheduler}, - image(MakeImage(runtime.device, info)), buffer(MakeBuffer(runtime.device, info)), + image(MakeImage(runtime.device, info)), + commit(runtime.memory_allocator.Commit(image, MemoryUsage::DeviceLocal)), aspect_mask(ImageAspectMask(info.format)) { - if (image) { - commit = runtime.memory_allocator.Commit(image, MemoryUsage::DeviceLocal); - } else { - commit = runtime.memory_allocator.Commit(buffer, MemoryUsage::DeviceLocal); - } if (IsPixelFormatASTC(info.format) && !runtime.device.IsOptimalAstcSupported()) { if (Settings::values.accelerate_astc.GetValue()) { flags |= VideoCommon::ImageFlagBits::AcceleratedUpload; @@ -840,11 +834,7 @@ Image::Image(TextureCacheRuntime& runtime, const ImageInfo& info_, GPUVAddr gpu_ } } if (runtime.device.HasDebuggingToolAttached()) { - if (image) { - image.SetObjectNameEXT(VideoCommon::Name(*this).c_str()); - } else { - buffer.SetObjectNameEXT(VideoCommon::Name(*this).c_str()); - } + image.SetObjectNameEXT(VideoCommon::Name(*this).c_str()); } static constexpr VkImageViewUsageCreateInfo storage_image_view_usage_create_info{ .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, @@ -896,19 +886,6 @@ void Image::UploadMemory(const StagingBufferRef& map, std::span copies) { - // TODO: Move this to another API - scheduler->RequestOutsideRenderPassOperationContext(); - std::vector vk_copies = TransformBufferCopies(copies, map.offset); - const VkBuffer src_buffer = map.buffer; - const VkBuffer dst_buffer = *buffer; - scheduler->Record([src_buffer, dst_buffer, vk_copies](vk::CommandBuffer cmdbuf) { - // TODO: Barriers - cmdbuf.CopyBuffer(src_buffer, dst_buffer, vk_copies); - }); -} - void Image::DownloadMemory(const StagingBufferRef& map, std::span copies) { std::vector vk_copies = TransformBufferImageCopies(copies, map.offset, aspect_mask); scheduler->Record([buffer = map.buffer, image = *image, aspect_mask = aspect_mask, @@ -966,8 +943,9 @@ void Image::DownloadMemory(const StagingBufferRef& map, std::span swizzle{ SwizzleSource::R, @@ -1005,57 +983,54 @@ ImageView::ImageView(TextureCacheRuntime& runtime, const VideoCommon::ImageViewI }, .subresourceRange = MakeSubresourceRange(aspect_mask, info.range), }; - const auto create = [&](VideoCommon::ImageViewType view_type, std::optional num_layers) { + const auto create = [&](TextureType tex_type, std::optional num_layers) { VkImageViewCreateInfo ci{create_info}; - ci.viewType = ImageViewType(view_type); + ci.viewType = ImageViewType(tex_type); if (num_layers) { ci.subresourceRange.layerCount = *num_layers; } vk::ImageView handle = device->GetLogical().CreateImageView(ci); if (device->HasDebuggingToolAttached()) { - handle.SetObjectNameEXT(VideoCommon::Name(*this, view_type).c_str()); + handle.SetObjectNameEXT(VideoCommon::Name(*this).c_str()); } - image_views[static_cast(view_type)] = std::move(handle); + image_views[static_cast(tex_type)] = std::move(handle); }; switch (info.type) { case VideoCommon::ImageViewType::e1D: case VideoCommon::ImageViewType::e1DArray: - create(VideoCommon::ImageViewType::e1D, 1); - create(VideoCommon::ImageViewType::e1DArray, std::nullopt); - render_target = Handle(VideoCommon::ImageViewType::e1DArray); + create(TextureType::Color1D, 1); + create(TextureType::ColorArray1D, std::nullopt); + render_target = Handle(TextureType::ColorArray1D); break; case VideoCommon::ImageViewType::e2D: case VideoCommon::ImageViewType::e2DArray: - create(VideoCommon::ImageViewType::e2D, 
1); - create(VideoCommon::ImageViewType::e2DArray, std::nullopt); - render_target = Handle(VideoCommon::ImageViewType::e2DArray); + create(TextureType::Color2D, 1); + create(TextureType::ColorArray2D, std::nullopt); + render_target = Handle(Shader::TextureType::ColorArray2D); break; case VideoCommon::ImageViewType::e3D: - create(VideoCommon::ImageViewType::e3D, std::nullopt); - render_target = Handle(VideoCommon::ImageViewType::e3D); + create(TextureType::Color3D, std::nullopt); + render_target = Handle(Shader::TextureType::Color3D); break; case VideoCommon::ImageViewType::Cube: case VideoCommon::ImageViewType::CubeArray: - create(VideoCommon::ImageViewType::Cube, 6); - create(VideoCommon::ImageViewType::CubeArray, std::nullopt); + create(TextureType::ColorCube, 6); + create(TextureType::ColorArrayCube, std::nullopt); break; case VideoCommon::ImageViewType::Rect: UNIMPLEMENTED(); break; case VideoCommon::ImageViewType::Buffer: - buffer_view = device->GetLogical().CreateBufferView(VkBufferViewCreateInfo{ - .sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .buffer = image.Buffer(), - .format = format_info.format, - .offset = 0, // TODO: Redesign buffer cache to support this - .range = image.guest_size_bytes, - }); + UNREACHABLE(); break; } } +ImageView::ImageView(TextureCacheRuntime&, const VideoCommon::ImageInfo& info, + const VideoCommon::ImageViewInfo& view_info, GPUVAddr gpu_addr_) + : VideoCommon::ImageViewBase{info, view_info}, gpu_addr{gpu_addr_}, + buffer_size{VideoCommon::CalculateGuestSizeInBytes(info)} {} + ImageView::ImageView(TextureCacheRuntime&, const VideoCommon::NullImageParams& params) : VideoCommon::ImageViewBase{params} {} @@ -1063,7 +1038,8 @@ VkImageView ImageView::DepthView() { if (depth_view) { return *depth_view; } - depth_view = MakeDepthStencilView(VK_IMAGE_ASPECT_DEPTH_BIT); + const auto& info = MaxwellToVK::SurfaceFormat(*device, FormatType::Optimal, true, format); + depth_view = MakeView(info.format, VK_IMAGE_ASPECT_DEPTH_BIT); return *depth_view; } @@ -1071,18 +1047,38 @@ VkImageView ImageView::StencilView() { if (stencil_view) { return *stencil_view; } - stencil_view = MakeDepthStencilView(VK_IMAGE_ASPECT_STENCIL_BIT); + const auto& info = MaxwellToVK::SurfaceFormat(*device, FormatType::Optimal, true, format); + stencil_view = MakeView(info.format, VK_IMAGE_ASPECT_STENCIL_BIT); return *stencil_view; } -vk::ImageView ImageView::MakeDepthStencilView(VkImageAspectFlags aspect_mask) { +VkImageView ImageView::StorageView(Shader::TextureType texture_type, + Shader::ImageFormat image_format) { + if (image_format == Shader::ImageFormat::Typeless) { + return Handle(texture_type); + } + const bool is_signed{image_format == Shader::ImageFormat::R8_SINT || + image_format == Shader::ImageFormat::R16_SINT}; + if (!storage_views) { + storage_views = std::make_unique(); + } + auto& views{is_signed ? 
storage_views->signeds : storage_views->unsigneds}; + auto& view{views[static_cast(texture_type)]}; + if (view) { + return *view; + } + view = MakeView(Format(image_format), VK_IMAGE_ASPECT_COLOR_BIT); + return *view; +} + +vk::ImageView ImageView::MakeView(VkFormat vk_format, VkImageAspectFlags aspect_mask) { return device->GetLogical().CreateImageView({ .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, .pNext = nullptr, .flags = 0, .image = image_handle, .viewType = ImageViewType(type), - .format = MaxwellToVK::SurfaceFormat(*device, FormatType::Optimal, true, format).format, + .format = vk_format, .components{ .r = VK_COMPONENT_SWIZZLE_IDENTITY, .g = VK_COMPONENT_SWIZZLE_IDENTITY, @@ -1146,7 +1142,6 @@ Sampler::Sampler(TextureCacheRuntime& runtime, const Tegra::Texture::TSCEntry& t Framebuffer::Framebuffer(TextureCacheRuntime& runtime, std::span color_buffers, ImageView* depth_buffer, const VideoCommon::RenderTargets& key) { - std::vector descriptions; std::vector attachments; RenderPassKey renderpass_key{}; s32 num_layers = 1; @@ -1157,7 +1152,6 @@ Framebuffer::Framebuffer(TextureCacheRuntime& runtime, std::spanRenderTarget()); renderpass_key.color_formats[index] = color_buffer->format; num_layers = std::max(num_layers, color_buffer->range.extent.layers); @@ -1167,10 +1161,7 @@ Framebuffer::Framebuffer(TextureCacheRuntime& runtime, std::spanRenderTarget()); renderpass_key.depth_format = depth_buffer->format; num_layers = std::max(num_layers, depth_buffer->range.extent.layers); @@ -1183,40 +1174,14 @@ Framebuffer::Framebuffer(TextureCacheRuntime& runtime, std::span(num_colors), - .pColorAttachments = num_colors != 0 ? ATTACHMENT_REFERENCES.data() : nullptr, - .pResolveAttachments = nullptr, - .pDepthStencilAttachment = depth_attachment, - .preserveAttachmentCount = 0, - .pPreserveAttachments = nullptr, - }; - cache_pair->second = device.CreateRenderPass(VkRenderPassCreateInfo{ - .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, - .pNext = nullptr, - .flags = 0, - .attachmentCount = static_cast(descriptions.size()), - .pAttachments = descriptions.data(), - .subpassCount = 1, - .pSubpasses = &subpass, - .dependencyCount = 0, - .pDependencies = nullptr, - }); - } - renderpass = *cache_pair->second; + renderpass = runtime.render_pass_cache.Get(renderpass_key); + render_area = VkExtent2D{ .width = key.size.width, .height = key.size.height, }; num_color_buffers = static_cast(num_colors); - framebuffer = device.CreateFramebuffer(VkFramebufferCreateInfo{ + framebuffer = runtime.device.GetLogical().CreateFramebuffer({ .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, .pNext = nullptr, .flags = 0, diff --git a/src/video_core/renderer_vulkan/vk_texture_cache.h b/src/video_core/renderer_vulkan/vk_texture_cache.h index 172bcdf98..0b73d55f8 100755 --- a/src/video_core/renderer_vulkan/vk_texture_cache.h +++ b/src/video_core/renderer_vulkan/vk_texture_cache.h @@ -7,6 +7,7 @@ #include #include +#include "shader_recompiler/shader_info.h" #include "video_core/renderer_vulkan/vk_staging_buffer_pool.h" #include "video_core/texture_cache/texture_cache.h" #include "video_core/vulkan_common/vulkan_memory_allocator.h" @@ -26,35 +27,10 @@ class Device; class Image; class ImageView; class Framebuffer; +class RenderPassCache; class StagingBufferPool; class VKScheduler; -struct RenderPassKey { - constexpr auto operator<=>(const RenderPassKey&) const noexcept = default; - - std::array color_formats; - PixelFormat depth_format; - VkSampleCountFlagBits samples; -}; - -} // namespace Vulkan - -namespace std { 
-template <> -struct hash { - [[nodiscard]] constexpr size_t operator()(const Vulkan::RenderPassKey& key) const noexcept { - size_t value = static_cast(key.depth_format) << 48; - value ^= static_cast(key.samples) << 52; - for (size_t i = 0; i < key.color_formats.size(); ++i) { - value ^= static_cast(key.color_formats[i]) << (i * 6); - } - return value; - } -}; -} // namespace std - -namespace Vulkan { - struct TextureCacheRuntime { const Device& device; VKScheduler& scheduler; @@ -62,13 +38,13 @@ struct TextureCacheRuntime { StagingBufferPool& staging_buffer_pool; BlitImageHelper& blit_image_helper; ASTCDecoderPass& astc_decoder_pass; - std::unordered_map renderpass_cache{}; + RenderPassCache& render_pass_cache; void Finish(); - [[nodiscard]] StagingBufferRef UploadStagingBuffer(size_t size); + StagingBufferRef UploadStagingBuffer(size_t size); - [[nodiscard]] StagingBufferRef DownloadStagingBuffer(size_t size); + StagingBufferRef DownloadStagingBuffer(size_t size); void BlitImage(Framebuffer* dst_framebuffer, ImageView& dst, ImageView& src, const Region2D& dst_region, const Region2D& src_region, @@ -79,7 +55,7 @@ struct TextureCacheRuntime { void ConvertImage(Framebuffer* dst, ImageView& dst_view, ImageView& src_view); - [[nodiscard]] bool CanAccelerateImageUpload(Image&) const noexcept { + bool CanAccelerateImageUpload(Image&) const noexcept { return false; } @@ -117,8 +93,6 @@ public: void UploadMemory(const StagingBufferRef& map, std::span copies); - void UploadMemory(const StagingBufferRef& map, std::span copies); - void DownloadMemory(const StagingBufferRef& map, std::span copies); @@ -126,10 +100,6 @@ public: return *image; } - [[nodiscard]] VkBuffer Buffer() const noexcept { - return *buffer; - } - [[nodiscard]] VkImageAspectFlags AspectMask() const noexcept { return aspect_mask; } @@ -146,7 +116,6 @@ public: private: VKScheduler* scheduler; vk::Image image; - vk::Buffer buffer; MemoryCommit commit; vk::ImageView image_view; std::vector storage_image_views; @@ -157,18 +126,19 @@ private: class ImageView : public VideoCommon::ImageViewBase { public: explicit ImageView(TextureCacheRuntime&, const VideoCommon::ImageViewInfo&, ImageId, Image&); + explicit ImageView(TextureCacheRuntime&, const VideoCommon::ImageInfo&, + const VideoCommon::ImageViewInfo&, GPUVAddr); explicit ImageView(TextureCacheRuntime&, const VideoCommon::NullImageParams&); [[nodiscard]] VkImageView DepthView(); [[nodiscard]] VkImageView StencilView(); - [[nodiscard]] VkImageView Handle(VideoCommon::ImageViewType query_type) const noexcept { - return *image_views[static_cast(query_type)]; - } + [[nodiscard]] VkImageView StorageView(Shader::TextureType texture_type, + Shader::ImageFormat image_format); - [[nodiscard]] VkBufferView BufferView() const noexcept { - return *buffer_view; + [[nodiscard]] VkImageView Handle(Shader::TextureType texture_type) const noexcept { + return *image_views[static_cast(texture_type)]; } [[nodiscard]] VkImage ImageHandle() const noexcept { @@ -179,26 +149,36 @@ public: return render_target; } - [[nodiscard]] PixelFormat ImageFormat() const noexcept { - return image_format; - } - [[nodiscard]] VkSampleCountFlagBits Samples() const noexcept { return samples; } + [[nodiscard]] GPUVAddr GpuAddr() const noexcept { + return gpu_addr; + } + + [[nodiscard]] u32 BufferSize() const noexcept { + return buffer_size; + } + private: - [[nodiscard]] vk::ImageView MakeDepthStencilView(VkImageAspectFlags aspect_mask); + struct StorageViews { + std::array signeds; + std::array unsigneds; + }; + + 
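The signeds/unsigneds pair above backs the lazy caching in StorageView(): only the two signed integer formats get reinterpreting views of their own, everything else shares the unsigned array. A small helper equivalent to that test (illustrative, not part of this patch):

// Mirrors StorageView()'s signedness check: R8_SINT and R16_SINT select
// the `signeds` array, every other typed format selects `unsigneds`.
[[nodiscard]] bool IsSignedStorageFormat(Shader::ImageFormat format) {
    return format == Shader::ImageFormat::R8_SINT ||
           format == Shader::ImageFormat::R16_SINT;
}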
[[nodiscard]] vk::ImageView MakeView(VkFormat vk_format, VkImageAspectFlags aspect_mask); const Device* device = nullptr; - std::array image_views; + std::array image_views; + std::unique_ptr storage_views; vk::ImageView depth_view; vk::ImageView stencil_view; - vk::BufferView buffer_view; VkImage image_handle = VK_NULL_HANDLE; VkImageView render_target = VK_NULL_HANDLE; - PixelFormat image_format = PixelFormat::Invalid; VkSampleCountFlagBits samples = VK_SAMPLE_COUNT_1_BIT; + GPUVAddr gpu_addr = 0; + u32 buffer_size = 0; }; class ImageAlloc : public VideoCommon::ImageAllocBase {}; diff --git a/src/video_core/renderer_vulkan/vk_update_descriptor.cpp b/src/video_core/renderer_vulkan/vk_update_descriptor.cpp index dc45fdcb1..0df3a7fe9 100755 --- a/src/video_core/renderer_vulkan/vk_update_descriptor.cpp +++ b/src/video_core/renderer_vulkan/vk_update_descriptor.cpp @@ -15,7 +15,9 @@ namespace Vulkan { VKUpdateDescriptorQueue::VKUpdateDescriptorQueue(const Device& device_, VKScheduler& scheduler_) - : device{device_}, scheduler{scheduler_} {} + : device{device_}, scheduler{scheduler_} { + payload_cursor = payload.data(); +} VKUpdateDescriptorQueue::~VKUpdateDescriptorQueue() = default; @@ -36,13 +38,4 @@ void VKUpdateDescriptorQueue::Acquire() { upload_start = payload_cursor; } -void VKUpdateDescriptorQueue::Send(VkDescriptorUpdateTemplateKHR update_template, - VkDescriptorSet set) { - const void* const data = upload_start; - const vk::Device* const logical = &device.GetLogical(); - scheduler.Record([data, logical, set, update_template](vk::CommandBuffer) { - logical->UpdateDescriptorSet(set, update_template, data); - }); -} - } // namespace Vulkan diff --git a/src/video_core/renderer_vulkan/vk_update_descriptor.h b/src/video_core/renderer_vulkan/vk_update_descriptor.h index d35e77c44..d7de4c490 100755 --- a/src/video_core/renderer_vulkan/vk_update_descriptor.h +++ b/src/video_core/renderer_vulkan/vk_update_descriptor.h @@ -39,7 +39,9 @@ public: void Acquire(); - void Send(VkDescriptorUpdateTemplateKHR update_template, VkDescriptorSet set); + const DescriptorUpdateEntry* UpdateData() const noexcept { + return upload_start; + } void AddSampledImage(VkImageView image_view, VkSampler sampler) { *(payload_cursor++) = VkDescriptorImageInfo{ diff --git a/src/video_core/shader_cache.cpp b/src/video_core/shader_cache.cpp new file mode 100755 index 000000000..78bf90c48 --- /dev/null +++ b/src/video_core/shader_cache.cpp @@ -0,0 +1,250 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
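The new shader_cache.cpp below tracks shader code at 16 KiB page granularity (PAGE_BITS = 14 in the matching header). A worked sketch of the page arithmetic that Register() and InvalidatePagesInRegion() rely on (standalone and illustrative only):

// Page arithmetic as used by the cache below: PAGE_BITS = 14 gives
// 16 KiB pages; end pages round up so partial pages are still covered.
constexpr u64 PAGE_BITS = 14;
constexpr u64 PAGE_SIZE = u64(1) << PAGE_BITS;

constexpr u64 FirstPage(u64 addr) {
    return addr >> PAGE_BITS;
}
constexpr u64 EndPage(u64 addr_end) {
    return (addr_end + PAGE_SIZE - 1) >> PAGE_BITS;
}
// Example: a shader at 0x5000 spanning 0x6000 bytes covers pages
// FirstPage(0x5000) == 1 up to EndPage(0xB000) == 3 (exclusive).
static_assert(FirstPage(0x5000) == 1 && EndPage(0xB000) == 3);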
+ +#include +#include +#include + +#include "common/assert.h" +#include "shader_recompiler/frontend/maxwell/control_flow.h" +#include "shader_recompiler/object_pool.h" +#include "video_core/dirty_flags.h" +#include "video_core/engines/kepler_compute.h" +#include "video_core/engines/maxwell_3d.h" +#include "video_core/memory_manager.h" +#include "video_core/shader_cache.h" +#include "video_core/shader_environment.h" + +namespace VideoCommon { + +void ShaderCache::InvalidateRegion(VAddr addr, size_t size) { + std::scoped_lock lock{invalidation_mutex}; + InvalidatePagesInRegion(addr, size); + RemovePendingShaders(); +} + +void ShaderCache::OnCPUWrite(VAddr addr, size_t size) { + std::lock_guard lock{invalidation_mutex}; + InvalidatePagesInRegion(addr, size); +} + +void ShaderCache::SyncGuestHost() { + std::scoped_lock lock{invalidation_mutex}; + RemovePendingShaders(); +} + +ShaderCache::ShaderCache(VideoCore::RasterizerInterface& rasterizer_, + Tegra::MemoryManager& gpu_memory_, Tegra::Engines::Maxwell3D& maxwell3d_, + Tegra::Engines::KeplerCompute& kepler_compute_) + : gpu_memory{gpu_memory_}, maxwell3d{maxwell3d_}, kepler_compute{kepler_compute_}, + rasterizer{rasterizer_} {} + +bool ShaderCache::RefreshStages(std::array& unique_hashes) { + auto& dirty{maxwell3d.dirty.flags}; + if (!dirty[VideoCommon::Dirty::Shaders]) { + return last_shaders_valid; + } + dirty[VideoCommon::Dirty::Shaders] = false; + + const GPUVAddr base_addr{maxwell3d.regs.code_address.CodeAddress()}; + for (size_t index = 0; index < Tegra::Engines::Maxwell3D::Regs::MaxShaderProgram; ++index) { + if (!maxwell3d.regs.IsShaderConfigEnabled(index)) { + unique_hashes[index] = 0; + continue; + } + const auto& shader_config{maxwell3d.regs.shader_config[index]}; + const auto program{static_cast(index)}; + const GPUVAddr shader_addr{base_addr + shader_config.offset}; + const std::optional cpu_shader_addr{gpu_memory.GpuToCpuAddress(shader_addr)}; + if (!cpu_shader_addr) { + LOG_ERROR(HW_GPU, "Invalid GPU address for shader 0x{:016x}", shader_addr); + last_shaders_valid = false; + return false; + } + const ShaderInfo* shader_info{TryGet(*cpu_shader_addr)}; + if (!shader_info) { + const u32 start_address{shader_config.offset}; + GraphicsEnvironment env{maxwell3d, gpu_memory, program, base_addr, start_address}; + shader_info = MakeShaderInfo(env, *cpu_shader_addr); + } + shader_infos[index] = shader_info; + unique_hashes[index] = shader_info->unique_hash; + } + last_shaders_valid = true; + return true; +} + +const ShaderInfo* ShaderCache::ComputeShader() { + const GPUVAddr program_base{kepler_compute.regs.code_loc.Address()}; + const auto& qmd{kepler_compute.launch_description}; + const GPUVAddr shader_addr{program_base + qmd.program_start}; + const std::optional cpu_shader_addr{gpu_memory.GpuToCpuAddress(shader_addr)}; + if (!cpu_shader_addr) { + LOG_ERROR(HW_GPU, "Invalid GPU address for shader 0x{:016x}", shader_addr); + return nullptr; + } + if (const ShaderInfo* const shader = TryGet(*cpu_shader_addr)) { + return shader; + } + ComputeEnvironment env{kepler_compute, gpu_memory, program_base, qmd.program_start}; + return MakeShaderInfo(env, *cpu_shader_addr); +} + +void ShaderCache::GetGraphicsEnvironments(GraphicsEnvironments& result, + const std::array& unique_hashes) { + size_t env_index{}; + const GPUVAddr base_addr{maxwell3d.regs.code_address.CodeAddress()}; + for (size_t index = 0; index < NUM_PROGRAMS; ++index) { + if (unique_hashes[index] == 0) { + continue; + } + const auto program{static_cast(index)}; + auto& 
env{result.envs[index]}; + const u32 start_address{maxwell3d.regs.shader_config[index].offset}; + env = GraphicsEnvironment{maxwell3d, gpu_memory, program, base_addr, start_address}; + env.SetCachedSize(shader_infos[index]->size_bytes); + result.env_ptrs[env_index++] = &env; + } +} + +ShaderInfo* ShaderCache::TryGet(VAddr addr) const { + std::scoped_lock lock{lookup_mutex}; + + const auto it = lookup_cache.find(addr); + if (it == lookup_cache.end()) { + return nullptr; + } + return it->second->data; +} + +void ShaderCache::Register(std::unique_ptr data, VAddr addr, size_t size) { + std::scoped_lock lock{invalidation_mutex, lookup_mutex}; + + const VAddr addr_end = addr + size; + Entry* const entry = NewEntry(addr, addr_end, data.get()); + + const u64 page_end = (addr_end + PAGE_SIZE - 1) >> PAGE_BITS; + for (u64 page = addr >> PAGE_BITS; page < page_end; ++page) { + invalidation_cache[page].push_back(entry); + } + + storage.push_back(std::move(data)); + + rasterizer.UpdatePagesCachedCount(addr, size, 1); +} + +void ShaderCache::InvalidatePagesInRegion(VAddr addr, size_t size) { + const VAddr addr_end = addr + size; + const u64 page_end = (addr_end + PAGE_SIZE - 1) >> PAGE_BITS; + for (u64 page = addr >> PAGE_BITS; page < page_end; ++page) { + auto it = invalidation_cache.find(page); + if (it == invalidation_cache.end()) { + continue; + } + InvalidatePageEntries(it->second, addr, addr_end); + } +} + +void ShaderCache::RemovePendingShaders() { + if (marked_for_removal.empty()) { + return; + } + // Remove duplicates + std::ranges::sort(marked_for_removal); + marked_for_removal.erase(std::unique(marked_for_removal.begin(), marked_for_removal.end()), + marked_for_removal.end()); + + std::vector removed_shaders; + removed_shaders.reserve(marked_for_removal.size()); + + std::scoped_lock lock{lookup_mutex}; + + for (Entry* const entry : marked_for_removal) { + removed_shaders.push_back(entry->data); + + const auto it = lookup_cache.find(entry->addr_start); + ASSERT(it != lookup_cache.end()); + lookup_cache.erase(it); + } + marked_for_removal.clear(); + + if (!removed_shaders.empty()) { + RemoveShadersFromStorage(std::move(removed_shaders)); + } +} + +void ShaderCache::InvalidatePageEntries(std::vector& entries, VAddr addr, VAddr addr_end) { + size_t index = 0; + while (index < entries.size()) { + Entry* const entry = entries[index]; + if (!entry->Overlaps(addr, addr_end)) { + ++index; + continue; + } + + UnmarkMemory(entry); + RemoveEntryFromInvalidationCache(entry); + marked_for_removal.push_back(entry); + } +} + +void ShaderCache::RemoveEntryFromInvalidationCache(const Entry* entry) { + const u64 page_end = (entry->addr_end + PAGE_SIZE - 1) >> PAGE_BITS; + for (u64 page = entry->addr_start >> PAGE_BITS; page < page_end; ++page) { + const auto entries_it = invalidation_cache.find(page); + ASSERT(entries_it != invalidation_cache.end()); + std::vector& entries = entries_it->second; + + const auto entry_it = std::ranges::find(entries, entry); + ASSERT(entry_it != entries.end()); + entries.erase(entry_it); + } +} + +void ShaderCache::UnmarkMemory(Entry* entry) { + if (!entry->is_memory_marked) { + return; + } + entry->is_memory_marked = false; + + const VAddr addr = entry->addr_start; + const size_t size = entry->addr_end - addr; + rasterizer.UpdatePagesCachedCount(addr, size, -1); +} + +void ShaderCache::RemoveShadersFromStorage(std::vector removed_shaders) { + // Remove them from the cache + std::erase_if(storage, [&removed_shaders](const std::unique_ptr& shader) { + return 
std::ranges::find(removed_shaders, shader.get()) != removed_shaders.end(); + }); +} + +ShaderCache::Entry* ShaderCache::NewEntry(VAddr addr, VAddr addr_end, ShaderInfo* data) { + auto entry = std::make_unique(Entry{addr, addr_end, data}); + Entry* const entry_pointer = entry.get(); + + lookup_cache.emplace(addr, std::move(entry)); + return entry_pointer; +} + +const ShaderInfo* ShaderCache::MakeShaderInfo(GenericEnvironment& env, VAddr cpu_addr) { + auto info = std::make_unique(); + if (const std::optional cached_hash{env.Analyze()}) { + info->unique_hash = *cached_hash; + info->size_bytes = env.CachedSize(); + } else { + // Slow path, not really hit on commercial games + // Build a control flow graph to get the real shader size + Shader::ObjectPool flow_block; + Shader::Maxwell::Flow::CFG cfg{env, flow_block, env.StartAddress()}; + info->unique_hash = env.CalculateHash(); + info->size_bytes = env.ReadSize(); + } + const size_t size_bytes{info->size_bytes}; + const ShaderInfo* const result{info.get()}; + Register(std::move(info), cpu_addr, size_bytes); + return result; +} + +} // namespace VideoCommon diff --git a/src/video_core/shader_cache.h b/src/video_core/shader_cache.h index 015a789d6..136fe294c 100755 --- a/src/video_core/shader_cache.h +++ b/src/video_core/shader_cache.h @@ -5,226 +5,147 @@ #pragma once #include +#include #include #include +#include #include #include #include -#include "common/assert.h" #include "common/common_types.h" #include "video_core/rasterizer_interface.h" +#include "video_core/shader_environment.h" + +namespace Tegra { +class MemoryManager; +} namespace VideoCommon { -template +class GenericEnvironment; + +struct ShaderInfo { + u64 unique_hash{}; + size_t size_bytes{}; +}; + class ShaderCache { static constexpr u64 PAGE_BITS = 14; static constexpr u64 PAGE_SIZE = u64(1) << PAGE_BITS; + static constexpr size_t NUM_PROGRAMS = 6; + struct Entry { VAddr addr_start; VAddr addr_end; - T* data; + ShaderInfo* data; bool is_memory_marked = true; - constexpr bool Overlaps(VAddr start, VAddr end) const noexcept { + bool Overlaps(VAddr start, VAddr end) const noexcept { return start < addr_end && addr_start < end; } }; public: - virtual ~ShaderCache() = default; - /// @brief Removes shaders inside a given region /// @note Checks for ranges /// @param addr Start address of the invalidation /// @param size Number of bytes of the invalidation - void InvalidateRegion(VAddr addr, std::size_t size) { - std::scoped_lock lock{invalidation_mutex}; - InvalidatePagesInRegion(addr, size); - RemovePendingShaders(); - } + void InvalidateRegion(VAddr addr, size_t size); /// @brief Unmarks a memory region as cached and marks it for removal /// @param addr Start address of the CPU write operation /// @param size Number of bytes of the CPU write operation - void OnCPUWrite(VAddr addr, std::size_t size) { - std::lock_guard lock{invalidation_mutex}; - InvalidatePagesInRegion(addr, size); - } + void OnCPUWrite(VAddr addr, size_t size); /// @brief Flushes delayed removal operations - void SyncGuestHost() { - std::scoped_lock lock{invalidation_mutex}; - RemovePendingShaders(); - } + void SyncGuestHost(); +protected: + struct GraphicsEnvironments { + std::array envs; + std::array env_ptrs; + + std::span Span() const noexcept { + return std::span(env_ptrs.begin(), std::ranges::find(env_ptrs, nullptr)); + } + }; + + explicit ShaderCache(VideoCore::RasterizerInterface& rasterizer_, + Tegra::MemoryManager& gpu_memory_, Tegra::Engines::Maxwell3D& maxwell3d_, + Tegra::Engines::KeplerCompute& 
kepler_compute_);
+
+    /// @brief Updates the hashes and information of shader stages
+    /// @param unique_hashes Shader hashes to store into when a stage is enabled
+    /// @return True on success, false on error
+    bool RefreshStages(std::array<u64, NUM_PROGRAMS>& unique_hashes);
+
+    /// @brief Returns information about the current compute shader
+    /// @return Pointer to a valid shader, nullptr on error
+    const ShaderInfo* ComputeShader();
+
+    /// @brief Collects the current graphics environments
+    void GetGraphicsEnvironments(GraphicsEnvironments& result,
+                                 const std::array<u64, NUM_PROGRAMS>& unique_hashes);
+
+    Tegra::MemoryManager& gpu_memory;
+    Tegra::Engines::Maxwell3D& maxwell3d;
+    Tegra::Engines::KeplerCompute& kepler_compute;
+
+    std::array<const ShaderInfo*, NUM_PROGRAMS> shader_infos{};
+    bool last_shaders_valid = false;
+
+private:
     /// @brief Tries to obtain a cached shader starting in a given address
     /// @note Doesn't check for ranges, the given address has to be the start of the shader
     /// @param addr Start address of the shader, this doesn't cache for region
     /// @return Pointer to a valid shader, nullptr when nothing is found
-    T* TryGet(VAddr addr) const {
-        std::scoped_lock lock{lookup_mutex};
-
-        const auto it = lookup_cache.find(addr);
-        if (it == lookup_cache.end()) {
-            return nullptr;
-        }
-        return it->second->data;
-    }
-
-protected:
-    explicit ShaderCache(VideoCore::RasterizerInterface& rasterizer_) : rasterizer{rasterizer_} {}
+    ShaderInfo* TryGet(VAddr addr) const;
 
     /// @brief Register in the cache a given entry
     /// @param data Shader to store in the cache
     /// @param addr Start address of the shader that will be registered
     /// @param size Size in bytes of the shader
-    void Register(std::unique_ptr<T> data, VAddr addr, std::size_t size) {
-        std::scoped_lock lock{invalidation_mutex, lookup_mutex};
+    void Register(std::unique_ptr<ShaderInfo> data, VAddr addr, size_t size);
 
-        const VAddr addr_end = addr + size;
-        Entry* const entry = NewEntry(addr, addr_end, data.get());
-
-        const u64 page_end = (addr_end + PAGE_SIZE - 1) >> PAGE_BITS;
-        for (u64 page = addr >> PAGE_BITS; page < page_end; ++page) {
-            invalidation_cache[page].push_back(entry);
-        }
-
-        storage.push_back(std::move(data));
-
-        rasterizer.UpdatePagesCachedCount(addr, size, 1);
-    }
-
-    /// @brief Called when a shader is going to be removed
-    /// @param shader Shader that will be removed
-    /// @pre invalidation_cache is locked
-    /// @pre lookup_mutex is locked
-    virtual void OnShaderRemoval([[maybe_unused]] T* shader) {}
-
-private:
     /// @brief Invalidate pages in a given region
     /// @pre invalidation_mutex is locked
-    void InvalidatePagesInRegion(VAddr addr, std::size_t size) {
-        const VAddr addr_end = addr + size;
-        const u64 page_end = (addr_end + PAGE_SIZE - 1) >> PAGE_BITS;
-        for (u64 page = addr >> PAGE_BITS; page < page_end; ++page) {
-            auto it = invalidation_cache.find(page);
-            if (it == invalidation_cache.end()) {
-                continue;
-            }
-            InvalidatePageEntries(it->second, addr, addr_end);
-        }
-    }
+    void InvalidatePagesInRegion(VAddr addr, size_t size);
 
     /// @brief Remove shaders marked for deletion
     /// @pre invalidation_mutex is locked
-    void RemovePendingShaders() {
-        if (marked_for_removal.empty()) {
-            return;
-        }
-        // Remove duplicates
-        std::sort(marked_for_removal.begin(), marked_for_removal.end());
-        marked_for_removal.erase(std::unique(marked_for_removal.begin(), marked_for_removal.end()),
-                                 marked_for_removal.end());
-
-        std::vector<T*> removed_shaders;
-        removed_shaders.reserve(marked_for_removal.size());
-
-        std::scoped_lock lock{lookup_mutex};
-
-        for (Entry* const entry : marked_for_removal) {
- removed_shaders.push_back(entry->data); - - const auto it = lookup_cache.find(entry->addr_start); - ASSERT(it != lookup_cache.end()); - lookup_cache.erase(it); - } - marked_for_removal.clear(); - - if (!removed_shaders.empty()) { - RemoveShadersFromStorage(std::move(removed_shaders)); - } - } + void RemovePendingShaders(); /// @brief Invalidates entries in a given range for the passed page /// @param entries Vector of entries in the page, it will be modified on overlaps /// @param addr Start address of the invalidation /// @param addr_end Non-inclusive end address of the invalidation /// @pre invalidation_mutex is locked - void InvalidatePageEntries(std::vector& entries, VAddr addr, VAddr addr_end) { - std::size_t index = 0; - while (index < entries.size()) { - Entry* const entry = entries[index]; - if (!entry->Overlaps(addr, addr_end)) { - ++index; - continue; - } - - UnmarkMemory(entry); - RemoveEntryFromInvalidationCache(entry); - marked_for_removal.push_back(entry); - } - } + void InvalidatePageEntries(std::vector& entries, VAddr addr, VAddr addr_end); /// @brief Removes all references to an entry in the invalidation cache /// @param entry Entry to remove from the invalidation cache /// @pre invalidation_mutex is locked - void RemoveEntryFromInvalidationCache(const Entry* entry) { - const u64 page_end = (entry->addr_end + PAGE_SIZE - 1) >> PAGE_BITS; - for (u64 page = entry->addr_start >> PAGE_BITS; page < page_end; ++page) { - const auto entries_it = invalidation_cache.find(page); - ASSERT(entries_it != invalidation_cache.end()); - std::vector& entries = entries_it->second; - - const auto entry_it = std::find(entries.begin(), entries.end(), entry); - ASSERT(entry_it != entries.end()); - entries.erase(entry_it); - } - } + void RemoveEntryFromInvalidationCache(const Entry* entry); /// @brief Unmarks an entry from the rasterizer cache /// @param entry Entry to unmark from memory - void UnmarkMemory(Entry* entry) { - if (!entry->is_memory_marked) { - return; - } - entry->is_memory_marked = false; - - const VAddr addr = entry->addr_start; - const std::size_t size = entry->addr_end - addr; - rasterizer.UpdatePagesCachedCount(addr, size, -1); - } + void UnmarkMemory(Entry* entry); /// @brief Removes a vector of shaders from a list /// @param removed_shaders Shaders to be removed from the storage /// @pre invalidation_mutex is locked /// @pre lookup_mutex is locked - void RemoveShadersFromStorage(std::vector removed_shaders) { - // Notify removals - for (T* const shader : removed_shaders) { - OnShaderRemoval(shader); - } - - // Remove them from the cache - const auto is_removed = [&removed_shaders](const std::unique_ptr& shader) { - return std::find(removed_shaders.begin(), removed_shaders.end(), shader.get()) != - removed_shaders.end(); - }; - std::erase_if(storage, is_removed); - } + void RemoveShadersFromStorage(std::vector removed_shaders); /// @brief Creates a new entry in the lookup cache and returns its pointer /// @pre lookup_mutex is locked - Entry* NewEntry(VAddr addr, VAddr addr_end, T* data) { - auto entry = std::make_unique(Entry{addr, addr_end, data}); - Entry* const entry_pointer = entry.get(); + Entry* NewEntry(VAddr addr, VAddr addr_end, ShaderInfo* data); - lookup_cache.emplace(addr, std::move(entry)); - return entry_pointer; - } + /// @brief Create a new shader entry and register it + const ShaderInfo* MakeShaderInfo(GenericEnvironment& env, VAddr cpu_addr); VideoCore::RasterizerInterface& rasterizer; @@ -233,7 +154,7 @@ private: std::unordered_map> lookup_cache; 
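The members in this hunk form a dual index over the same Entry objects: lookup_cache keys them by exact start address for TryGet(), while invalidation_cache buckets them per 16 KiB page so a CPU write only visits the pages it overlaps. Removing an entry must update both indexes, as in this simplified sketch (standalone types, illustrative only):

// Simplified dual-index removal: the entry leaves every page bucket it
// was registered in, then leaves the exact-address map.
struct MiniEntry {
    u64 addr_start;
    u64 addr_end;
};

void RemoveFromBothIndexes(std::unordered_map<u64, MiniEntry*>& by_addr,
                           std::unordered_map<u64, std::vector<MiniEntry*>>& by_page,
                           MiniEntry* entry) {
    const u64 page_end = (entry->addr_end + (u64(1) << 14) - 1) >> 14;
    for (u64 page = entry->addr_start >> 14; page < page_end; ++page) {
        std::erase(by_page[page], entry); // C++20 uniform erase
    }
    by_addr.erase(entry->addr_start);
}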
std::unordered_map> invalidation_cache; - std::vector> storage; + std::vector> storage; std::vector marked_for_removal; }; diff --git a/src/video_core/shader_environment.cpp b/src/video_core/shader_environment.cpp new file mode 100755 index 000000000..429cab30d --- /dev/null +++ b/src/video_core/shader_environment.cpp @@ -0,0 +1,461 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include +#include +#include +#include + +#include "common/assert.h" +#include "common/cityhash.h" +#include "common/common_types.h" +#include "common/div_ceil.h" +#include "common/fs/fs.h" +#include "common/logging/log.h" +#include "shader_recompiler/environment.h" +#include "video_core/memory_manager.h" +#include "video_core/shader_environment.h" +#include "video_core/textures/texture.h" + +namespace VideoCommon { + +constexpr std::array MAGIC_NUMBER{'y', 'u', 'z', 'u', 'c', 'a', 'c', 'h'}; +constexpr u32 CACHE_VERSION = 5; + +constexpr size_t INST_SIZE = sizeof(u64); + +using Maxwell = Tegra::Engines::Maxwell3D::Regs; + +static u64 MakeCbufKey(u32 index, u32 offset) { + return (static_cast(index) << 32) | offset; +} + +static Shader::TextureType ConvertType(const Tegra::Texture::TICEntry& entry) { + switch (entry.texture_type) { + case Tegra::Texture::TextureType::Texture1D: + return Shader::TextureType::Color1D; + case Tegra::Texture::TextureType::Texture2D: + case Tegra::Texture::TextureType::Texture2DNoMipmap: + return Shader::TextureType::Color2D; + case Tegra::Texture::TextureType::Texture3D: + return Shader::TextureType::Color3D; + case Tegra::Texture::TextureType::TextureCubemap: + return Shader::TextureType::ColorCube; + case Tegra::Texture::TextureType::Texture1DArray: + return Shader::TextureType::ColorArray1D; + case Tegra::Texture::TextureType::Texture2DArray: + return Shader::TextureType::ColorArray2D; + case Tegra::Texture::TextureType::Texture1DBuffer: + return Shader::TextureType::Buffer; + case Tegra::Texture::TextureType::TextureCubeArray: + return Shader::TextureType::ColorArrayCube; + default: + throw Shader::NotImplementedException("Unknown texture type"); + } +} + +GenericEnvironment::GenericEnvironment(Tegra::MemoryManager& gpu_memory_, GPUVAddr program_base_, + u32 start_address_) + : gpu_memory{&gpu_memory_}, program_base{program_base_} { + start_address = start_address_; +} + +GenericEnvironment::~GenericEnvironment() = default; + +u32 GenericEnvironment::TextureBoundBuffer() const { + return texture_bound; +} + +u32 GenericEnvironment::LocalMemorySize() const { + return local_memory_size; +} + +u32 GenericEnvironment::SharedMemorySize() const { + return shared_memory_size; +} + +std::array GenericEnvironment::WorkgroupSize() const { + return workgroup_size; +} + +u64 GenericEnvironment::ReadInstruction(u32 address) { + read_lowest = std::min(read_lowest, address); + read_highest = std::max(read_highest, address); + + if (address >= cached_lowest && address < cached_highest) { + return code[(address - cached_lowest) / INST_SIZE]; + } + has_unbound_instructions = true; + return gpu_memory->Read(program_base + address); +} + +std::optional GenericEnvironment::Analyze() { + const std::optional size{TryFindSize()}; + if (!size) { + return std::nullopt; + } + cached_lowest = start_address; + cached_highest = start_address + static_cast(*size); + return Common::CityHash64(reinterpret_cast(code.data()), *size); +} + +void GenericEnvironment::SetCachedSize(size_t size_bytes) { + cached_lowest = 
start_address; + cached_highest = start_address + static_cast(size_bytes); + code.resize(CachedSize()); + gpu_memory->ReadBlock(program_base + cached_lowest, code.data(), code.size() * sizeof(u64)); +} + +size_t GenericEnvironment::CachedSize() const noexcept { + return cached_highest - cached_lowest + INST_SIZE; +} + +size_t GenericEnvironment::ReadSize() const noexcept { + return read_highest - read_lowest + INST_SIZE; +} + +bool GenericEnvironment::CanBeSerialized() const noexcept { + return !has_unbound_instructions; +} + +u64 GenericEnvironment::CalculateHash() const { + const size_t size{ReadSize()}; + const auto data{std::make_unique(size)}; + gpu_memory->ReadBlock(program_base + read_lowest, data.get(), size); + return Common::CityHash64(data.get(), size); +} + +void GenericEnvironment::Serialize(std::ofstream& file) const { + const u64 code_size{static_cast(CachedSize())}; + const u64 num_texture_types{static_cast(texture_types.size())}; + const u64 num_cbuf_values{static_cast(cbuf_values.size())}; + + file.write(reinterpret_cast(&code_size), sizeof(code_size)) + .write(reinterpret_cast(&num_texture_types), sizeof(num_texture_types)) + .write(reinterpret_cast(&num_cbuf_values), sizeof(num_cbuf_values)) + .write(reinterpret_cast(&local_memory_size), sizeof(local_memory_size)) + .write(reinterpret_cast(&texture_bound), sizeof(texture_bound)) + .write(reinterpret_cast(&start_address), sizeof(start_address)) + .write(reinterpret_cast(&cached_lowest), sizeof(cached_lowest)) + .write(reinterpret_cast(&cached_highest), sizeof(cached_highest)) + .write(reinterpret_cast(&stage), sizeof(stage)) + .write(reinterpret_cast(code.data()), code_size); + for (const auto [key, type] : texture_types) { + file.write(reinterpret_cast(&key), sizeof(key)) + .write(reinterpret_cast(&type), sizeof(type)); + } + for (const auto [key, type] : cbuf_values) { + file.write(reinterpret_cast(&key), sizeof(key)) + .write(reinterpret_cast(&type), sizeof(type)); + } + if (stage == Shader::Stage::Compute) { + file.write(reinterpret_cast(&workgroup_size), sizeof(workgroup_size)) + .write(reinterpret_cast(&shared_memory_size), sizeof(shared_memory_size)); + } else { + file.write(reinterpret_cast(&sph), sizeof(sph)); + if (stage == Shader::Stage::Geometry) { + file.write(reinterpret_cast(&gp_passthrough_mask), + sizeof(gp_passthrough_mask)); + } + } +} + +std::optional GenericEnvironment::TryFindSize() { + static constexpr size_t BLOCK_SIZE = 0x1000; + static constexpr size_t MAXIMUM_SIZE = 0x100000; + + static constexpr u64 SELF_BRANCH_A = 0xE2400FFFFF87000FULL; + static constexpr u64 SELF_BRANCH_B = 0xE2400FFFFF07000FULL; + + GPUVAddr guest_addr{program_base + start_address}; + size_t offset{0}; + size_t size{BLOCK_SIZE}; + while (size <= MAXIMUM_SIZE) { + code.resize(size / INST_SIZE); + u64* const data = code.data() + offset / INST_SIZE; + gpu_memory->ReadBlock(guest_addr, data, BLOCK_SIZE); + for (size_t index = 0; index < BLOCK_SIZE; index += INST_SIZE) { + const u64 inst = data[index / INST_SIZE]; + if (inst == SELF_BRANCH_A || inst == SELF_BRANCH_B) { + return offset + index; + } + } + guest_addr += BLOCK_SIZE; + size += BLOCK_SIZE; + offset += BLOCK_SIZE; + } + return std::nullopt; +} + +Shader::TextureType GenericEnvironment::ReadTextureTypeImpl(GPUVAddr tic_addr, u32 tic_limit, + bool via_header_index, u32 raw) { + const auto handle{Tegra::Texture::TexturePair(raw, via_header_index)}; + const GPUVAddr descriptor_addr{tic_addr + handle.first * sizeof(Tegra::Texture::TICEntry)}; + Tegra::Texture::TICEntry 
entry; + gpu_memory->ReadBlock(descriptor_addr, &entry, sizeof(entry)); + const Shader::TextureType result{ConvertType(entry)}; + texture_types.emplace(raw, result); + return result; +} + +GraphicsEnvironment::GraphicsEnvironment(Tegra::Engines::Maxwell3D& maxwell3d_, + Tegra::MemoryManager& gpu_memory_, + Maxwell::ShaderProgram program, GPUVAddr program_base_, + u32 start_address_) + : GenericEnvironment{gpu_memory_, program_base_, start_address_}, maxwell3d{&maxwell3d_} { + gpu_memory->ReadBlock(program_base + start_address, &sph, sizeof(sph)); + gp_passthrough_mask = maxwell3d->regs.gp_passthrough_mask; + switch (program) { + case Maxwell::ShaderProgram::VertexA: + stage = Shader::Stage::VertexA; + stage_index = 0; + break; + case Maxwell::ShaderProgram::VertexB: + stage = Shader::Stage::VertexB; + stage_index = 0; + break; + case Maxwell::ShaderProgram::TesselationControl: + stage = Shader::Stage::TessellationControl; + stage_index = 1; + break; + case Maxwell::ShaderProgram::TesselationEval: + stage = Shader::Stage::TessellationEval; + stage_index = 2; + break; + case Maxwell::ShaderProgram::Geometry: + stage = Shader::Stage::Geometry; + stage_index = 3; + break; + case Maxwell::ShaderProgram::Fragment: + stage = Shader::Stage::Fragment; + stage_index = 4; + break; + default: + UNREACHABLE_MSG("Invalid program={}", program); + break; + } + const u64 local_size{sph.LocalMemorySize()}; + ASSERT(local_size <= std::numeric_limits::max()); + local_memory_size = static_cast(local_size) + sph.common3.shader_local_memory_crs_size; + texture_bound = maxwell3d->regs.tex_cb_index; +} + +u32 GraphicsEnvironment::ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) { + const auto& cbuf{maxwell3d->state.shader_stages[stage_index].const_buffers[cbuf_index]}; + ASSERT(cbuf.enabled); + u32 value{}; + if (cbuf_offset < cbuf.size) { + value = gpu_memory->Read(cbuf.address + cbuf_offset); + } + cbuf_values.emplace(MakeCbufKey(cbuf_index, cbuf_offset), value); + return value; +} + +Shader::TextureType GraphicsEnvironment::ReadTextureType(u32 handle) { + const auto& regs{maxwell3d->regs}; + const bool via_header_index{regs.sampler_index == Maxwell::SamplerIndex::ViaHeaderIndex}; + return ReadTextureTypeImpl(regs.tic.Address(), regs.tic.limit, via_header_index, handle); +} + +ComputeEnvironment::ComputeEnvironment(Tegra::Engines::KeplerCompute& kepler_compute_, + Tegra::MemoryManager& gpu_memory_, GPUVAddr program_base_, + u32 start_address_) + : GenericEnvironment{gpu_memory_, program_base_, start_address_}, kepler_compute{ + &kepler_compute_} { + const auto& qmd{kepler_compute->launch_description}; + stage = Shader::Stage::Compute; + local_memory_size = qmd.local_pos_alloc + qmd.local_crs_alloc; + texture_bound = kepler_compute->regs.tex_cb_index; + shared_memory_size = qmd.shared_alloc; + workgroup_size = {qmd.block_dim_x, qmd.block_dim_y, qmd.block_dim_z}; +} + +u32 ComputeEnvironment::ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) { + const auto& qmd{kepler_compute->launch_description}; + ASSERT(((qmd.const_buffer_enable_mask.Value() >> cbuf_index) & 1) != 0); + const auto& cbuf{qmd.const_buffer_config[cbuf_index]}; + u32 value{}; + if (cbuf_offset < cbuf.size) { + value = gpu_memory->Read(cbuf.Address() + cbuf_offset); + } + cbuf_values.emplace(MakeCbufKey(cbuf_index, cbuf_offset), value); + return value; +} + +Shader::TextureType ComputeEnvironment::ReadTextureType(u32 handle) { + const auto& regs{kepler_compute->regs}; + const auto& qmd{kepler_compute->launch_description}; + return 
ReadTextureTypeImpl(regs.tic.Address(), regs.tic.limit, qmd.linked_tsc != 0, handle); +} + +void FileEnvironment::Deserialize(std::ifstream& file) { + u64 code_size{}; + u64 num_texture_types{}; + u64 num_cbuf_values{}; + file.read(reinterpret_cast(&code_size), sizeof(code_size)) + .read(reinterpret_cast(&num_texture_types), sizeof(num_texture_types)) + .read(reinterpret_cast(&num_cbuf_values), sizeof(num_cbuf_values)) + .read(reinterpret_cast(&local_memory_size), sizeof(local_memory_size)) + .read(reinterpret_cast(&texture_bound), sizeof(texture_bound)) + .read(reinterpret_cast(&start_address), sizeof(start_address)) + .read(reinterpret_cast(&read_lowest), sizeof(read_lowest)) + .read(reinterpret_cast(&read_highest), sizeof(read_highest)) + .read(reinterpret_cast(&stage), sizeof(stage)); + code = std::make_unique(Common::DivCeil(code_size, sizeof(u64))); + file.read(reinterpret_cast(code.get()), code_size); + for (size_t i = 0; i < num_texture_types; ++i) { + u32 key; + Shader::TextureType type; + file.read(reinterpret_cast(&key), sizeof(key)) + .read(reinterpret_cast(&type), sizeof(type)); + texture_types.emplace(key, type); + } + for (size_t i = 0; i < num_cbuf_values; ++i) { + u64 key; + u32 value; + file.read(reinterpret_cast(&key), sizeof(key)) + .read(reinterpret_cast(&value), sizeof(value)); + cbuf_values.emplace(key, value); + } + if (stage == Shader::Stage::Compute) { + file.read(reinterpret_cast(&workgroup_size), sizeof(workgroup_size)) + .read(reinterpret_cast(&shared_memory_size), sizeof(shared_memory_size)); + } else { + file.read(reinterpret_cast(&sph), sizeof(sph)); + if (stage == Shader::Stage::Geometry) { + file.read(reinterpret_cast(&gp_passthrough_mask), sizeof(gp_passthrough_mask)); + } + } +} + +u64 FileEnvironment::ReadInstruction(u32 address) { + if (address < read_lowest || address > read_highest) { + throw Shader::LogicError("Out of bounds address {}", address); + } + return code[(address - read_lowest) / sizeof(u64)]; +} + +u32 FileEnvironment::ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) { + const auto it{cbuf_values.find(MakeCbufKey(cbuf_index, cbuf_offset))}; + if (it == cbuf_values.end()) { + throw Shader::LogicError("Uncached read texture type"); + } + return it->second; +} + +Shader::TextureType FileEnvironment::ReadTextureType(u32 handle) { + const auto it{texture_types.find(handle)}; + if (it == texture_types.end()) { + throw Shader::LogicError("Uncached read texture type"); + } + return it->second; +} + +u32 FileEnvironment::LocalMemorySize() const { + return local_memory_size; +} + +u32 FileEnvironment::SharedMemorySize() const { + return shared_memory_size; +} + +u32 FileEnvironment::TextureBoundBuffer() const { + return texture_bound; +} + +std::array FileEnvironment::WorkgroupSize() const { + return workgroup_size; +} + +void SerializePipeline(std::span key, std::span envs, + const std::filesystem::path& filename) try { + std::ofstream file(filename, std::ios::binary | std::ios::ate | std::ios::app); + file.exceptions(std::ifstream::failbit); + if (!file.is_open()) { + LOG_ERROR(Common_Filesystem, "Failed to open pipeline cache file {}", + Common::FS::PathToUTF8String(filename)); + return; + } + if (file.tellp() == 0) { + // Write header + file.write(MAGIC_NUMBER.data(), MAGIC_NUMBER.size()) + .write(reinterpret_cast(&CACHE_VERSION), sizeof(CACHE_VERSION)); + } + if (!std::ranges::all_of(envs, &GenericEnvironment::CanBeSerialized)) { + return; + } + const u32 num_envs{static_cast(envs.size())}; + file.write(reinterpret_cast(&num_envs), 
sizeof(num_envs)); + for (const GenericEnvironment* const env : envs) { + env->Serialize(file); + } + file.write(key.data(), key.size_bytes()); + +} catch (const std::ios_base::failure& e) { + LOG_ERROR(Common_Filesystem, "{}", e.what()); + if (!Common::FS::RemoveFile(filename)) { + LOG_ERROR(Common_Filesystem, "Failed to delete pipeline cache file {}", + Common::FS::PathToUTF8String(filename)); + } +} + +void LoadPipelines( + std::stop_token stop_loading, const std::filesystem::path& filename, + Common::UniqueFunction load_compute, + Common::UniqueFunction> load_graphics) try { + std::ifstream file(filename, std::ios::binary | std::ios::ate); + if (!file.is_open()) { + return; + } + file.exceptions(std::ifstream::failbit); + const auto end{file.tellg()}; + file.seekg(0, std::ios::beg); + + std::array magic_number; + u32 cache_version; + file.read(magic_number.data(), magic_number.size()) + .read(reinterpret_cast(&cache_version), sizeof(cache_version)); + if (magic_number != MAGIC_NUMBER || cache_version != CACHE_VERSION) { + file.close(); + if (Common::FS::RemoveFile(filename)) { + if (magic_number != MAGIC_NUMBER) { + LOG_ERROR(Common_Filesystem, "Invalid pipeline cache file"); + } + if (cache_version != CACHE_VERSION) { + LOG_INFO(Common_Filesystem, "Deleting old pipeline cache"); + } + } else { + LOG_ERROR(Common_Filesystem, + "Invalid pipeline cache file and failed to delete it in \"{}\"", + Common::FS::PathToUTF8String(filename)); + } + return; + } + while (file.tellg() != end) { + if (stop_loading.stop_requested()) { + return; + } + u32 num_envs{}; + file.read(reinterpret_cast(&num_envs), sizeof(num_envs)); + std::vector envs(num_envs); + for (FileEnvironment& env : envs) { + env.Deserialize(file); + } + if (envs.front().ShaderStage() == Shader::Stage::Compute) { + load_compute(file, std::move(envs.front())); + } else { + load_graphics(file, std::move(envs)); + } + } + +} catch (const std::ios_base::failure& e) { + LOG_ERROR(Common_Filesystem, "{}", e.what()); + if (!Common::FS::RemoveFile(filename)) { + LOG_ERROR(Common_Filesystem, "Failed to delete pipeline cache file {}", + Common::FS::PathToUTF8String(filename)); + } +} + +} // namespace VideoCommon diff --git a/src/video_core/shader_environment.h b/src/video_core/shader_environment.h new file mode 100755 index 000000000..d26dbfaab --- /dev/null +++ b/src/video_core/shader_environment.h @@ -0,0 +1,182 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
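+// On-disk layout written by SerializePipeline and read back by LoadPipelines
+// (taken from the write/read calls in shader_environment.cpp above; field widths
+// follow the member types):
+//   char[8]  MAGIC_NUMBER   "yuzucach"
+//   u32      CACHE_VERSION
+//   then, repeated once per pipeline:
+//     u32    num_envs
+//     blob   GenericEnvironment::Serialize output, num_envs times
+//     bytes  trivially copyable pipeline key, written verbatim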
+ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/common_types.h" +#include "common/unique_function.h" +#include "shader_recompiler/environment.h" +#include "video_core/engines/kepler_compute.h" +#include "video_core/engines/maxwell_3d.h" +#include "video_core/textures/texture.h" + +namespace Tegra { +class Memorymanager; +} + +namespace VideoCommon { + +class GenericEnvironment : public Shader::Environment { +public: + explicit GenericEnvironment() = default; + explicit GenericEnvironment(Tegra::MemoryManager& gpu_memory_, GPUVAddr program_base_, + u32 start_address_); + + ~GenericEnvironment() override; + + [[nodiscard]] u32 TextureBoundBuffer() const final; + + [[nodiscard]] u32 LocalMemorySize() const final; + + [[nodiscard]] u32 SharedMemorySize() const final; + + [[nodiscard]] std::array WorkgroupSize() const final; + + [[nodiscard]] u64 ReadInstruction(u32 address) final; + + [[nodiscard]] std::optional Analyze(); + + void SetCachedSize(size_t size_bytes); + + [[nodiscard]] size_t CachedSize() const noexcept; + + [[nodiscard]] size_t ReadSize() const noexcept; + + [[nodiscard]] bool CanBeSerialized() const noexcept; + + [[nodiscard]] u64 CalculateHash() const; + + void Serialize(std::ofstream& file) const; + +protected: + std::optional TryFindSize(); + + Shader::TextureType ReadTextureTypeImpl(GPUVAddr tic_addr, u32 tic_limit, bool via_header_index, + u32 raw); + + Tegra::MemoryManager* gpu_memory{}; + GPUVAddr program_base{}; + + std::vector code; + std::unordered_map texture_types; + std::unordered_map cbuf_values; + + u32 local_memory_size{}; + u32 texture_bound{}; + u32 shared_memory_size{}; + std::array workgroup_size{}; + + u32 read_lowest = std::numeric_limits::max(); + u32 read_highest = 0; + + u32 cached_lowest = std::numeric_limits::max(); + u32 cached_highest = 0; + + bool has_unbound_instructions = false; +}; + +class GraphicsEnvironment final : public GenericEnvironment { +public: + explicit GraphicsEnvironment() = default; + explicit GraphicsEnvironment(Tegra::Engines::Maxwell3D& maxwell3d_, + Tegra::MemoryManager& gpu_memory_, + Tegra::Engines::Maxwell3D::Regs::ShaderProgram program, + GPUVAddr program_base_, u32 start_address_); + + ~GraphicsEnvironment() override = default; + + u32 ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) override; + + Shader::TextureType ReadTextureType(u32 handle) override; + +private: + Tegra::Engines::Maxwell3D* maxwell3d{}; + size_t stage_index{}; +}; + +class ComputeEnvironment final : public GenericEnvironment { +public: + explicit ComputeEnvironment() = default; + explicit ComputeEnvironment(Tegra::Engines::KeplerCompute& kepler_compute_, + Tegra::MemoryManager& gpu_memory_, GPUVAddr program_base_, + u32 start_address_); + + ~ComputeEnvironment() override = default; + + u32 ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) override; + + Shader::TextureType ReadTextureType(u32 handle) override; + +private: + Tegra::Engines::KeplerCompute* kepler_compute{}; +}; + +class FileEnvironment final : public Shader::Environment { +public: + FileEnvironment() = default; + ~FileEnvironment() override = default; + + FileEnvironment& operator=(FileEnvironment&&) noexcept = default; + FileEnvironment(FileEnvironment&&) noexcept = default; + + FileEnvironment& operator=(const FileEnvironment&) = delete; + FileEnvironment(const FileEnvironment&) = delete; + + void Deserialize(std::ifstream& file); + + [[nodiscard]] u64 ReadInstruction(u32 address) override; + 
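+ /// Note on the overrides below: FileEnvironment replays data captured when the
+ /// shader was recorded, so a cbuf value or texture type that was never cached
+ /// throws Shader::LogicError (see the definitions in shader_environment.cpp above).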
+ [[nodiscard]] u32 ReadCbufValue(u32 cbuf_index, u32 cbuf_offset) override; + + [[nodiscard]] Shader::TextureType ReadTextureType(u32 handle) override; + + [[nodiscard]] u32 LocalMemorySize() const override; + + [[nodiscard]] u32 SharedMemorySize() const override; + + [[nodiscard]] u32 TextureBoundBuffer() const override; + + [[nodiscard]] std::array WorkgroupSize() const override; + +private: + std::unique_ptr code; + std::unordered_map texture_types; + std::unordered_map cbuf_values; + std::array workgroup_size{}; + u32 local_memory_size{}; + u32 shared_memory_size{}; + u32 texture_bound{}; + u32 read_lowest{}; + u32 read_highest{}; +}; + +void SerializePipeline(std::span key, std::span envs, + const std::filesystem::path& filename); + +template +void SerializePipeline(const Key& key, const Envs& envs, const std::filesystem::path& filename) { + static_assert(std::is_trivially_copyable_v); + static_assert(std::has_unique_object_representations_v); + SerializePipeline(std::span(reinterpret_cast(&key), sizeof(key)), + std::span(envs.data(), envs.size()), filename); +} + +void LoadPipelines( + std::stop_token stop_loading, const std::filesystem::path& filename, + Common::UniqueFunction load_compute, + Common::UniqueFunction> load_graphics); + +} // namespace VideoCommon diff --git a/src/video_core/shader_notify.cpp b/src/video_core/shader_notify.cpp index 693e47158..dc6995b46 100755 --- a/src/video_core/shader_notify.cpp +++ b/src/video_core/shader_notify.cpp @@ -2,42 +2,35 @@ // Licensed under GPLv2 or any later version // Refer to the license.txt file included. -#include +#include +#include +#include + #include "video_core/shader_notify.h" using namespace std::chrono_literals; namespace VideoCore { -namespace { -constexpr auto UPDATE_TICK = 32ms; -} -ShaderNotify::ShaderNotify() = default; -ShaderNotify::~ShaderNotify() = default; +const auto TIME_TO_STOP_REPORTING = 2s; -std::size_t ShaderNotify::GetShadersBuilding() { - const auto now = std::chrono::high_resolution_clock::now(); - const auto diff = now - last_update; - if (diff > UPDATE_TICK) { - std::shared_lock lock(mutex); - last_updated_count = accurate_count; +int ShaderNotify::ShadersBuilding() noexcept { + const int now_complete = num_complete.load(std::memory_order::relaxed); + const int now_building = num_building.load(std::memory_order::relaxed); + if (now_complete == now_building) { + const auto now = std::chrono::high_resolution_clock::now(); + if (completed && num_complete == num_when_completed) { + if (now - complete_time > TIME_TO_STOP_REPORTING) { + report_base = now_complete; + completed = false; + } + } else { + completed = true; + num_when_completed = num_complete; + complete_time = now; + } } - return last_updated_count; -} - -std::size_t ShaderNotify::GetShadersBuildingAccurate() { - std::shared_lock lock{mutex}; - return accurate_count; -} - -void ShaderNotify::MarkShaderComplete() { - std::unique_lock lock{mutex}; - accurate_count--; -} - -void ShaderNotify::MarkSharderBuilding() { - std::unique_lock lock{mutex}; - accurate_count++; + return now_building - report_base; } } // namespace VideoCore diff --git a/src/video_core/shader_notify.h b/src/video_core/shader_notify.h index a9c92d179..ad363bfb5 100755 --- a/src/video_core/shader_notify.h +++ b/src/video_core/shader_notify.h @@ -4,26 +4,30 @@ #pragma once +#include #include -#include -#include "common/common_types.h" +#include namespace VideoCore { class ShaderNotify { public: - ShaderNotify(); - ~ShaderNotify(); + [[nodiscard]] int ShadersBuilding() noexcept; - 
std::size_t GetShadersBuilding(); - std::size_t GetShadersBuildingAccurate(); + void MarkShaderComplete() noexcept { + ++num_complete; + } - void MarkShaderComplete(); - void MarkSharderBuilding(); + void MarkShaderBuilding() noexcept { + ++num_building; + } private: - std::size_t last_updated_count{}; - std::size_t accurate_count{}; - std::shared_mutex mutex; - std::chrono::high_resolution_clock::time_point last_update{}; + std::atomic_int num_building{}; + std::atomic_int num_complete{}; + int report_base{}; + + bool completed{}; + int num_when_completed{}; + std::chrono::high_resolution_clock::time_point complete_time; }; } // namespace VideoCore diff --git a/src/video_core/texture_cache/formatter.cpp b/src/video_core/texture_cache/formatter.cpp index d10ba4ccd..249cc4d0f 100755 --- a/src/video_core/texture_cache/formatter.cpp +++ b/src/video_core/texture_cache/formatter.cpp @@ -43,7 +43,7 @@ std::string Name(const ImageBase& image) { return "Invalid"; } -std::string Name(const ImageViewBase& image_view, std::optional type) { +std::string Name(const ImageViewBase& image_view) { const u32 width = image_view.size.width; const u32 height = image_view.size.height; const u32 depth = image_view.size.depth; @@ -51,7 +51,7 @@ std::string Name(const ImageViewBase& image_view, std::optional t const u32 num_layers = image_view.range.extent.layers; const std::string level = num_levels > 1 ? fmt::format(":{}", num_levels) : ""; - switch (type.value_or(image_view.type)) { + switch (image_view.type) { case ImageViewType::e1D: return fmt::format("ImageView 1D {}{}", width, level); case ImageViewType::e2D: diff --git a/src/video_core/texture_cache/formatter.h b/src/video_core/texture_cache/formatter.h index a48413983..c6cf0583f 100755 --- a/src/video_core/texture_cache/formatter.h +++ b/src/video_core/texture_cache/formatter.h @@ -255,8 +255,7 @@ struct RenderTargets; [[nodiscard]] std::string Name(const ImageBase& image); -[[nodiscard]] std::string Name(const ImageViewBase& image_view, - std::optional type = std::nullopt); +[[nodiscard]] std::string Name(const ImageViewBase& image_view); [[nodiscard]] std::string Name(const RenderTargets& render_targets); diff --git a/src/video_core/texture_cache/image_view_base.cpp b/src/video_core/texture_cache/image_view_base.cpp index e8d632f9e..450becbeb 100755 --- a/src/video_core/texture_cache/image_view_base.cpp +++ b/src/video_core/texture_cache/image_view_base.cpp @@ -36,6 +36,15 @@ ImageViewBase::ImageViewBase(const ImageViewInfo& info, const ImageInfo& image_i } } +ImageViewBase::ImageViewBase(const ImageInfo& info, const ImageViewInfo& view_info) + : format{info.format}, type{ImageViewType::Buffer}, size{ + .width = info.size.width, + .height = 1, + .depth = 1, + } { + ASSERT_MSG(view_info.type == ImageViewType::Buffer, "Expected texture buffer"); +} + ImageViewBase::ImageViewBase(const NullImageParams&) {} } // namespace VideoCommon diff --git a/src/video_core/texture_cache/image_view_base.h b/src/video_core/texture_cache/image_view_base.h index 73954167e..903f715c5 100755 --- a/src/video_core/texture_cache/image_view_base.h +++ b/src/video_core/texture_cache/image_view_base.h @@ -27,6 +27,7 @@ DECLARE_ENUM_FLAG_OPERATORS(ImageViewFlagBits) struct ImageViewBase { explicit ImageViewBase(const ImageViewInfo& info, const ImageInfo& image_info, ImageId image_id); + explicit ImageViewBase(const ImageInfo& info, const ImageViewInfo& view_info); explicit ImageViewBase(const NullImageParams&); [[nodiscard]] bool IsBuffer() const noexcept { diff --git 
a/src/video_core/texture_cache/texture_cache.h b/src/video_core/texture_cache/texture_cache.h index 01de2d498..f34c9d9ca 100755 --- a/src/video_core/texture_cache/texture_cache.h +++ b/src/video_core/texture_cache/texture_cache.h @@ -117,6 +117,9 @@ public: /// Return a reference to the given image view id [[nodiscard]] ImageView& GetImageView(ImageViewId id) noexcept; + /// Mark an image as modified from the GPU + void MarkModification(ImageId id) noexcept; + /// Fill image_view_ids with the graphics images in indices void FillGraphicsImageViews(std::span indices, std::span image_view_ids); @@ -526,6 +529,11 @@ typename P::ImageView& TextureCache
<P>
::GetImageView(ImageViewId id) noexcept { return slot_image_views[id]; } +template <class P> +void TextureCache
<P>
::MarkModification(ImageId id) noexcept { + MarkModification(slot_images[id]); +} + template <class P> void TextureCache
<P>
::FillGraphicsImageViews(std::span<const u32> indices, std::span<ImageViewId> image_view_ids) { @@ -540,13 +548,13 @@ void TextureCache
<P>
::FillComputeImageViews(std::span<const u32> indices, template <class P> typename P::Sampler* TextureCache
<P>
::GetGraphicsSampler(u32 index) { - [[unlikely]] if (index > graphics_sampler_table.Limit()) { - LOG_ERROR(HW_GPU, "Invalid sampler index={}", index); + if (index > graphics_sampler_table.Limit()) { + LOG_DEBUG(HW_GPU, "Invalid sampler index={}", index); return &slot_samplers[NULL_SAMPLER_ID]; } const auto [descriptor, is_new] = graphics_sampler_table.Read(index); SamplerId& id = graphics_sampler_ids[index]; - [[unlikely]] if (is_new) { + if (is_new) { id = FindSampler(descriptor); } return &slot_samplers[id]; @@ -554,13 +562,13 @@ typename P::Sampler* TextureCache
<P>
::GetGraphicsSampler(u32 index) { template <class P> typename P::Sampler* TextureCache
<P>
::GetComputeSampler(u32 index) { - [[unlikely]] if (index > compute_sampler_table.Limit()) { - LOG_ERROR(HW_GPU, "Invalid sampler index={}", index); + if (index > compute_sampler_table.Limit()) { + LOG_DEBUG(HW_GPU, "Invalid sampler index={}", index); return &slot_samplers[NULL_SAMPLER_ID]; } const auto [descriptor, is_new] = compute_sampler_table.Read(index); SamplerId& id = compute_sampler_ids[index]; - [[unlikely]] if (is_new) { + if (is_new) { id = FindSampler(descriptor); } return &slot_samplers[id]; @@ -599,6 +607,12 @@ void TextureCache
<P>
::UpdateRenderTargets(bool is_clear) { using namespace VideoCommon::Dirty; auto& flags = maxwell3d.dirty.flags; if (!flags[Dirty::RenderTargets]) { + for (size_t index = 0; index < NUM_RT; ++index) { + ImageViewId& color_buffer_id = render_targets.color_buffer_ids[index]; + PrepareImageView(color_buffer_id, true, is_clear && IsFullClear(color_buffer_id)); + } + const ImageViewId depth_buffer_id = render_targets.depth_buffer_id; + PrepareImageView(depth_buffer_id, true, is_clear && IsFullClear(depth_buffer_id)); return; } flags[Dirty::RenderTargets] = false; @@ -655,7 +669,7 @@ ImageViewId TextureCache
<P>
::VisitImageView(DescriptorTable<TICEntry>& table, std::span<ImageViewId> cached_image_view_ids, u32 index) { if (index > table.Limit()) { - LOG_ERROR(HW_GPU, "Invalid image view index={}", index); + LOG_DEBUG(HW_GPU, "Invalid image view index={}", index); return NULL_IMAGE_VIEW_ID; } const auto [descriptor, is_new] = table.Read(index); @@ -962,9 +976,6 @@ void TextureCache
<P>
::UploadImageContents(Image& image, StagingBuffer& staging) auto copies = UnswizzleImage(gpu_memory, gpu_addr, image.info, unswizzled_data); ConvertImage(unswizzled_data, image.info, mapped_span, copies); image.UploadMemory(staging, copies); - } else if (image.info.type == ImageType::Buffer) { - const std::array copies{UploadBufferCopy(gpu_memory, gpu_addr, image, mapped_span)}; - image.UploadMemory(staging, copies); } else { const auto copies = UnswizzleImage(gpu_memory, gpu_addr, image.info, mapped_span); image.UploadMemory(staging, copies); @@ -987,7 +998,12 @@ ImageViewId TextureCache
<P>
::FindImageView(const TICEntry& config) { template <class P> ImageViewId TextureCache
<P>
::CreateImageView(const TICEntry& config) { const ImageInfo info(config); - const GPUVAddr image_gpu_addr = config.Address() - config.BaseLayer() * info.layer_stride; + if (info.type == ImageType::Buffer) { + const ImageViewInfo view_info(config, 0); + return slot_image_views.insert(runtime, info, view_info, config.Address()); + } + const u32 layer_offset = config.BaseLayer() * info.layer_stride; + const GPUVAddr image_gpu_addr = config.Address() - layer_offset; const ImageId image_id = FindOrInsertImage(info, image_gpu_addr); if (!image_id) { return NULL_IMAGE_VIEW_ID; @@ -1795,6 +1811,9 @@ void TextureCache
<P>
::PrepareImageView(ImageViewId image_view_id, bool is_modifi return; } const ImageViewBase& image_view = slot_image_views[image_view_id]; + if (image_view.IsBuffer()) { + return; + } PrepareImage(image_view.image_id, is_modification, invalidate); } diff --git a/src/video_core/textures/texture.h b/src/video_core/textures/texture.h index c1d14335e..1a9399455 100755 --- a/src/video_core/textures/texture.h +++ b/src/video_core/textures/texture.h @@ -154,6 +154,15 @@ union TextureHandle { }; static_assert(sizeof(TextureHandle) == 4, "TextureHandle has wrong size"); +[[nodiscard]] inline std::pair TexturePair(u32 raw, bool via_header_index) { + if (via_header_index) { + return {raw, raw}; + } else { + const Tegra::Texture::TextureHandle handle{raw}; + return {handle.tic_id, via_header_index ? handle.tic_id : handle.tsc_id}; + } +} + struct TICEntry { union { struct { diff --git a/src/video_core/transform_feedback.cpp b/src/video_core/transform_feedback.cpp new file mode 100755 index 000000000..ba26ac3f1 --- /dev/null +++ b/src/video_core/transform_feedback.cpp @@ -0,0 +1,99 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include +#include + +#include "common/alignment.h" +#include "common/assert.h" +#include "shader_recompiler/shader_info.h" +#include "video_core/transform_feedback.h" + +namespace VideoCommon { + +std::vector MakeTransformFeedbackVaryings( + const TransformFeedbackState& state) { + static constexpr std::array VECTORS{ + 28, // gl_Position + 32, // Generic 0 + 36, // Generic 1 + 40, // Generic 2 + 44, // Generic 3 + 48, // Generic 4 + 52, // Generic 5 + 56, // Generic 6 + 60, // Generic 7 + 64, // Generic 8 + 68, // Generic 9 + 72, // Generic 10 + 76, // Generic 11 + 80, // Generic 12 + 84, // Generic 13 + 88, // Generic 14 + 92, // Generic 15 + 96, // Generic 16 + 100, // Generic 17 + 104, // Generic 18 + 108, // Generic 19 + 112, // Generic 20 + 116, // Generic 21 + 120, // Generic 22 + 124, // Generic 23 + 128, // Generic 24 + 132, // Generic 25 + 136, // Generic 26 + 140, // Generic 27 + 144, // Generic 28 + 148, // Generic 29 + 152, // Generic 30 + 156, // Generic 31 + 160, // gl_FrontColor + 164, // gl_FrontSecondaryColor + 160, // gl_BackColor + 164, // gl_BackSecondaryColor + 192, // gl_TexCoord[0] + 196, // gl_TexCoord[1] + 200, // gl_TexCoord[2] + 204, // gl_TexCoord[3] + 208, // gl_TexCoord[4] + 212, // gl_TexCoord[5] + 216, // gl_TexCoord[6] + 220, // gl_TexCoord[7] + }; + std::vector xfb(256); + for (size_t buffer = 0; buffer < state.layouts.size(); ++buffer) { + const auto& locations = state.varyings[buffer]; + const auto& layout = state.layouts[buffer]; + const u32 varying_count = layout.varying_count; + u32 highest = 0; + for (u32 offset = 0; offset < varying_count; ++offset) { + const u32 base_offset = offset; + const u8 location = locations[offset]; + + UNIMPLEMENTED_IF_MSG(layout.stream != 0, "Stream is not zero: {}", layout.stream); + Shader::TransformFeedbackVarying varying{ + .buffer = static_cast(buffer), + .stride = layout.stride, + .offset = offset * 4, + .components = 1, + }; + if (std::ranges::find(VECTORS, Common::AlignDown(location, 4)) != VECTORS.end()) { + UNIMPLEMENTED_IF_MSG(location % 4 != 0, "Unaligned TFB"); + + const u8 base_index = location / 4; + while (offset + 1 < varying_count && base_index == locations[offset + 1] / 4) { + ++offset; + ++varying.components; + } + } + xfb[location] = varying; + highest = std::max(highest, (base_offset + 
varying.components) * 4); + } + UNIMPLEMENTED_IF(highest != layout.stride); + } + return xfb; +} + +} // namespace VideoCommon diff --git a/src/video_core/transform_feedback.h b/src/video_core/transform_feedback.h new file mode 100755 index 000000000..8f6946d65 --- /dev/null +++ b/src/video_core/transform_feedback.h @@ -0,0 +1,30 @@ +// Copyright 2021 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include + +#include "common/common_types.h" +#include "shader_recompiler/runtime_info.h" +#include "video_core/engines/maxwell_3d.h" + +namespace VideoCommon { + +struct TransformFeedbackState { + struct Layout { + u32 stream; + u32 varying_count; + u32 stride; + }; + std::array layouts; + std::array, Tegra::Engines::Maxwell3D::Regs::NumTransformFeedbackBuffers> + varyings; +}; + +std::vector MakeTransformFeedbackVaryings( + const TransformFeedbackState& state); + +} // namespace VideoCommon diff --git a/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp b/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp index 758c038ba..fdd1a5081 100755 --- a/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp +++ b/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp @@ -73,12 +73,11 @@ NsightAftermathTracker::~NsightAftermathTracker() { } } -void NsightAftermathTracker::SaveShader(const std::vector& spirv) const { +void NsightAftermathTracker::SaveShader(std::span spirv) const { if (!initialized) { return; } - - std::vector spirv_copy = spirv; + std::vector spirv_copy(spirv.begin(), spirv.end()); GFSDK_Aftermath_SpirvCode shader; shader.pData = spirv_copy.data(); shader.size = static_cast(spirv_copy.size() * 4); @@ -100,7 +99,7 @@ void NsightAftermathTracker::SaveShader(const std::vector& spirv) const { LOG_ERROR(Render_Vulkan, "Failed to dump SPIR-V module with hash={:016x}", hash.hash); return; } - if (file.Write(spirv) != spirv.size()) { + if (file.WriteSpan(spirv) != spirv.size()) { LOG_ERROR(Render_Vulkan, "Failed to write SPIR-V module with hash={:016x}", hash.hash); return; } diff --git a/src/video_core/vulkan_common/nsight_aftermath_tracker.h b/src/video_core/vulkan_common/nsight_aftermath_tracker.h index 4fe2b14d9..eae1891dd 100755 --- a/src/video_core/vulkan_common/nsight_aftermath_tracker.h +++ b/src/video_core/vulkan_common/nsight_aftermath_tracker.h @@ -6,6 +6,7 @@ #include #include +#include #include #include @@ -33,7 +34,7 @@ public: NsightAftermathTracker(NsightAftermathTracker&&) = delete; NsightAftermathTracker& operator=(NsightAftermathTracker&&) = delete; - void SaveShader(const std::vector& spirv) const; + void SaveShader(std::span spirv) const; private: #ifdef HAS_NSIGHT_AFTERMATH @@ -61,21 +62,21 @@ private: bool initialized = false; Common::DynamicLibrary dl; - PFN_GFSDK_Aftermath_DisableGpuCrashDumps GFSDK_Aftermath_DisableGpuCrashDumps; - PFN_GFSDK_Aftermath_EnableGpuCrashDumps GFSDK_Aftermath_EnableGpuCrashDumps; - PFN_GFSDK_Aftermath_GetShaderDebugInfoIdentifier GFSDK_Aftermath_GetShaderDebugInfoIdentifier; - PFN_GFSDK_Aftermath_GetShaderHashSpirv GFSDK_Aftermath_GetShaderHashSpirv; - PFN_GFSDK_Aftermath_GpuCrashDump_CreateDecoder GFSDK_Aftermath_GpuCrashDump_CreateDecoder; - PFN_GFSDK_Aftermath_GpuCrashDump_DestroyDecoder GFSDK_Aftermath_GpuCrashDump_DestroyDecoder; - PFN_GFSDK_Aftermath_GpuCrashDump_GenerateJSON GFSDK_Aftermath_GpuCrashDump_GenerateJSON; - PFN_GFSDK_Aftermath_GpuCrashDump_GetJSON GFSDK_Aftermath_GpuCrashDump_GetJSON; + 
PFN_GFSDK_Aftermath_DisableGpuCrashDumps GFSDK_Aftermath_DisableGpuCrashDumps{}; + PFN_GFSDK_Aftermath_EnableGpuCrashDumps GFSDK_Aftermath_EnableGpuCrashDumps{}; + PFN_GFSDK_Aftermath_GetShaderDebugInfoIdentifier GFSDK_Aftermath_GetShaderDebugInfoIdentifier{}; + PFN_GFSDK_Aftermath_GetShaderHashSpirv GFSDK_Aftermath_GetShaderHashSpirv{}; + PFN_GFSDK_Aftermath_GpuCrashDump_CreateDecoder GFSDK_Aftermath_GpuCrashDump_CreateDecoder{}; + PFN_GFSDK_Aftermath_GpuCrashDump_DestroyDecoder GFSDK_Aftermath_GpuCrashDump_DestroyDecoder{}; + PFN_GFSDK_Aftermath_GpuCrashDump_GenerateJSON GFSDK_Aftermath_GpuCrashDump_GenerateJSON{}; + PFN_GFSDK_Aftermath_GpuCrashDump_GetJSON GFSDK_Aftermath_GpuCrashDump_GetJSON{}; #endif }; #ifndef HAS_NSIGHT_AFTERMATH inline NsightAftermathTracker::NsightAftermathTracker() = default; inline NsightAftermathTracker::~NsightAftermathTracker() = default; -inline void NsightAftermathTracker::SaveShader(const std::vector&) const {} +inline void NsightAftermathTracker::SaveShader(std::span) const {} #endif } // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_device.cpp b/src/video_core/vulkan_common/vulkan_device.cpp index f214510da..13d938434 100755 --- a/src/video_core/vulkan_common/vulkan_device.cpp +++ b/src/video_core/vulkan_common/vulkan_device.cpp @@ -2,6 +2,7 @@ // Licensed under GPLv2 or any later version // Refer to the license.txt file included. +#include #include #include #include @@ -43,11 +44,14 @@ constexpr std::array REQUIRED_EXTENSIONS{ VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, + VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, + VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, VK_EXT_ROBUSTNESS_2_EXTENSION_NAME, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, + VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME, #ifdef _WIN32 VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME, #endif @@ -112,6 +116,7 @@ std::unordered_map GetFormatProperties(vk::Physica VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16_SINT, VK_FORMAT_R16_UNORM, + VK_FORMAT_R16_SNORM, VK_FORMAT_R16_UINT, VK_FORMAT_R8G8B8A8_SRGB, VK_FORMAT_R8G8_UNORM, @@ -191,15 +196,26 @@ std::unordered_map GetFormatProperties(vk::Physica return format_properties; } +std::vector GetSupportedExtensions(vk::PhysicalDevice physical) { + const std::vector extensions = physical.EnumerateDeviceExtensionProperties(); + std::vector supported_extensions(std::size(extensions)); + for (const auto& extension : extensions) { + supported_extensions.emplace_back(extension.extensionName); + } + return supported_extensions; +} + } // Anonymous namespace Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR surface, const vk::InstanceDispatch& dld_) : instance{instance_}, dld{dld_}, physical{physical_}, properties{physical.GetProperties()}, - format_properties{GetFormatProperties(physical)} { + supported_extensions{GetSupportedExtensions(physical)}, + format_properties(GetFormatProperties(physical)) { CheckSuitability(surface != nullptr); SetupFamilies(surface); SetupFeatures(); + SetupProperties(); const auto queue_cis = GetDeviceQueueCreateInfos(); const std::vector extensions = LoadExtensions(surface != nullptr); @@ -214,16 +230,16 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR .independentBlend = true, .geometryShader = true, 
.tessellationShader = true, - .sampleRateShading = false, - .dualSrcBlend = false, + .sampleRateShading = true, + .dualSrcBlend = true, .logicOp = false, .multiDrawIndirect = false, .drawIndirectFirstInstance = false, .depthClamp = true, .depthBiasClamp = true, - .fillModeNonSolid = false, - .depthBounds = false, - .wideLines = false, + .fillModeNonSolid = true, + .depthBounds = is_depth_bounds_supported, + .wideLines = true, .largePoints = true, .alphaToOne = false, .multiViewport = true, @@ -245,11 +261,11 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR .shaderSampledImageArrayDynamicIndexing = false, .shaderStorageBufferArrayDynamicIndexing = false, .shaderStorageImageArrayDynamicIndexing = false, - .shaderClipDistance = false, - .shaderCullDistance = false, - .shaderFloat64 = false, - .shaderInt64 = false, - .shaderInt16 = false, + .shaderClipDistance = true, + .shaderCullDistance = true, + .shaderFloat64 = is_shader_float64_supported, + .shaderInt64 = is_shader_int64_supported, + .shaderInt16 = is_shader_int16_supported, .shaderResourceResidency = false, .shaderResourceMinLod = false, .sparseBinding = false, @@ -278,7 +294,7 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR VkPhysicalDevice16BitStorageFeaturesKHR bit16_storage{ .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, .pNext = nullptr, - .storageBuffer16BitAccess = false, + .storageBuffer16BitAccess = true, .uniformAndStorageBuffer16BitAccess = true, .storagePushConstant16 = false, .storageInputOutput16 = false, @@ -310,6 +326,21 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR }; SetNext(next, host_query_reset); + VkPhysicalDeviceVariablePointerFeaturesKHR variable_pointers{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + .pNext = nullptr, + .variablePointersStorageBuffer = VK_TRUE, + .variablePointers = VK_TRUE, + }; + SetNext(next, variable_pointers); + + VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT demote{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + .pNext = nullptr, + .shaderDemoteToHelperInvocation = true, + }; + SetNext(next, demote); + VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8; if (is_float16_supported) { float16_int8 = { @@ -327,6 +358,14 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR LOG_INFO(Render_Vulkan, "Device doesn't support viewport swizzles"); } + if (!nv_viewport_array2) { + LOG_INFO(Render_Vulkan, "Device doesn't support viewport masks"); + } + + if (!nv_geometry_shader_passthrough) { + LOG_INFO(Render_Vulkan, "Device doesn't support passthrough geometry shaders"); + } + VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR std430_layout; if (khr_uniform_buffer_standard_layout) { std430_layout = { @@ -389,12 +428,83 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR LOG_INFO(Render_Vulkan, "Device doesn't support extended dynamic state"); } + VkPhysicalDeviceLineRasterizationFeaturesEXT line_raster; + if (ext_line_rasterization) { + line_raster = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + .pNext = nullptr, + .rectangularLines = VK_TRUE, + .bresenhamLines = VK_FALSE, + .smoothLines = VK_TRUE, + .stippledRectangularLines = VK_FALSE, + .stippledBresenhamLines = VK_FALSE, + .stippledSmoothLines = VK_FALSE, + }; + SetNext(next, line_raster); + } else { + LOG_INFO(Render_Vulkan, 
"Device doesn't support smooth lines"); + } + + if (!ext_conservative_rasterization) { + LOG_INFO(Render_Vulkan, "Device doesn't support conservative rasterization"); + } + + VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex; + if (ext_provoking_vertex) { + provoking_vertex = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT, + .pNext = nullptr, + .provokingVertexLast = VK_TRUE, + .transformFeedbackPreservesProvokingVertex = VK_TRUE, + }; + SetNext(next, provoking_vertex); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support provoking vertex last"); + } + + VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT vertex_input_dynamic; + if (ext_vertex_input_dynamic_state) { + vertex_input_dynamic = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, + .pNext = nullptr, + .vertexInputDynamicState = VK_TRUE, + }; + SetNext(next, vertex_input_dynamic); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support vertex input dynamic state"); + } + + VkPhysicalDeviceShaderAtomicInt64FeaturesKHR atomic_int64; + if (ext_shader_atomic_int64) { + atomic_int64 = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + .pNext = nullptr, + .shaderBufferInt64Atomics = VK_TRUE, + .shaderSharedInt64Atomics = VK_TRUE, + }; + SetNext(next, atomic_int64); + } + + VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR workgroup_layout; + if (khr_workgroup_memory_explicit_layout) { + workgroup_layout = { + .sType = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, + .pNext = nullptr, + .workgroupMemoryExplicitLayout = VK_TRUE, + .workgroupMemoryExplicitLayoutScalarBlockLayout = VK_TRUE, + .workgroupMemoryExplicitLayout8BitAccess = VK_TRUE, + .workgroupMemoryExplicitLayout16BitAccess = VK_TRUE, + }; + SetNext(next, workgroup_layout); + } + if (!ext_depth_range_unrestricted) { LOG_INFO(Render_Vulkan, "Device doesn't support depth range unrestricted"); } VkDeviceDiagnosticsConfigCreateInfoNV diagnostics_nv; - if (nv_device_diagnostics_config) { + if (Settings::values.enable_nsight_aftermath && nv_device_diagnostics_config) { nsight_aftermath_tracker = std::make_unique(); diagnostics_nv = { @@ -412,11 +522,25 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR CollectTelemetryParameters(); CollectToolingInfo(); + if (driver_id == VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR && is_float16_supported) { + if (std::ranges::find(supported_extensions, VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME) != + supported_extensions.end()) { + LOG_WARNING(Render_Vulkan, "Blacklisting Ampere devices from float16 math"); + is_float16_supported = false; + } + } if (ext_extended_dynamic_state && driver_id == VK_DRIVER_ID_MESA_RADV) { - LOG_WARNING( - Render_Vulkan, - "Blacklisting RADV for VK_EXT_extended_dynamic state, likely due to a bug in yuzu"); - ext_extended_dynamic_state = false; + // Mask driver version variant + const u32 version = (properties.driverVersion << 3) >> 3; + if (version < VK_MAKE_API_VERSION(0, 21, 2, 0)) { + LOG_WARNING(Render_Vulkan, + "RADV versions older than 21.2 have broken VK_EXT_extended_dynamic_state"); + ext_extended_dynamic_state = false; + } + } + if (ext_vertex_input_dynamic_state && driver_id == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS) { + LOG_WARNING(Render_Vulkan, "Blacklisting Intel for VK_EXT_vertex_input_dynamic_state"); + ext_vertex_input_dynamic_state = false; } if (is_float16_supported && driver_id == 
VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS) { // Intel's compiler crashes when using fp16 on Astral Chain, disable it for the time being. @@ -426,8 +550,6 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR graphics_queue = logical.GetQueue(graphics_family); present_queue = logical.GetQueue(present_family); - - use_asynchronous_shaders = Settings::values.use_asynchronous_shaders.GetValue(); } Device::~Device() = default; @@ -471,7 +593,7 @@ void Device::ReportLoss() const { std::this_thread::sleep_for(std::chrono::seconds{15}); } -void Device::SaveShader(const std::vector& spirv) const { +void Device::SaveShader(std::span spirv) const { if (nsight_aftermath_tracker) { nsight_aftermath_tracker->SaveShader(spirv); } @@ -597,10 +719,20 @@ void Device::CheckSuitability(bool requires_swapchain) const { throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); } } + VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT demote{}; + demote.sType = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT; + demote.pNext = nullptr; + + VkPhysicalDeviceVariablePointerFeaturesKHR variable_pointers{}; + variable_pointers.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR; + variable_pointers.pNext = &demote; + VkPhysicalDeviceRobustness2FeaturesEXT robustness2{}; robustness2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT; + robustness2.pNext = &variable_pointers; - VkPhysicalDeviceFeatures2 features2{}; + VkPhysicalDeviceFeatures2KHR features2{}; features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; features2.pNext = &robustness2; @@ -610,7 +742,6 @@ void Device::CheckSuitability(bool requires_swapchain) const { const std::array feature_report{ std::make_pair(features.robustBufferAccess, "robustBufferAccess"), std::make_pair(features.vertexPipelineStoresAndAtomics, "vertexPipelineStoresAndAtomics"), - std::make_pair(features.robustBufferAccess, "robustBufferAccess"), std::make_pair(features.imageCubeArray, "imageCubeArray"), std::make_pair(features.independentBlend, "independentBlend"), std::make_pair(features.depthClamp, "depthClamp"), @@ -618,13 +749,23 @@ void Device::CheckSuitability(bool requires_swapchain) const { std::make_pair(features.largePoints, "largePoints"), std::make_pair(features.multiViewport, "multiViewport"), std::make_pair(features.depthBiasClamp, "depthBiasClamp"), + std::make_pair(features.fillModeNonSolid, "fillModeNonSolid"), + std::make_pair(features.wideLines, "wideLines"), std::make_pair(features.geometryShader, "geometryShader"), std::make_pair(features.tessellationShader, "tessellationShader"), + std::make_pair(features.sampleRateShading, "sampleRateShading"), + std::make_pair(features.dualSrcBlend, "dualSrcBlend"), std::make_pair(features.occlusionQueryPrecise, "occlusionQueryPrecise"), std::make_pair(features.fragmentStoresAndAtomics, "fragmentStoresAndAtomics"), std::make_pair(features.shaderImageGatherExtended, "shaderImageGatherExtended"), std::make_pair(features.shaderStorageImageWriteWithoutFormat, "shaderStorageImageWriteWithoutFormat"), + std::make_pair(features.shaderClipDistance, "shaderClipDistance"), + std::make_pair(features.shaderCullDistance, "shaderCullDistance"), + std::make_pair(demote.shaderDemoteToHelperInvocation, "shaderDemoteToHelperInvocation"), + std::make_pair(variable_pointers.variablePointers, "variablePointers"), + std::make_pair(variable_pointers.variablePointersStorageBuffer, + "variablePointersStorageBuffer"), 
std::make_pair(robustness2.robustBufferAccess2, "robustBufferAccess2"), std::make_pair(robustness2.robustImageAccess2, "robustImageAccess2"), std::make_pair(robustness2.nullDescriptor, "nullDescriptor"), @@ -647,14 +788,19 @@ std::vector Device::LoadExtensions(bool requires_surface) { } bool has_khr_shader_float16_int8{}; + bool has_khr_workgroup_memory_explicit_layout{}; bool has_ext_subgroup_size_control{}; bool has_ext_transform_feedback{}; bool has_ext_custom_border_color{}; bool has_ext_extended_dynamic_state{}; - for (const VkExtensionProperties& extension : physical.EnumerateDeviceExtensionProperties()) { + bool has_ext_shader_atomic_int64{}; + bool has_ext_provoking_vertex{}; + bool has_ext_vertex_input_dynamic_state{}; + bool has_ext_line_rasterization{}; + for (const std::string& extension : supported_extensions) { const auto test = [&](std::optional> status, const char* name, bool push) { - if (extension.extensionName != std::string_view(name)) { + if (extension != name) { return; } if (push) { @@ -665,8 +811,13 @@ std::vector Device::LoadExtensions(bool requires_surface) { } }; test(nv_viewport_swizzle, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME, true); + test(nv_viewport_array2, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, true); + test(nv_geometry_shader_passthrough, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, + true); test(khr_uniform_buffer_standard_layout, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, true); + test(khr_spirv_1_4, VK_KHR_SPIRV_1_4_EXTENSION_NAME, true); + test(khr_push_descriptor, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME, true); test(has_khr_shader_float16_int8, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, false); test(ext_depth_range_unrestricted, VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, true); test(ext_index_type_uint8, VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, true); @@ -675,16 +826,25 @@ std::vector Device::LoadExtensions(bool requires_surface) { true); test(ext_tooling_info, VK_EXT_TOOLING_INFO_EXTENSION_NAME, true); test(ext_shader_stencil_export, VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, true); + test(ext_conservative_rasterization, VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME, + true); test(has_ext_transform_feedback, VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, false); test(has_ext_custom_border_color, VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, false); test(has_ext_extended_dynamic_state, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, false); test(has_ext_subgroup_size_control, VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, false); - if (Settings::values.renderer_debug) { + test(has_ext_provoking_vertex, VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, false); + test(has_ext_vertex_input_dynamic_state, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME, + false); + test(has_ext_shader_atomic_int64, VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, false); + test(has_khr_workgroup_memory_explicit_layout, + VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME, false); + test(has_ext_line_rasterization, VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, false); + if (Settings::values.enable_nsight_aftermath) { test(nv_device_diagnostics_config, VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME, true); } } - VkPhysicalDeviceFeatures2KHR features; + VkPhysicalDeviceFeatures2KHR features{}; features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR; VkPhysicalDeviceProperties2KHR physical_properties; @@ -722,10 +882,49 @@ std::vector Device::LoadExtensions(bool requires_surface) { subgroup_properties.maxSubgroupSize >= GuestWarpSize) { 
extensions.push_back(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME); guest_warp_stages = subgroup_properties.requiredSubgroupSizeStages; + ext_subgroup_size_control = true; } } else { is_warp_potentially_bigger = true; } + if (has_ext_provoking_vertex) { + VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex; + provoking_vertex.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT; + provoking_vertex.pNext = nullptr; + features.pNext = &provoking_vertex; + physical.GetFeatures2KHR(features); + + if (provoking_vertex.provokingVertexLast && + provoking_vertex.transformFeedbackPreservesProvokingVertex) { + extensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME); + ext_provoking_vertex = true; + } + } + if (has_ext_vertex_input_dynamic_state) { + VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT vertex_input; + vertex_input.sType = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT; + vertex_input.pNext = nullptr; + features.pNext = &vertex_input; + physical.GetFeatures2KHR(features); + + if (vertex_input.vertexInputDynamicState) { + extensions.push_back(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME); + ext_vertex_input_dynamic_state = true; + } + } + if (has_ext_shader_atomic_int64) { + VkPhysicalDeviceShaderAtomicInt64Features atomic_int64; + atomic_int64.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR; + atomic_int64.pNext = nullptr; + features.pNext = &atomic_int64; + physical.GetFeatures2KHR(features); + + if (atomic_int64.shaderBufferInt64Atomics && atomic_int64.shaderSharedInt64Atomics) { + extensions.push_back(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME); + ext_shader_atomic_int64 = true; + } + } if (has_ext_transform_feedback) { VkPhysicalDeviceTransformFeedbackFeaturesEXT tfb_features; tfb_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT; @@ -760,17 +959,55 @@ std::vector<const char*> Device::LoadExtensions(bool requires_surface) { } } if (has_ext_extended_dynamic_state) { - VkPhysicalDeviceExtendedDynamicStateFeaturesEXT dynamic_state; - dynamic_state.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT; - dynamic_state.pNext = nullptr; - features.pNext = &dynamic_state; + VkPhysicalDeviceExtendedDynamicStateFeaturesEXT extended_dynamic_state; + extended_dynamic_state.sType = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT; + extended_dynamic_state.pNext = nullptr; + features.pNext = &extended_dynamic_state; physical.GetFeatures2KHR(features); - if (dynamic_state.extendedDynamicState) { + if (extended_dynamic_state.extendedDynamicState) { extensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME); ext_extended_dynamic_state = true; } } + if (has_ext_line_rasterization) { + VkPhysicalDeviceLineRasterizationFeaturesEXT line_raster; + line_raster.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT; + line_raster.pNext = nullptr; + features.pNext = &line_raster; + physical.GetFeatures2KHR(features); + if (line_raster.rectangularLines && line_raster.smoothLines) { + extensions.push_back(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME); + ext_line_rasterization = true; + } + } + if (has_khr_workgroup_memory_explicit_layout) { + VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR layout; + layout.sType = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR; + layout.pNext = nullptr; + features.pNext = &layout; + physical.GetFeatures2KHR(features); + + if 
(layout.workgroupMemoryExplicitLayout && + layout.workgroupMemoryExplicitLayout8BitAccess && + layout.workgroupMemoryExplicitLayout16BitAccess && + layout.workgroupMemoryExplicitLayoutScalarBlockLayout) { + extensions.push_back(VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME); + khr_workgroup_memory_explicit_layout = true; + } + } + if (khr_push_descriptor) { + VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor; + push_descriptor.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR; + push_descriptor.pNext = nullptr; + + physical_properties.pNext = &push_descriptor; + physical.GetProperties2KHR(physical_properties); + + max_push_descriptors = push_descriptor.maxPushDescriptors; + } return extensions; } @@ -806,11 +1043,25 @@ void Device::SetupFamilies(VkSurfaceKHR surface) { } void Device::SetupFeatures() { - const auto supported_features{physical.GetFeatures()}; - is_formatless_image_load_supported = supported_features.shaderStorageImageReadWithoutFormat; - is_shader_storage_image_multisample = supported_features.shaderStorageImageMultisample; + const VkPhysicalDeviceFeatures features{physical.GetFeatures()}; + is_depth_bounds_supported = features.depthBounds; + is_formatless_image_load_supported = features.shaderStorageImageReadWithoutFormat; + is_shader_float64_supported = features.shaderFloat64; + is_shader_int64_supported = features.shaderInt64; + is_shader_int16_supported = features.shaderInt16; + is_shader_storage_image_multisample = features.shaderStorageImageMultisample; is_blit_depth_stencil_supported = TestDepthStencilBlits(); - is_optimal_astc_supported = IsOptimalAstcSupported(supported_features); + is_optimal_astc_supported = IsOptimalAstcSupported(features); +} + +void Device::SetupProperties() { + float_controls.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR; + + VkPhysicalDeviceProperties2KHR properties2{}; + properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR; + properties2.pNext = &float_controls; + + physical.GetProperties2KHR(properties2); } void Device::CollectTelemetryParameters() { @@ -832,12 +1083,6 @@ void Device::CollectTelemetryParameters() { driver_id = driver.driverID; vendor_name = driver.driverName; - - const std::vector extensions = physical.EnumerateDeviceExtensionProperties(); - reported_extensions.reserve(std::size(extensions)); - for (const auto& extension : extensions) { - reported_extensions.emplace_back(extension.extensionName); - } } void Device::CollectPhysicalMemoryInfo() { diff --git a/src/video_core/vulkan_common/vulkan_device.h b/src/video_core/vulkan_common/vulkan_device.h index 96c0f8c60..df394e384 100755 --- a/src/video_core/vulkan_common/vulkan_device.h +++ b/src/video_core/vulkan_common/vulkan_device.h @@ -4,6 +4,7 @@ #pragma once +#include #include #include #include @@ -43,7 +44,7 @@ public: void ReportLoss() const; /// Reports a shader to Nsight Aftermath. - void SaveShader(const std::vector& spirv) const; + void SaveShader(std::span spirv) const; /// Returns the name of the VkDriverId reported from Vulkan. std::string GetDriverName() const; @@ -128,6 +129,11 @@ public: return properties.limits.maxComputeSharedMemorySize; } + /// Returns float control properties of the device. + const VkPhysicalDeviceFloatControlsPropertiesKHR& FloatControlProperties() const { + return float_controls; + } + /// Returns true if ASTC is natively supported. 
diff --git a/src/video_core/vulkan_common/vulkan_device.h b/src/video_core/vulkan_common/vulkan_device.h index 96c0f8c60..df394e384 100755 --- a/src/video_core/vulkan_common/vulkan_device.h +++ b/src/video_core/vulkan_common/vulkan_device.h @@ -4,6 +4,7 @@ #pragma once +#include #include #include #include @@ -43,7 +44,7 @@ public: void ReportLoss() const; /// Reports a shader to Nsight Aftermath. - void SaveShader(const std::vector& spirv) const; + void SaveShader(std::span spirv) const; /// Returns the name of the VkDriverId reported from Vulkan. std::string GetDriverName() const; @@ -128,6 +129,11 @@ public: return properties.limits.maxComputeSharedMemorySize; } + /// Returns float control properties of the device. + const VkPhysicalDeviceFloatControlsPropertiesKHR& FloatControlProperties() const { + return float_controls; + } + /// Returns true if ASTC is natively supported.
bool IsOptimalAstcSupported() const { return is_optimal_astc_supported; @@ -148,11 +154,31 @@ public: return guest_warp_stages & stage; } + /// Returns the maximum number of push descriptors. + u32 MaxPushDescriptors() const { + return max_push_descriptors; + } + /// Returns true if formatless image load is supported. bool IsFormatlessImageLoadSupported() const { return is_formatless_image_load_supported; } + /// Returns true if shader int64 is supported. + bool IsShaderInt64Supported() const { + return is_shader_int64_supported; + } + + /// Returns true if shader int16 is supported. + bool IsShaderInt16Supported() const { + return is_shader_int16_supported; + } + + /// Returns true if depth bounds is supported. + bool IsDepthBoundsSupported() const { + return is_depth_bounds_supported; + } + /// Returns true when blitting from and to depth stencil images is supported. bool IsBlitDepthStencilSupported() const { return is_blit_depth_stencil_supported; @@ -163,11 +189,36 @@ public: return nv_viewport_swizzle; } - /// Returns true if the device supports VK_EXT_scalar_block_layout. + /// Returns true if the device supports VK_NV_viewport_array2. + bool IsNvViewportArray2Supported() const { + return nv_viewport_array2; + } + + /// Returns true if the device supports VK_NV_geometry_shader_passthrough. + bool IsNvGeometryShaderPassthroughSupported() const { + return nv_geometry_shader_passthrough; + } + + /// Returns true if the device supports VK_KHR_uniform_buffer_standard_layout. bool IsKhrUniformBufferStandardLayoutSupported() const { return khr_uniform_buffer_standard_layout; } + /// Returns true if the device supports VK_KHR_spirv_1_4. + bool IsKhrSpirv1_4Supported() const { + return khr_spirv_1_4; + } + + /// Returns true if the device supports VK_KHR_push_descriptor. + bool IsKhrPushDescriptorSupported() const { + return khr_push_descriptor; + } + + /// Returns true if the device supports VK_KHR_workgroup_memory_explicit_layout. + bool IsKhrWorkgroupMemoryExplicitLayoutSupported() const { + return khr_workgroup_memory_explicit_layout; + } + /// Returns true if the device supports VK_EXT_index_type_uint8. bool IsExtIndexTypeUint8Supported() const { return ext_index_type_uint8; @@ -188,6 +239,11 @@ public: return ext_shader_viewport_index_layer; } + /// Returns true if the device supports VK_EXT_subgroup_size_control. + bool IsExtSubgroupSizeControlSupported() const { + return ext_subgroup_size_control; + } + /// Returns true if the device supports VK_EXT_transform_feedback. bool IsExtTransformFeedbackSupported() const { return ext_transform_feedback; @@ -203,11 +259,36 @@ public: return ext_extended_dynamic_state; } + /// Returns true if the device supports VK_EXT_line_rasterization. + bool IsExtLineRasterizationSupported() const { + return ext_line_rasterization; + } + + /// Returns true if the device supports VK_EXT_vertex_input_dynamic_state. + bool IsExtVertexInputDynamicStateSupported() const { + return ext_vertex_input_dynamic_state; + } + /// Returns true if the device supports VK_EXT_shader_stencil_export. bool IsExtShaderStencilExportSupported() const { return ext_shader_stencil_export; } + /// Returns true if the device supports VK_EXT_conservative_rasterization. + bool IsExtConservativeRasterizationSupported() const { + return ext_conservative_rasterization; + } + + /// Returns true if the device supports VK_EXT_provoking_vertex.
+ bool IsExtProvokingVertexSupported() const { + return ext_provoking_vertex; + } + + /// Returns true if the device supports VK_KHR_shader_atomic_int64. + bool IsExtShaderAtomicInt64Supported() const { + return ext_shader_atomic_int64; + } + /// Returns true when a known debugging tool is attached. bool HasDebuggingToolAttached() const { return has_renderdoc || has_nsight_graphics; @@ -220,12 +301,7 @@ public: /// Returns the list of available extensions. const std::vector& GetAvailableExtensions() const { - return reported_extensions; - } - - /// Returns true if the setting for async shader compilation is enabled. - bool UseAsynchronousShaders() const { - return use_asynchronous_shaders; + return supported_extensions; } u64 GetDeviceLocalMemory() const { @@ -245,6 +321,9 @@ private: /// Sets up device features. void SetupFeatures(); + /// Sets up device properties. + void SetupProperties(); + /// Collects telemetry information from the device. void CollectTelemetryParameters(); @@ -267,46 +346,60 @@ private: bool IsFormatSupported(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage, FormatType format_type) const; - VkInstance instance; ///< Vulkan instance. - vk::DeviceDispatch dld; ///< Device function pointers. - vk::PhysicalDevice physical; ///< Physical device. - VkPhysicalDeviceProperties properties; ///< Device properties. - vk::Device logical; ///< Logical device. - vk::Queue graphics_queue; ///< Main graphics queue. - vk::Queue present_queue; ///< Main present queue. - u32 instance_version{}; ///< Vulkan onstance version. + VkInstance instance; ///< Vulkan instance. + vk::DeviceDispatch dld; ///< Device function pointers. + vk::PhysicalDevice physical; ///< Physical device. + VkPhysicalDeviceProperties properties; ///< Device properties. + VkPhysicalDeviceFloatControlsPropertiesKHR float_controls{}; ///< Float control properties. + vk::Device logical; ///< Logical device. + vk::Queue graphics_queue; ///< Main graphics queue. + vk::Queue present_queue; ///< Main present queue. + u32 instance_version{}; ///< Vulkan instance version. u32 graphics_family{}; ///< Main graphics queue family index. u32 present_family{}; ///< Main present queue family index. VkDriverIdKHR driver_id{}; ///< Driver ID. VkShaderStageFlags guest_warp_stages{}; ///< Stages where the guest warp size can be forced. u64 device_access_memory{}; ///< Total size of device local memory in bytes. + u32 max_push_descriptors{}; ///< Maximum number of push descriptors. bool is_optimal_astc_supported{}; ///< Support for native ASTC. bool is_float16_supported{}; ///< Support for float16 arithmetics. bool is_warp_potentially_bigger{}; ///< Host warp size can be bigger than guest. bool is_formatless_image_load_supported{}; ///< Support for shader image read without format. + bool is_depth_bounds_supported{}; ///< Support for depth bounds. + bool is_shader_float64_supported{}; ///< Support for float64. + bool is_shader_int64_supported{}; ///< Support for int64. + bool is_shader_int16_supported{}; ///< Support for int16. bool is_shader_storage_image_multisample{}; ///< Support for image operations on MSAA images. bool is_blit_depth_stencil_supported{}; ///< Support for blitting from and to depth stencil. bool nv_viewport_swizzle{}; ///< Support for VK_NV_viewport_swizzle. - bool khr_uniform_buffer_standard_layout{}; ///< Support for std430 on UBOs. - bool ext_index_type_uint8{}; ///< Support for VK_EXT_index_type_uint8. - bool ext_sampler_filter_minmax{}; ///< Support for VK_EXT_sampler_filter_minmax.
- bool ext_depth_range_unrestricted{}; ///< Support for VK_EXT_depth_range_unrestricted. - bool ext_shader_viewport_index_layer{}; ///< Support for VK_EXT_shader_viewport_index_layer. - bool ext_tooling_info{}; ///< Support for VK_EXT_tooling_info. - bool ext_transform_feedback{}; ///< Support for VK_EXT_transform_feedback. - bool ext_custom_border_color{}; ///< Support for VK_EXT_custom_border_color. - bool ext_extended_dynamic_state{}; ///< Support for VK_EXT_extended_dynamic_state. - bool ext_shader_stencil_export{}; ///< Support for VK_EXT_shader_stencil_export. - bool nv_device_diagnostics_config{}; ///< Support for VK_NV_device_diagnostics_config. - bool has_renderdoc{}; ///< Has RenderDoc attached - bool has_nsight_graphics{}; ///< Has Nsight Graphics attached - - // Asynchronous Graphics Pipeline setting - bool use_asynchronous_shaders{}; ///< Setting to use asynchronous shaders/graphics pipeline + bool nv_viewport_array2{}; ///< Support for VK_NV_viewport_array2. + bool nv_geometry_shader_passthrough{}; ///< Support for VK_NV_geometry_shader_passthrough. + bool khr_uniform_buffer_standard_layout{}; ///< Support for scalar uniform buffer layouts. + bool khr_spirv_1_4{}; ///< Support for VK_KHR_spirv_1_4. + bool khr_workgroup_memory_explicit_layout{}; ///< Support for explicit workgroup layouts. + bool khr_push_descriptor{}; ///< Support for VK_KHR_push_descriptor. + bool ext_index_type_uint8{}; ///< Support for VK_EXT_index_type_uint8. + bool ext_sampler_filter_minmax{}; ///< Support for VK_EXT_sampler_filter_minmax. + bool ext_depth_range_unrestricted{}; ///< Support for VK_EXT_depth_range_unrestricted. + bool ext_shader_viewport_index_layer{}; ///< Support for VK_EXT_shader_viewport_index_layer. + bool ext_tooling_info{}; ///< Support for VK_EXT_tooling_info. + bool ext_subgroup_size_control{}; ///< Support for VK_EXT_subgroup_size_control. + bool ext_transform_feedback{}; ///< Support for VK_EXT_transform_feedback. + bool ext_custom_border_color{}; ///< Support for VK_EXT_custom_border_color. + bool ext_extended_dynamic_state{}; ///< Support for VK_EXT_extended_dynamic_state. + bool ext_line_rasterization{}; ///< Support for VK_EXT_line_rasterization. + bool ext_vertex_input_dynamic_state{}; ///< Support for VK_EXT_vertex_input_dynamic_state. + bool ext_shader_stencil_export{}; ///< Support for VK_EXT_shader_stencil_export. + bool ext_shader_atomic_int64{}; ///< Support for VK_KHR_shader_atomic_int64. + bool ext_conservative_rasterization{}; ///< Support for VK_EXT_conservative_rasterization. + bool ext_provoking_vertex{}; ///< Support for VK_EXT_provoking_vertex. + bool nv_device_diagnostics_config{}; ///< Support for VK_NV_device_diagnostics_config. + bool has_renderdoc{}; ///< Has RenderDoc attached + bool has_nsight_graphics{}; ///< Has Nsight Graphics attached // Telemetry parameters - std::string vendor_name; ///< Device's driver name. - std::vector reported_extensions; ///< Reported Vulkan extensions. + std::string vendor_name; ///< Device's driver name. + std::vector supported_extensions; ///< Reported Vulkan extensions. /// Format properties dictionary.
std::unordered_map format_properties; diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp index 2aa0ffbe6..bbf0fccae 100755 --- a/src/video_core/vulkan_common/vulkan_wrapper.cpp +++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp @@ -103,6 +103,7 @@ void Load(VkDevice device, DeviceDispatch& dld) noexcept { X(vkCmdFillBuffer); X(vkCmdPipelineBarrier); X(vkCmdPushConstants); + X(vkCmdPushDescriptorSetWithTemplateKHR); X(vkCmdSetBlendConstants); X(vkCmdSetDepthBias); X(vkCmdSetDepthBounds); @@ -120,9 +121,11 @@ void Load(VkDevice device, DeviceDispatch& dld) noexcept { X(vkCmdSetDepthTestEnableEXT); X(vkCmdSetDepthWriteEnableEXT); X(vkCmdSetFrontFaceEXT); + X(vkCmdSetLineWidth); X(vkCmdSetPrimitiveTopologyEXT); X(vkCmdSetStencilOpEXT); X(vkCmdSetStencilTestEnableEXT); + X(vkCmdSetVertexInputEXT); X(vkCmdResolveImage); X(vkCreateBuffer); X(vkCreateBufferView); @@ -311,8 +314,6 @@ const char* ToString(VkResult result) noexcept { return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"; case VkResult::VK_ERROR_UNKNOWN: return "VK_ERROR_UNKNOWN"; - case VkResult::VK_ERROR_INCOMPATIBLE_VERSION_KHR: - return "VK_ERROR_INCOMPATIBLE_VERSION_KHR"; case VkResult::VK_THREAD_IDLE_KHR: return "VK_THREAD_IDLE_KHR"; case VkResult::VK_THREAD_DONE_KHR: diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h index 3e36d356a..d76bb4324 100755 --- a/src/video_core/vulkan_common/vulkan_wrapper.h +++ b/src/video_core/vulkan_common/vulkan_wrapper.h @@ -193,15 +193,16 @@ struct DeviceDispatch : InstanceDispatch { PFN_vkBeginCommandBuffer vkBeginCommandBuffer{}; PFN_vkBindBufferMemory vkBindBufferMemory{}; PFN_vkBindImageMemory vkBindImageMemory{}; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT{}; PFN_vkCmdBeginQuery vkCmdBeginQuery{}; PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass{}; PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT{}; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT{}; PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets{}; PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer{}; PFN_vkCmdBindPipeline vkCmdBindPipeline{}; PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT{}; PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers{}; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT{}; PFN_vkCmdBlitImage vkCmdBlitImage{}; PFN_vkCmdClearAttachments vkCmdClearAttachments{}; PFN_vkCmdCopyBuffer vkCmdCopyBuffer{}; @@ -211,34 +212,36 @@ struct DeviceDispatch : InstanceDispatch { PFN_vkCmdDispatch vkCmdDispatch{}; PFN_vkCmdDraw vkCmdDraw{}; PFN_vkCmdDrawIndexed vkCmdDrawIndexed{}; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT{}; PFN_vkCmdEndQuery vkCmdEndQuery{}; PFN_vkCmdEndRenderPass vkCmdEndRenderPass{}; PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT{}; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT{}; PFN_vkCmdFillBuffer vkCmdFillBuffer{}; PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier{}; PFN_vkCmdPushConstants vkCmdPushConstants{}; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR{}; + PFN_vkCmdResolveImage vkCmdResolveImage{}; PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants{}; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT{}; PFN_vkCmdSetDepthBias vkCmdSetDepthBias{}; PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds{}; - PFN_vkCmdSetEvent vkCmdSetEvent{}; - PFN_vkCmdSetScissor vkCmdSetScissor{}; - PFN_vkCmdSetStencilCompareMask 
vkCmdSetStencilCompareMask{}; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference{}; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask{}; - PFN_vkCmdSetViewport vkCmdSetViewport{}; - PFN_vkCmdWaitEvents vkCmdWaitEvents{}; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT{}; - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT{}; PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT{}; PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT{}; PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT{}; PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT{}; + PFN_vkCmdSetEvent vkCmdSetEvent{}; PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT{}; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth{}; PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT{}; + PFN_vkCmdSetScissor vkCmdSetScissor{}; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask{}; PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT{}; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference{}; PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT{}; - PFN_vkCmdResolveImage vkCmdResolveImage{}; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask{}; + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT{}; + PFN_vkCmdSetViewport vkCmdSetViewport{}; + PFN_vkCmdWaitEvents vkCmdWaitEvents{}; PFN_vkCreateBuffer vkCreateBuffer{}; PFN_vkCreateBufferView vkCreateBufferView{}; PFN_vkCreateCommandPool vkCreateCommandPool{}; @@ -989,6 +992,12 @@ public: dynamic_offsets.size(), dynamic_offsets.data()); } + void PushDescriptorSetWithTemplateKHR(VkDescriptorUpdateTemplateKHR update_template, + VkPipelineLayout layout, u32 set, + const void* data) const noexcept { + dld->vkCmdPushDescriptorSetWithTemplateKHR(handle, update_template, layout, set, data); + } + void BindPipeline(VkPipelineBindPoint bind_point, VkPipeline pipeline) const noexcept { dld->vkCmdBindPipeline(handle, bind_point, pipeline); } @@ -1190,6 +1199,10 @@ public: dld->vkCmdSetFrontFaceEXT(handle, front_face); } + void SetLineWidth(float line_width) const noexcept { + dld->vkCmdSetLineWidth(handle, line_width); + } + void SetPrimitiveTopologyEXT(VkPrimitiveTopology primitive_topology) const noexcept { dld->vkCmdSetPrimitiveTopologyEXT(handle, primitive_topology); } @@ -1203,6 +1216,13 @@ public: dld->vkCmdSetStencilTestEnableEXT(handle, enable ? 
VK_TRUE : VK_FALSE); } + void SetVertexInputEXT( + vk::Span bindings, + vk::Span attributes) const noexcept { + dld->vkCmdSetVertexInputEXT(handle, bindings.size(), bindings.data(), attributes.size(), + attributes.data()); + } + void BindTransformFeedbackBuffersEXT(u32 first, u32 count, const VkBuffer* buffers, const VkDeviceSize* offsets, const VkDeviceSize* sizes) const noexcept { diff --git a/src/yuzu/bootmanager.cpp b/src/yuzu/bootmanager.cpp index 7524e3c40..92e95510b 100755 --- a/src/yuzu/bootmanager.cpp +++ b/src/yuzu/bootmanager.cpp @@ -64,12 +64,13 @@ void EmuThread::run() { emit LoadProgress(VideoCore::LoadCallbackStage::Prepare, 0, 0); - system.Renderer().ReadRasterizer()->LoadDiskResources( - system.CurrentProcess()->GetTitleID(), stop_token, - [this](VideoCore::LoadCallbackStage stage, std::size_t value, std::size_t total) { - emit LoadProgress(stage, value, total); - }); - + if (Settings::values.use_disk_shader_cache.GetValue()) { + system.Renderer().ReadRasterizer()->LoadDiskResources( + system.CurrentProcess()->GetTitleID(), stop_token, + [this](VideoCore::LoadCallbackStage stage, std::size_t value, std::size_t total) { + emit LoadProgress(stage, value, total); + }); + } emit LoadProgress(VideoCore::LoadCallbackStage::Complete, 0, 0); gpu.ReleaseContext(); diff --git a/src/yuzu/configuration/config.cpp b/src/yuzu/configuration/config.cpp index 87cb9dc93..92a82e1cd 100755 --- a/src/yuzu/configuration/config.cpp +++ b/src/yuzu/configuration/config.cpp @@ -840,7 +840,7 @@ void Config::ReadRendererValues() { ReadGlobalSetting(Settings::values.accelerate_astc); ReadGlobalSetting(Settings::values.use_vsync); ReadGlobalSetting(Settings::values.disable_fps_limit); - ReadGlobalSetting(Settings::values.use_assembly_shaders); + ReadGlobalSetting(Settings::values.shader_backend); ReadGlobalSetting(Settings::values.use_asynchronous_shaders); ReadGlobalSetting(Settings::values.use_fast_gpu_time); ReadGlobalSetting(Settings::values.use_caches_gc); @@ -850,6 +850,8 @@ void Config::ReadRendererValues() { if (global) { ReadBasicSetting(Settings::values.renderer_debug); + ReadBasicSetting(Settings::values.enable_nsight_aftermath); + ReadBasicSetting(Settings::values.disable_shader_loop_safety_checks); } qt_config->endGroup(); @@ -1370,7 +1372,10 @@ void Config::SaveRendererValues() { WriteGlobalSetting(Settings::values.accelerate_astc); WriteGlobalSetting(Settings::values.use_vsync); WriteGlobalSetting(Settings::values.disable_fps_limit); - WriteGlobalSetting(Settings::values.use_assembly_shaders); + WriteSetting(QString::fromStdString(Settings::values.shader_backend.GetLabel()), + static_cast(Settings::values.shader_backend.GetValue(global)), + static_cast(Settings::values.shader_backend.GetDefault()), + Settings::values.shader_backend.UsingGlobal()); WriteGlobalSetting(Settings::values.use_asynchronous_shaders); WriteGlobalSetting(Settings::values.use_fast_gpu_time); WriteGlobalSetting(Settings::values.use_caches_gc); @@ -1380,6 +1385,8 @@ void Config::SaveRendererValues() { if (global) { WriteBasicSetting(Settings::values.renderer_debug); + WriteBasicSetting(Settings::values.enable_nsight_aftermath); + WriteBasicSetting(Settings::values.disable_shader_loop_safety_checks); } qt_config->endGroup(); diff --git a/src/yuzu/configuration/config.h b/src/yuzu/configuration/config.h index 96f9b6de1..4bbb9f1cd 100755 --- a/src/yuzu/configuration/config.h +++ b/src/yuzu/configuration/config.h @@ -180,5 +180,6 @@ private: // These metatype declarations cannot be in common/settings.h because core is 
devoid of QT Q_DECLARE_METATYPE(Settings::CPUAccuracy); -Q_DECLARE_METATYPE(Settings::RendererBackend); Q_DECLARE_METATYPE(Settings::GPUAccuracy); +Q_DECLARE_METATYPE(Settings::RendererBackend); +Q_DECLARE_METATYPE(Settings::ShaderBackend); diff --git a/src/yuzu/configuration/configure_debug.cpp b/src/yuzu/configuration/configure_debug.cpp index 8fceb3878..f7e29dbd7 100755 --- a/src/yuzu/configuration/configure_debug.cpp +++ b/src/yuzu/configuration/configure_debug.cpp @@ -45,8 +45,13 @@ void ConfigureDebug::SetConfiguration() { ui->enable_graphics_debugging->setChecked(Settings::values.renderer_debug.GetValue()); ui->enable_cpu_debugging->setEnabled(runtime_lock); ui->enable_cpu_debugging->setChecked(Settings::values.cpu_debug_mode.GetValue()); + ui->enable_nsight_aftermath->setEnabled(runtime_lock); + ui->enable_nsight_aftermath->setChecked(Settings::values.enable_nsight_aftermath.GetValue()); ui->disable_macro_jit->setEnabled(runtime_lock); ui->disable_macro_jit->setChecked(Settings::values.disable_macro_jit.GetValue()); + ui->disable_loop_safety_checks->setEnabled(runtime_lock); + ui->disable_loop_safety_checks->setChecked( + Settings::values.disable_shader_loop_safety_checks.GetValue()); ui->extended_logging->setChecked(Settings::values.extended_logging.GetValue()); } @@ -61,6 +66,9 @@ void ConfigureDebug::ApplyConfiguration() { Settings::values.use_auto_stub = ui->use_auto_stub->isChecked(); Settings::values.renderer_debug = ui->enable_graphics_debugging->isChecked(); Settings::values.cpu_debug_mode = ui->enable_cpu_debugging->isChecked(); + Settings::values.enable_nsight_aftermath = ui->enable_nsight_aftermath->isChecked(); + Settings::values.disable_shader_loop_safety_checks = + ui->disable_loop_safety_checks->isChecked(); Settings::values.disable_macro_jit = ui->disable_macro_jit->isChecked(); Settings::values.extended_logging = ui->extended_logging->isChecked(); Debugger::ToggleConsole(); diff --git a/src/yuzu/configuration/configure_debug.ui b/src/yuzu/configuration/configure_debug.ui index 1260ad6f0..c8baf2921 100755 --- a/src/yuzu/configuration/configure_debug.ui +++ b/src/yuzu/configuration/configure_debug.ui @@ -125,6 +125,16 @@ + + + + When checked, it enables Nsight Aftermath crash dumps + + + Enable Nsight Aftermath + + + @@ -138,6 +148,16 @@ + + + + When checked, it executes shaders without loop logic changes + + + Disable Loop safety checks + + + @@ -252,11 +272,17 @@ log_filter_edit toggle_console + extended_logging open_log_button homebrew_args_edit enable_graphics_debugging + enable_nsight_aftermath + disable_macro_jit + disable_loop_safety_checks reporting_services quest_flag + use_debug_asserts + use_auto_stub diff --git a/src/yuzu/configuration/configure_graphics.cpp b/src/yuzu/configuration/configure_graphics.cpp index 41a69d9b8..7097e4c2d 100755 --- a/src/yuzu/configuration/configure_graphics.cpp +++ b/src/yuzu/configuration/configure_graphics.cpp @@ -26,19 +26,29 @@ ConfigureGraphics::ConfigureGraphics(QWidget* parent) ui->setupUi(this); + for (const auto& device : vulkan_devices) { + ui->device->addItem(device); + } + + ui->backend->addItem(QStringLiteral("GLSL")); + ui->backend->addItem(tr("GLASM (NVIDIA Only)")); + ui->backend->addItem(QStringLiteral("SPIR-V (Experimental, Mesa Only)")); + SetupPerGameUI(); SetConfiguration(); connect(ui->api, qOverload(&QComboBox::currentIndexChanged), this, [this] { - UpdateDeviceComboBox(); + UpdateAPILayout(); if (!Settings::IsConfiguringGlobal()) { ConfigurationShared::SetHighlight( - ui->api_layout, 
ui->api->currentIndex() != ConfigurationShared::USE_GLOBAL_INDEX); + ui->api_widget, ui->api->currentIndex() != ConfigurationShared::USE_GLOBAL_INDEX); } }); connect(ui->device, qOverload(&QComboBox::activated), this, [this](int device) { UpdateDeviceSelection(device); }); + connect(ui->backend, qOverload(&QComboBox::activated), this, + [this](int backend) { UpdateShaderBackendSelection(backend); }); connect(ui->bg_button, &QPushButton::clicked, this, [this] { const QColor new_bg_color = QColorDialog::getColor(bg_color); @@ -61,12 +71,21 @@ void ConfigureGraphics::UpdateDeviceSelection(int device) { } } +void ConfigureGraphics::UpdateShaderBackendSelection(int backend) { + if (backend == -1) { + return; + } + if (GetCurrentGraphicsBackend() == Settings::RendererBackend::OpenGL) { + shader_backend = static_cast(backend); + } +} + ConfigureGraphics::~ConfigureGraphics() = default; void ConfigureGraphics::SetConfiguration() { const bool runtime_lock = !Core::System::GetInstance().IsPoweredOn(); - ui->api->setEnabled(runtime_lock); + ui->api_widget->setEnabled(runtime_lock); ui->use_asynchronous_gpu_emulation->setEnabled(runtime_lock); ui->use_disk_shader_cache->setEnabled(runtime_lock); ui->use_nvdec_emulation->setEnabled(runtime_lock); @@ -83,7 +102,7 @@ void ConfigureGraphics::SetConfiguration() { ui->aspect_ratio_combobox->setCurrentIndex(Settings::values.aspect_ratio.GetValue()); } else { ConfigurationShared::SetPerGameSetting(ui->api, &Settings::values.renderer_backend); - ConfigurationShared::SetHighlight(ui->api_layout, + ConfigurationShared::SetHighlight(ui->api_widget, !Settings::values.renderer_backend.UsingGlobal()); ConfigurationShared::SetPerGameSetting(ui->fullscreen_mode_combobox, @@ -104,7 +123,7 @@ void ConfigureGraphics::SetConfiguration() { UpdateBackgroundColorButton(QColor::fromRgbF(Settings::values.bg_red.GetValue(), Settings::values.bg_green.GetValue(), Settings::values.bg_blue.GetValue())); - UpdateDeviceComboBox(); + UpdateAPILayout(); } void ConfigureGraphics::ApplyConfiguration() { @@ -128,6 +147,9 @@ void ConfigureGraphics::ApplyConfiguration() { if (Settings::values.renderer_backend.UsingGlobal()) { Settings::values.renderer_backend.SetValue(GetCurrentGraphicsBackend()); } + if (Settings::values.shader_backend.UsingGlobal()) { + Settings::values.shader_backend.SetValue(shader_backend); + } if (Settings::values.vulkan_device.UsingGlobal()) { Settings::values.vulkan_device.SetValue(vulkan_device); } @@ -139,15 +161,22 @@ void ConfigureGraphics::ApplyConfiguration() { } else { if (ui->api->currentIndex() == ConfigurationShared::USE_GLOBAL_INDEX) { Settings::values.renderer_backend.SetGlobal(true); + Settings::values.shader_backend.SetGlobal(true); Settings::values.vulkan_device.SetGlobal(true); } else { Settings::values.renderer_backend.SetGlobal(false); Settings::values.renderer_backend.SetValue(GetCurrentGraphicsBackend()); - if (GetCurrentGraphicsBackend() == Settings::RendererBackend::Vulkan) { + switch (GetCurrentGraphicsBackend()) { + case Settings::RendererBackend::OpenGL: + Settings::values.shader_backend.SetGlobal(false); + Settings::values.vulkan_device.SetGlobal(true); + Settings::values.shader_backend.SetValue(shader_backend); + break; + case Settings::RendererBackend::Vulkan: + Settings::values.shader_backend.SetGlobal(true); Settings::values.vulkan_device.SetGlobal(false); Settings::values.vulkan_device.SetValue(vulkan_device); - } else { - Settings::values.vulkan_device.SetGlobal(true); + break; } } @@ -188,32 +217,32 @@ void 
ConfigureGraphics::UpdateBackgroundColorButton(QColor color) { ui->bg_button->setIcon(color_icon); } -void ConfigureGraphics::UpdateDeviceComboBox() { - ui->device->clear(); - - bool enabled = false; - +void ConfigureGraphics::UpdateAPILayout() { if (!Settings::IsConfiguringGlobal() && ui->api->currentIndex() == ConfigurationShared::USE_GLOBAL_INDEX) { + vulkan_device = Settings::values.vulkan_device.GetValue(true); + shader_backend = Settings::values.shader_backend.GetValue(true); + ui->device_widget->setEnabled(false); + ui->backend_widget->setEnabled(false); + } else { vulkan_device = Settings::values.vulkan_device.GetValue(); + shader_backend = Settings::values.shader_backend.GetValue(); + ui->device_widget->setEnabled(true); + ui->backend_widget->setEnabled(true); } + switch (GetCurrentGraphicsBackend()) { case Settings::RendererBackend::OpenGL: - ui->device->addItem(tr("OpenGL Graphics Device")); - enabled = false; + ui->backend->setCurrentIndex(static_cast(shader_backend)); + ui->device_widget->setVisible(false); + ui->backend_widget->setVisible(true); break; case Settings::RendererBackend::Vulkan: - for (const auto& device : vulkan_devices) { - ui->device->addItem(device); - } ui->device->setCurrentIndex(vulkan_device); - enabled = !vulkan_devices.empty(); + ui->device_widget->setVisible(true); + ui->backend_widget->setVisible(false); break; } - // If in per-game config and use global is selected, don't enable. - enabled &= !(!Settings::IsConfiguringGlobal() && - ui->api->currentIndex() == ConfigurationShared::USE_GLOBAL_INDEX); - ui->device->setEnabled(enabled && !Core::System::GetInstance().IsPoweredOn()); } void ConfigureGraphics::RetrieveVulkanDevices() try { diff --git a/src/yuzu/configuration/configure_graphics.h b/src/yuzu/configuration/configure_graphics.h index 6418115cf..c866b911b 100755 --- a/src/yuzu/configuration/configure_graphics.h +++ b/src/yuzu/configuration/configure_graphics.h @@ -34,8 +34,9 @@ private: void SetConfiguration(); void UpdateBackgroundColorButton(QColor color); - void UpdateDeviceComboBox(); + void UpdateAPILayout(); void UpdateDeviceSelection(int device); + void UpdateShaderBackendSelection(int backend); void RetrieveVulkanDevices(); @@ -53,4 +54,5 @@ private: std::vector vulkan_devices; u32 vulkan_device{}; + Settings::ShaderBackend shader_backend{}; }; diff --git a/src/yuzu/configuration/configure_graphics.ui b/src/yuzu/configuration/configure_graphics.ui index 5b999d84d..099ddbb7c 100755 --- a/src/yuzu/configuration/configure_graphics.ui +++ b/src/yuzu/configuration/configure_graphics.ui @@ -23,7 +23,7 @@ - + 0 @@ -40,37 +40,107 @@ 6 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + + + Shader Backend: + + + + + + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + + + Device: + + + + + + + + + - - - API: - - - - - - - - OpenGL + + + + 0 - - - - Vulkan + + 0 - + + 0 + + + 0 + + + + + API: + + + + + + + + 0 + 0 + + + + + OpenGL + + + + + Vulkan + + + + + - - - - Device: - - - - - - diff --git a/src/yuzu/configuration/configure_graphics_advanced.cpp b/src/yuzu/configuration/configure_graphics_advanced.cpp index 8d13c9857..e570fd2de 100755 --- a/src/yuzu/configuration/configure_graphics_advanced.cpp +++ b/src/yuzu/configuration/configure_graphics_advanced.cpp @@ -23,13 +23,11 @@ ConfigureGraphicsAdvanced::~ConfigureGraphicsAdvanced() = default; void ConfigureGraphicsAdvanced::SetConfiguration() { const bool runtime_lock = !Core::System::GetInstance().IsPoweredOn(); ui->use_vsync->setEnabled(runtime_lock); - ui->use_assembly_shaders->setEnabled(runtime_lock); 
ui->use_asynchronous_shaders->setEnabled(runtime_lock); ui->anisotropic_filtering_combobox->setEnabled(runtime_lock); ui->use_vsync->setChecked(Settings::values.use_vsync.GetValue()); ui->disable_fps_limit->setChecked(Settings::values.disable_fps_limit.GetValue()); - ui->use_assembly_shaders->setChecked(Settings::values.use_assembly_shaders.GetValue()); ui->use_asynchronous_shaders->setChecked(Settings::values.use_asynchronous_shaders.GetValue()); ui->use_caches_gc->setChecked(Settings::values.use_caches_gc.GetValue()); ui->use_fast_gpu_time->setChecked(Settings::values.use_fast_gpu_time.GetValue()); @@ -61,8 +59,6 @@ void ConfigureGraphicsAdvanced::ApplyConfiguration() { ConfigurationShared::ApplyPerGameSetting(&Settings::values.use_vsync, ui->use_vsync, use_vsync); ConfigurationShared::ApplyPerGameSetting(&Settings::values.disable_fps_limit, ui->disable_fps_limit, disable_fps_limit); - ConfigurationShared::ApplyPerGameSetting(&Settings::values.use_assembly_shaders, - ui->use_assembly_shaders, use_assembly_shaders); ConfigurationShared::ApplyPerGameSetting(&Settings::values.use_asynchronous_shaders, ui->use_asynchronous_shaders, use_asynchronous_shaders); @@ -104,7 +100,6 @@ void ConfigureGraphicsAdvanced::SetupPerGameUI() { ui->gpu_accuracy->setEnabled(Settings::values.gpu_accuracy.UsingGlobal()); ui->use_vsync->setEnabled(Settings::values.use_vsync.UsingGlobal()); ui->disable_fps_limit->setEnabled(Settings::values.disable_fps_limit.UsingGlobal()); - ui->use_assembly_shaders->setEnabled(Settings::values.use_assembly_shaders.UsingGlobal()); ui->use_asynchronous_shaders->setEnabled( Settings::values.use_asynchronous_shaders.UsingGlobal()); ui->use_fast_gpu_time->setEnabled(Settings::values.use_fast_gpu_time.UsingGlobal()); @@ -118,8 +113,6 @@ void ConfigureGraphicsAdvanced::SetupPerGameUI() { ConfigurationShared::SetColoredTristate(ui->use_vsync, Settings::values.use_vsync, use_vsync); ConfigurationShared::SetColoredTristate(ui->disable_fps_limit, Settings::values.disable_fps_limit, disable_fps_limit); - ConfigurationShared::SetColoredTristate( - ui->use_assembly_shaders, Settings::values.use_assembly_shaders, use_assembly_shaders); ConfigurationShared::SetColoredTristate(ui->use_asynchronous_shaders, Settings::values.use_asynchronous_shaders, use_asynchronous_shaders); diff --git a/src/yuzu/configuration/configure_graphics_advanced.h b/src/yuzu/configuration/configure_graphics_advanced.h index 6ac5f20ec..b4e5d7bd4 100755 --- a/src/yuzu/configuration/configure_graphics_advanced.h +++ b/src/yuzu/configuration/configure_graphics_advanced.h @@ -36,7 +36,6 @@ private: ConfigurationShared::CheckState use_vsync; ConfigurationShared::CheckState disable_fps_limit; - ConfigurationShared::CheckState use_assembly_shaders; ConfigurationShared::CheckState use_asynchronous_shaders; ConfigurationShared::CheckState use_fast_gpu_time; ConfigurationShared::CheckState use_caches_gc; diff --git a/src/yuzu/configuration/configure_graphics_advanced.ui b/src/yuzu/configuration/configure_graphics_advanced.ui index 18c43629e..38fd9b37e 100755 --- a/src/yuzu/configuration/configure_graphics_advanced.ui +++ b/src/yuzu/configuration/configure_graphics_advanced.ui @@ -94,16 +94,6 @@ - - - - Enabling this reduces shader stutter. Enables OpenGL assembly shaders on supported Nvidia devices (NV_gpu_program5 is required). This feature is experimental. 
- - - Use assembly shaders (experimental, Nvidia OpenGL only) - - - @@ -162,22 +152,22 @@ - 2x + 2x (WILL BREAK THINGS) - 4x + 4x (WILL BREAK THINGS) - 8x + 8x (WILL BREAK THINGS) - 16x + 16x (WILL BREAK THINGS) diff --git a/src/yuzu/main.cpp b/src/yuzu/main.cpp index d94c660b3..6bf6643da 100755 --- a/src/yuzu/main.cpp +++ b/src/yuzu/main.cpp @@ -1631,35 +1631,15 @@ void GMainWindow::OnGameListOpenFolder(u64 program_id, GameListOpenTarget target void GMainWindow::OnTransferableShaderCacheOpenFile(u64 program_id) { const auto shader_cache_dir = Common::FS::GetYuzuPath(Common::FS::YuzuPath::ShaderDir); - const auto transferable_shader_cache_folder_path = shader_cache_dir / "opengl" / "transferable"; - const auto transferable_shader_cache_file_path = - transferable_shader_cache_folder_path / fmt::format("{:016X}.bin", program_id); - - if (!Common::FS::Exists(transferable_shader_cache_file_path)) { + const auto shader_cache_folder_path{shader_cache_dir / fmt::format("{:016x}", program_id)}; + if (!Common::FS::Exists(shader_cache_folder_path)) { QMessageBox::warning(this, tr("Error Opening Transferable Shader Cache"), tr("A shader cache for this title does not exist.")); return; } - - const auto qt_shader_cache_folder_path = - QString::fromStdString(Common::FS::PathToUTF8String(transferable_shader_cache_folder_path)); - const auto qt_shader_cache_file_path = - QString::fromStdString(Common::FS::PathToUTF8String(transferable_shader_cache_file_path)); - - // Windows supports opening a folder with selecting a specified file in explorer. On every other - // OS we just open the transferable shader cache folder without preselecting the transferable - // shader cache file for the selected game. -#if defined(Q_OS_WIN) - const QString explorer = QStringLiteral("explorer"); - QStringList param; - if (!QFileInfo(qt_shader_cache_file_path).isDir()) { - param << QStringLiteral("/select,"); - } - param << QDir::toNativeSeparators(qt_shader_cache_file_path); - QProcess::startDetached(explorer, param); -#else - QDesktopServices::openUrl(QUrl::fromLocalFile(qt_shader_cache_folder_path)); -#endif + const auto shader_path_string{Common::FS::PathToUTF8String(shader_cache_folder_path)}; + const auto qt_shader_cache_path = QString::fromStdString(shader_path_string); + QDesktopServices::openUrl(QUrl::fromLocalFile(qt_shader_cache_path)); } static std::size_t CalculateRomFSEntrySize(const FileSys::VirtualDir& dir, bool full) { @@ -2877,13 +2857,13 @@ void GMainWindow::UpdateStatusBar() { return; } - auto results = Core::System::GetInstance().GetAndResetPerfStats(); - auto& shader_notify = Core::System::GetInstance().GPU().ShaderNotify(); - const auto shaders_building = shader_notify.GetShadersBuilding(); + auto& system = Core::System::GetInstance(); + auto results = system.GetAndResetPerfStats(); + auto& shader_notify = system.GPU().ShaderNotify(); + const int shaders_building = shader_notify.ShadersBuilding(); - if (shaders_building != 0) { - shader_building_label->setText( - tr("Building: %n shader(s)", "", static_cast(shaders_building))); + if (shaders_building > 0) { + shader_building_label->setText(tr("Building: %n shader(s)", "", shaders_building)); shader_building_label->setVisible(true); } else { shader_building_label->setVisible(false); diff --git a/src/yuzu_cmd/config.cpp b/src/yuzu_cmd/config.cpp index 325584a1a..8079f8100 100755 --- a/src/yuzu_cmd/config.cpp +++ b/src/yuzu_cmd/config.cpp @@ -424,6 +424,8 @@ void Config::ReadValues() { // Renderer ReadSetting("Renderer", Settings::values.renderer_backend); 
ReadSetting("Renderer", Settings::values.renderer_debug); + ReadSetting("Renderer", Settings::values.enable_nsight_aftermath); + ReadSetting("Renderer", Settings::values.disable_shader_loop_safety_checks); ReadSetting("Renderer", Settings::values.vulkan_device); ReadSetting("Renderer", Settings::values.aspect_ratio); @@ -435,7 +437,7 @@ void Config::ReadValues() { ReadSetting("Renderer", Settings::values.use_asynchronous_gpu_emulation); ReadSetting("Renderer", Settings::values.use_vsync); ReadSetting("Renderer", Settings::values.disable_fps_limit); - ReadSetting("Renderer", Settings::values.use_assembly_shaders); + ReadSetting("Renderer", Settings::values.shader_backend); ReadSetting("Renderer", Settings::values.use_asynchronous_shaders); ReadSetting("Renderer", Settings::values.use_nvdec_emulation); ReadSetting("Renderer", Settings::values.accelerate_astc); diff --git a/src/yuzu_cmd/default_ini.h b/src/yuzu_cmd/default_ini.h index cc9850aad..76d983247 100755 --- a/src/yuzu_cmd/default_ini.h +++ b/src/yuzu_cmd/default_ini.h @@ -163,6 +163,14 @@ backend = # 0 (default): Disabled, 1: Enabled debug = +# Enable Nsight Aftermath crash dumps +# 0 (default): Disabled, 1: Enabled +nsight_aftermath = + +# Disable shader loop safety checks, executing the shader without loop logic changes +# 0 (default): Disabled, 1: Enabled +disable_shader_loop_safety_checks = + # Which Vulkan physical device to use (defaults to 0) vulkan_device = @@ -186,9 +194,10 @@ max_anisotropy = # 0 (default): Off, 1: On use_vsync = -# Whether to use OpenGL assembly shaders or not. NV_gpu_program5 is required. -# 0: Off, 1 (default): On -use_assembly_shaders = +# Selects the OpenGL shader backend. NV_gpu_program5 is required for GLASM. If NV_gpu_program5 is +# not available and GLASM is selected, GLSL will be used. +# 0: GLSL, 1 (default): GLASM, 2: SPIR-V +shader_backend = # Whether to allow asynchronous shader building. # 0 (default): Off, 1: On diff --git a/src/yuzu_cmd/yuzu.cpp b/src/yuzu_cmd/yuzu.cpp index 9607cdcb1..f46731a33 100755 --- a/src/yuzu_cmd/yuzu.cpp +++ b/src/yuzu_cmd/yuzu.cpp @@ -218,9 +218,11 @@ int main(int argc, char** argv) { // Core is loaded, start the GPU (makes the GPU contexts current to this thread) system.GPU().Start(); - system.Renderer().ReadRasterizer()->LoadDiskResources( - system.CurrentProcess()->GetTitleID(), std::stop_token{}, - [](VideoCore::LoadCallbackStage, size_t value, size_t total) {}); + if (Settings::values.use_disk_shader_cache.GetValue()) { + system.Renderer().ReadRasterizer()->LoadDiskResources( + system.CurrentProcess()->GetTitleID(), std::stop_token{}, + [](VideoCore::LoadCallbackStage, size_t value, size_t total) {}); + } void(system.Run()); while (emu_window->IsOpen()) {